mirror of
https://github.com/oven-sh/bun
synced 2026-02-05 08:28:55 +00:00
Compare commits
203 Commits
claude/css
...
dylan/test
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ec2cf94da8 | ||
|
|
959169dfaf | ||
|
|
461ad886bd | ||
|
|
b6abbd50a0 | ||
|
|
beccd01647 | ||
|
|
35eb53994a | ||
|
|
ebf39e9811 | ||
|
|
b610e80ee0 | ||
|
|
7076a49bb1 | ||
|
|
d4a966f8ae | ||
|
|
7704dca660 | ||
|
|
1e0f51ddcc | ||
|
|
32a76904fe | ||
|
|
367eeb308e | ||
|
|
1879b7eeca | ||
|
|
70fa6af355 | ||
|
|
eb5b498c62 | ||
|
|
596e83c918 | ||
|
|
3842a5ee18 | ||
|
|
50daf5df27 | ||
|
|
c90c0e69cb | ||
|
|
24b97994e3 | ||
|
|
dda9a9b0fd | ||
|
|
eeef013365 | ||
|
|
65d006aae0 | ||
|
|
8b59b8d17d | ||
|
|
a1f1252771 | ||
|
|
bf1e4922b4 | ||
|
|
fbf47d0256 | ||
|
|
f83214e0a9 | ||
|
|
81debb4269 | ||
|
|
962ac0c2fd | ||
|
|
bdc95c2dc5 | ||
|
|
29a6c0d263 | ||
|
|
39e2c22e1a | ||
|
|
b20a70dc40 | ||
|
|
1f22f4447d | ||
|
|
ff590e9cfd | ||
|
|
18f242daa1 | ||
|
|
fbc175692f | ||
|
|
22315000e0 | ||
|
|
bc02c18dc5 | ||
|
|
4c492c66b8 | ||
|
|
46801ec926 | ||
|
|
5617b92a5a | ||
|
|
c6a73fc23e | ||
|
|
3de2dc1287 | ||
|
|
370e6fb9fa | ||
|
|
91f7a94d84 | ||
|
|
9ab6365a13 | ||
|
|
bf937f7294 | ||
|
|
ce9788716f | ||
|
|
4301af9f3e | ||
|
|
8d1de78c7e | ||
|
|
27ff6aaae0 | ||
|
|
779764332a | ||
|
|
0141a4fac9 | ||
|
|
113830d3cf | ||
|
|
d9ae93e025 | ||
|
|
604c83c8a6 | ||
|
|
370b25c086 | ||
|
|
538be1399c | ||
|
|
d04b86d34f | ||
|
|
37fc8e99f7 | ||
|
|
6b5de25d8a | ||
|
|
7b49654db6 | ||
|
|
603bbd18a0 | ||
|
|
1d7cb4bbad | ||
|
|
01de0ecbd9 | ||
|
|
d3a5f2eef2 | ||
|
|
b51e993bc2 | ||
|
|
92f105dbe1 | ||
|
|
d0bd1b121f | ||
|
|
81b4a40fbd | ||
|
|
5715b54614 | ||
|
|
28fd495b39 | ||
|
|
699d8b1e1c | ||
|
|
2247c3859a | ||
|
|
08e03814e5 | ||
|
|
0dd4f025b6 | ||
|
|
79067037ff | ||
|
|
822d75a380 | ||
|
|
bffccf3d5f | ||
|
|
0300150324 | ||
|
|
34a1e2adad | ||
|
|
8484e1b827 | ||
|
|
3898ed5e3f | ||
|
|
c08ffadf56 | ||
|
|
fa983247b2 | ||
|
|
99b0a16c33 | ||
|
|
085e25d5d1 | ||
|
|
ce5c336ea5 | ||
|
|
05b12e0ed0 | ||
|
|
d9459f8540 | ||
|
|
e79b512a9d | ||
|
|
9902039b1f | ||
|
|
f3fd7506ef | ||
|
|
c21c51a0ff | ||
|
|
0bbf6c74b5 | ||
|
|
57cbbc09e4 | ||
|
|
7f589ffb4b | ||
|
|
cea59d7fc0 | ||
|
|
4ea1454e4a | ||
|
|
8941a363c3 | ||
|
|
722ac3aa5a | ||
|
|
a333d02f84 | ||
|
|
c1acb0b9a4 | ||
|
|
ffd2240c31 | ||
|
|
fa5a5bbe55 | ||
|
|
1e86cebd74 | ||
|
|
bc47f87450 | ||
|
|
698b004ea4 | ||
|
|
b135c207ed | ||
|
|
a1dd26d7db | ||
|
|
7c06320d0f | ||
|
|
dd04c57258 | ||
|
|
344b2c1dfe | ||
|
|
aef0b5b4a6 | ||
|
|
740fb23315 | ||
|
|
2dd997c4b5 | ||
|
|
4061e1cb4f | ||
|
|
6386eef8aa | ||
|
|
3394fd3bdd | ||
|
|
5a8cdc08f0 | ||
|
|
dcc3386611 | ||
|
|
8dc79641c8 | ||
|
|
d865ef41e2 | ||
|
|
e66b4639bd | ||
|
|
8698d25c52 | ||
|
|
81a5c79928 | ||
|
|
fa996ad1a8 | ||
|
|
ed1d6e595c | ||
|
|
7c98b0f440 | ||
|
|
f0d18d73c9 | ||
|
|
a5712b92b8 | ||
|
|
7dcd49f832 | ||
|
|
c59a6997cd | ||
|
|
1d50af7fe8 | ||
|
|
98cee5a57e | ||
|
|
ac0099ebc6 | ||
|
|
64146d47f9 | ||
|
|
a2d8b75962 | ||
|
|
a15fe76bf2 | ||
|
|
8dc084af5f | ||
|
|
2028e21d60 | ||
|
|
f25ea59683 | ||
|
|
55c6afb498 | ||
|
|
0aca002161 | ||
|
|
4980736786 | ||
|
|
3af0d23d53 | ||
|
|
9c96937329 | ||
|
|
d4eaaf8363 | ||
|
|
e1aa437694 | ||
|
|
73c3f0004f | ||
|
|
b80cb629c6 | ||
|
|
8773f7ab65 | ||
|
|
5eb2145b31 | ||
|
|
cde167cacd | ||
|
|
6ce419d3f8 | ||
|
|
05508a627d | ||
|
|
23383b32b0 | ||
|
|
0d5a7c36ed | ||
|
|
b4c8379447 | ||
|
|
438aaf9e95 | ||
|
|
4d60b6f69d | ||
|
|
e9e93244cb | ||
|
|
800a937cc2 | ||
|
|
830fd9b0ae | ||
|
|
fe0aba79f4 | ||
|
|
a4aaec5b2f | ||
|
|
24bc8aa416 | ||
|
|
2ab6efeea3 | ||
|
|
6745bdaa85 | ||
|
|
dce7a02f4d | ||
|
|
9c420c9eff | ||
|
|
9c2ca4b8fd | ||
|
|
e624f1e571 | ||
|
|
27381063b6 | ||
|
|
9ca8de6eb9 | ||
|
|
fdcfac6a75 | ||
|
|
ce1981c525 | ||
|
|
cc3fc5a1d3 | ||
|
|
d83e0eb1f1 | ||
|
|
72b9525507 | ||
|
|
0f7494569e | ||
|
|
9fd6b54c10 | ||
|
|
19acc4dcac | ||
|
|
56da7c4fd9 | ||
|
|
5bdb8ec0cb | ||
|
|
4cf9b794c9 | ||
|
|
998ec54da9 | ||
|
|
0305f3d4d2 | ||
|
|
1006a4fac2 | ||
|
|
c7f7d9bb82 | ||
|
|
37bce389a0 | ||
|
|
bab583497c | ||
|
|
a83fceafc7 | ||
|
|
ef8eef3df8 | ||
|
|
69b571da41 | ||
|
|
908ab9ce30 | ||
|
|
43c46b1f77 | ||
|
|
a0c5f3dc69 | ||
|
|
5965ff18ea |
@@ -26,7 +26,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
wget curl git python3 python3-pip ninja-build \
|
||||
software-properties-common apt-transport-https \
|
||||
ca-certificates gnupg lsb-release unzip \
|
||||
libxml2-dev ruby ruby-dev bison gawk perl make golang \
|
||||
libxml2-dev ruby ruby-dev bison gawk perl make golang ccache \
|
||||
&& add-apt-repository ppa:ubuntu-toolchain-r/test \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \
|
||||
@@ -35,7 +35,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
&& wget https://apt.llvm.org/llvm.sh \
|
||||
&& chmod +x llvm.sh \
|
||||
&& ./llvm.sh ${LLVM_VERSION} all \
|
||||
&& rm llvm.sh
|
||||
&& rm llvm.sh \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
|
||||
RUN --mount=type=tmpfs,target=/tmp \
|
||||
@@ -48,14 +49,6 @@ RUN --mount=type=tmpfs,target=/tmp \
|
||||
wget -O /tmp/cmake.sh "$cmake_url" && \
|
||||
sh /tmp/cmake.sh --skip-license --prefix=/usr
|
||||
|
||||
RUN --mount=type=tmpfs,target=/tmp \
|
||||
sccache_version="0.12.0" && \
|
||||
arch=$(uname -m) && \
|
||||
sccache_url="https://github.com/mozilla/sccache/releases/download/v${sccache_version}/sccache-v${sccache_version}-${arch}-unknown-linux-musl.tar.gz" && \
|
||||
wget -O /tmp/sccache.tar.gz "$sccache_url" && \
|
||||
tar -xzf /tmp/sccache.tar.gz -C /tmp && \
|
||||
install -m755 /tmp/sccache-v${sccache_version}-${arch}-unknown-linux-musl/sccache /usr/local/bin
|
||||
|
||||
RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \
|
||||
--slave /usr/bin/g++ g++ /usr/bin/g++-13 \
|
||||
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \
|
||||
@@ -134,9 +127,7 @@ RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64";
|
||||
RUN mkdir -p /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent/cache/bun
|
||||
|
||||
# The following is necessary to configure buildkite to use a stable
|
||||
# checkout directory. sccache hashes absolute paths into its cache keys,
|
||||
# so if buildkite uses a different checkout path each time (which it does
|
||||
# by default), sccache will be useless.
|
||||
# checkout directory for ccache to be effective.
|
||||
RUN mkdir -p -m 755 /var/lib/buildkite-agent/hooks && \
|
||||
cat <<'EOF' > /var/lib/buildkite-agent/hooks/environment
|
||||
#!/bin/sh
|
||||
|
||||
@@ -31,7 +31,7 @@ import {
|
||||
} from "../scripts/utils.mjs";
|
||||
|
||||
/**
|
||||
* @typedef {"linux" | "darwin" | "windows" | "freebsd"} Os
|
||||
* @typedef {"linux" | "darwin" | "windows"} Os
|
||||
* @typedef {"aarch64" | "x64"} Arch
|
||||
* @typedef {"musl"} Abi
|
||||
* @typedef {"debian" | "ubuntu" | "alpine" | "amazonlinux"} Distro
|
||||
@@ -114,7 +114,6 @@ const buildPlatforms = [
|
||||
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22" },
|
||||
{ os: "windows", arch: "x64", release: "2019" },
|
||||
{ os: "windows", arch: "x64", baseline: true, release: "2019" },
|
||||
{ os: "freebsd", arch: "x64", release: "14.3" },
|
||||
];
|
||||
|
||||
/**
|
||||
@@ -125,16 +124,13 @@ const testPlatforms = [
|
||||
{ os: "darwin", arch: "aarch64", release: "13", tier: "previous" },
|
||||
{ os: "darwin", arch: "x64", release: "14", tier: "latest" },
|
||||
{ os: "darwin", arch: "x64", release: "13", tier: "previous" },
|
||||
{ os: "linux", arch: "aarch64", distro: "debian", release: "12", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", distro: "debian", release: "12", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", profile: "asan", distro: "debian", release: "12", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", distro: "debian", release: "13", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", distro: "debian", release: "13", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "debian", release: "13", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", profile: "asan", distro: "debian", release: "13", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "25.04", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "24.04", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "25.04", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "25.04", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.22", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.22", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22", tier: "latest" },
|
||||
@@ -575,6 +571,7 @@ function getTestBunStep(platform, options, testOptions = {}) {
|
||||
if (buildId) {
|
||||
args.push(`--build-id=${buildId}`);
|
||||
}
|
||||
|
||||
if (testFiles) {
|
||||
args.push(...testFiles.map(testFile => `--include=${testFile}`));
|
||||
}
|
||||
@@ -591,7 +588,7 @@ function getTestBunStep(platform, options, testOptions = {}) {
|
||||
agents: getTestAgent(platform, options),
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
parallelism: os === "darwin" ? 2 : 10,
|
||||
parallelism: os === "darwin" ? 2 : 20,
|
||||
timeout_in_minutes: profile === "asan" || os === "windows" ? 45 : 30,
|
||||
env: {
|
||||
ASAN_OPTIONS: "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=0",
|
||||
@@ -660,7 +657,7 @@ function getReleaseStep(buildPlatforms, options) {
|
||||
agents: {
|
||||
queue: "test-darwin",
|
||||
},
|
||||
depends_on: buildPlatforms.filter(p => p.os !== "freebsd").map(platform => `${getTargetKey(platform)}-build-bun`),
|
||||
depends_on: buildPlatforms.map(platform => `${getTargetKey(platform)}-build-bun`),
|
||||
env: {
|
||||
CANARY: revision,
|
||||
},
|
||||
@@ -1111,9 +1108,6 @@ async function getPipeline(options = {}) {
|
||||
? buildPlatforms
|
||||
: buildPlatforms.filter(({ profile }) => profile !== "asan");
|
||||
|
||||
// run build-image but no build-bun yet
|
||||
relevantBuildPlatforms = relevantBuildPlatforms.filter(({ os }) => os !== "freebsd");
|
||||
|
||||
steps.push(
|
||||
...relevantBuildPlatforms.map(target => {
|
||||
const imageKey = getImageKey(target);
|
||||
|
||||
184
.claude/skills/implementing-jsc-classes-cpp/SKILL.md
Normal file
184
.claude/skills/implementing-jsc-classes-cpp/SKILL.md
Normal file
@@ -0,0 +1,184 @@
|
||||
---
|
||||
name: implementing-jsc-classes-cpp
|
||||
description: Implements JavaScript classes in C++ using JavaScriptCore. Use when creating new JS classes with C++ bindings, prototypes, or constructors.
|
||||
---
|
||||
|
||||
# Implementing JavaScript Classes in C++
|
||||
|
||||
## Class Structure
|
||||
|
||||
For publicly accessible Constructor and Prototype, create 3 classes:
|
||||
|
||||
1. **`class Foo : public JSC::DestructibleObject`** - if C++ fields exist; otherwise use `JSC::constructEmptyObject` with `putDirectOffset`
|
||||
2. **`class FooPrototype : public JSC::JSNonFinalObject`**
|
||||
3. **`class FooConstructor : public JSC::InternalFunction`**
|
||||
|
||||
No public constructor? Only Prototype and class needed.
|
||||
|
||||
## Iso Subspaces
|
||||
|
||||
Classes with C++ fields need subspaces in:
|
||||
|
||||
- `src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h`
|
||||
- `src/bun.js/bindings/webcore/DOMIsoSubspaces.h`
|
||||
|
||||
```cpp
|
||||
template<typename MyClassT, JSC::SubspaceAccess mode>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) {
|
||||
if constexpr (mode == JSC::SubspaceAccess::Concurrently)
|
||||
return nullptr;
|
||||
return WebCore::subspaceForImpl<MyClassT, WebCore::UseCustomHeapCellType::No>(
|
||||
vm,
|
||||
[](auto& spaces) { return spaces.m_clientSubspaceForMyClassT.get(); },
|
||||
[](auto& spaces, auto&& space) { spaces.m_clientSubspaceForMyClassT = std::forward<decltype(space)>(space); },
|
||||
[](auto& spaces) { return spaces.m_subspaceForMyClassT.get(); },
|
||||
[](auto& spaces, auto&& space) { spaces.m_subspaceForMyClassT = std::forward<decltype(space)>(space); });
|
||||
}
|
||||
```
|
||||
|
||||
## Property Definitions
|
||||
|
||||
```cpp
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsFooProtoFuncMethod);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsFooGetter_property);
|
||||
|
||||
static const HashTableValue JSFooPrototypeTableValues[] = {
|
||||
{ "property"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsFooGetter_property, 0 } },
|
||||
{ "method"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsFooProtoFuncMethod, 1 } },
|
||||
};
|
||||
```
|
||||
|
||||
## Prototype Class
|
||||
|
||||
```cpp
|
||||
class JSFooPrototype final : public JSC::JSNonFinalObject {
|
||||
public:
|
||||
using Base = JSC::JSNonFinalObject;
|
||||
static constexpr unsigned StructureFlags = Base::StructureFlags;
|
||||
|
||||
static JSFooPrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure) {
|
||||
JSFooPrototype* prototype = new (NotNull, allocateCell<JSFooPrototype>(vm)) JSFooPrototype(vm, structure);
|
||||
prototype->finishCreation(vm);
|
||||
return prototype;
|
||||
}
|
||||
|
||||
template<typename, JSC::SubspaceAccess>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) { return &vm.plainObjectSpace(); }
|
||||
|
||||
DECLARE_INFO;
|
||||
|
||||
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) {
|
||||
auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
|
||||
structure->setMayBePrototype(true);
|
||||
return structure;
|
||||
}
|
||||
|
||||
private:
|
||||
JSFooPrototype(JSC::VM& vm, JSC::Structure* structure) : Base(vm, structure) {}
|
||||
void finishCreation(JSC::VM& vm);
|
||||
};
|
||||
|
||||
void JSFooPrototype::finishCreation(VM& vm) {
|
||||
Base::finishCreation(vm);
|
||||
reifyStaticProperties(vm, JSFoo::info(), JSFooPrototypeTableValues, *this);
|
||||
JSC_TO_STRING_TAG_WITHOUT_TRANSITION();
|
||||
}
|
||||
```
|
||||
|
||||
## Getter/Setter/Function Definitions
|
||||
|
||||
```cpp
|
||||
// Getter
|
||||
JSC_DEFINE_CUSTOM_GETTER(jsFooGetter_prop, (JSGlobalObject* globalObject, EncodedJSValue thisValue, PropertyName)) {
|
||||
VM& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
JSFoo* thisObject = jsDynamicCast<JSFoo*>(JSValue::decode(thisValue));
|
||||
if (UNLIKELY(!thisObject)) {
|
||||
Bun::throwThisTypeError(*globalObject, scope, "JSFoo"_s, "prop"_s);
|
||||
return {};
|
||||
}
|
||||
return JSValue::encode(jsBoolean(thisObject->value()));
|
||||
}
|
||||
|
||||
// Function
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFooProtoFuncMethod, (JSGlobalObject* globalObject, CallFrame* callFrame)) {
|
||||
VM& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
auto* thisObject = jsDynamicCast<JSFoo*>(callFrame->thisValue());
|
||||
if (UNLIKELY(!thisObject)) {
|
||||
Bun::throwThisTypeError(*globalObject, scope, "Foo"_s, "method"_s);
|
||||
return {};
|
||||
}
|
||||
return JSValue::encode(thisObject->doSomething(vm, globalObject));
|
||||
}
|
||||
```
|
||||
|
||||
## Constructor Class
|
||||
|
||||
```cpp
|
||||
class JSFooConstructor final : public JSC::InternalFunction {
|
||||
public:
|
||||
using Base = JSC::InternalFunction;
|
||||
static constexpr unsigned StructureFlags = Base::StructureFlags;
|
||||
|
||||
static JSFooConstructor* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSObject* prototype) {
|
||||
JSFooConstructor* constructor = new (NotNull, JSC::allocateCell<JSFooConstructor>(vm)) JSFooConstructor(vm, structure);
|
||||
constructor->finishCreation(vm, prototype);
|
||||
return constructor;
|
||||
}
|
||||
|
||||
DECLARE_INFO;
|
||||
|
||||
template<typename CellType, JSC::SubspaceAccess>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) { return &vm.internalFunctionSpace(); }
|
||||
|
||||
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) {
|
||||
return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info());
|
||||
}
|
||||
|
||||
private:
|
||||
JSFooConstructor(JSC::VM& vm, JSC::Structure* structure) : Base(vm, structure, callFoo, constructFoo) {}
|
||||
|
||||
void finishCreation(JSC::VM& vm, JSC::JSObject* prototype) {
|
||||
Base::finishCreation(vm, 0, "Foo"_s);
|
||||
putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
## Structure Caching
|
||||
|
||||
Add to `ZigGlobalObject.h`:
|
||||
|
||||
```cpp
|
||||
JSC::LazyClassStructure m_JSFooClassStructure;
|
||||
```
|
||||
|
||||
Initialize in `ZigGlobalObject.cpp`:
|
||||
|
||||
```cpp
|
||||
m_JSFooClassStructure.initLater([](LazyClassStructure::Initializer& init) {
|
||||
Bun::initJSFooClassStructure(init);
|
||||
});
|
||||
```
|
||||
|
||||
Visit in `visitChildrenImpl`:
|
||||
|
||||
```cpp
|
||||
m_JSFooClassStructure.visit(visitor);
|
||||
```
|
||||
|
||||
## Expose to Zig
|
||||
|
||||
```cpp
|
||||
extern "C" JSC::EncodedJSValue Bun__JSFooConstructor(Zig::GlobalObject* globalObject) {
|
||||
return JSValue::encode(globalObject->m_JSFooClassStructure.constructor(globalObject));
|
||||
}
|
||||
|
||||
extern "C" EncodedJSValue Bun__Foo__toJS(Zig::GlobalObject* globalObject, Foo* foo) {
|
||||
auto* structure = globalObject->m_JSFooClassStructure.get(globalObject);
|
||||
return JSValue::encode(JSFoo::create(globalObject->vm(), structure, globalObject, WTFMove(foo)));
|
||||
}
|
||||
```
|
||||
|
||||
Include `#include "root.h"` at the top of C++ files.
|
||||
206
.claude/skills/implementing-jsc-classes-zig/SKILL.md
Normal file
206
.claude/skills/implementing-jsc-classes-zig/SKILL.md
Normal file
@@ -0,0 +1,206 @@
|
||||
---
|
||||
name: implementing-jsc-classes-zig
|
||||
description: Creates JavaScript classes using Bun's Zig bindings generator (.classes.ts). Use when implementing new JS APIs in Zig with JSC integration.
|
||||
---
|
||||
|
||||
# Bun's JavaScriptCore Class Bindings Generator
|
||||
|
||||
Bridge JavaScript and Zig through `.classes.ts` definitions and Zig implementations.
|
||||
|
||||
## Architecture
|
||||
|
||||
1. **Zig Implementation** (.zig files)
|
||||
2. **JavaScript Interface Definition** (.classes.ts files)
|
||||
3. **Generated Code** (C++/Zig files connecting them)
|
||||
|
||||
## Class Definition (.classes.ts)
|
||||
|
||||
```typescript
|
||||
define({
|
||||
name: "TextDecoder",
|
||||
constructor: true,
|
||||
JSType: "object",
|
||||
finalize: true,
|
||||
proto: {
|
||||
decode: { args: 1 },
|
||||
encoding: { getter: true, cache: true },
|
||||
fatal: { getter: true },
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Options:
|
||||
|
||||
- `name`: Class name
|
||||
- `constructor`: Has public constructor
|
||||
- `JSType`: "object", "function", etc.
|
||||
- `finalize`: Needs cleanup
|
||||
- `proto`: Properties/methods
|
||||
- `cache`: Cache property values via WriteBarrier
|
||||
|
||||
## Zig Implementation
|
||||
|
||||
```zig
|
||||
pub const TextDecoder = struct {
|
||||
pub const js = JSC.Codegen.JSTextDecoder;
|
||||
pub const toJS = js.toJS;
|
||||
pub const fromJS = js.fromJS;
|
||||
pub const fromJSDirect = js.fromJSDirect;
|
||||
|
||||
encoding: []const u8,
|
||||
fatal: bool,
|
||||
|
||||
pub fn constructor(
|
||||
globalObject: *JSGlobalObject,
|
||||
callFrame: *JSC.CallFrame,
|
||||
) bun.JSError!*TextDecoder {
|
||||
return bun.new(TextDecoder, .{ .encoding = "utf-8", .fatal = false });
|
||||
}
|
||||
|
||||
pub fn decode(
|
||||
this: *TextDecoder,
|
||||
globalObject: *JSGlobalObject,
|
||||
callFrame: *JSC.CallFrame,
|
||||
) bun.JSError!JSC.JSValue {
|
||||
const args = callFrame.arguments();
|
||||
if (args.len < 1 or args.ptr[0].isUndefinedOrNull()) {
|
||||
return globalObject.throw("Input cannot be null", .{});
|
||||
}
|
||||
return JSC.JSValue.jsString(globalObject, "result");
|
||||
}
|
||||
|
||||
pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
|
||||
return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
|
||||
}
|
||||
|
||||
fn deinit(this: *TextDecoder) void {
|
||||
// Release resources
|
||||
}
|
||||
|
||||
pub fn finalize(this: *TextDecoder) void {
|
||||
this.deinit();
|
||||
bun.destroy(this);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
**Key patterns:**
|
||||
|
||||
- Use `bun.JSError!JSValue` return type for error handling
|
||||
- Use `globalObject` not `ctx`
|
||||
- `deinit()` for cleanup, `finalize()` called by GC
|
||||
- Update `src/bun.js/bindings/generated_classes_list.zig`
|
||||
|
||||
## CallFrame Access
|
||||
|
||||
```zig
|
||||
const args = callFrame.arguments();
|
||||
const first_arg = args.ptr[0]; // Access as slice
|
||||
const argCount = args.len;
|
||||
const thisValue = callFrame.thisValue();
|
||||
```
|
||||
|
||||
## Property Caching
|
||||
|
||||
For `cache: true` properties, generated accessors:
|
||||
|
||||
```zig
|
||||
// Get cached value
|
||||
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue {
|
||||
const result = TextDecoderPrototype__encodingGetCachedValue(thisValue);
|
||||
if (result == .zero) return null;
|
||||
return result;
|
||||
}
|
||||
|
||||
// Set cached value
|
||||
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void {
|
||||
TextDecoderPrototype__encodingSetCachedValue(thisValue, globalObject, value);
|
||||
}
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
```zig
|
||||
pub fn method(this: *MyClass, globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue {
|
||||
const args = callFrame.arguments();
|
||||
if (args.len < 1) {
|
||||
return globalObject.throw("Missing required argument", .{});
|
||||
}
|
||||
return JSC.JSValue.jsString(globalObject, "Success!");
|
||||
}
|
||||
```
|
||||
|
||||
## Memory Management
|
||||
|
||||
```zig
|
||||
pub fn deinit(this: *TextDecoder) void {
|
||||
this._encoding.deref();
|
||||
if (this.buffer) |buffer| {
|
||||
bun.default_allocator.free(buffer);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn finalize(this: *TextDecoder) void {
|
||||
JSC.markBinding(@src());
|
||||
this.deinit();
|
||||
bun.default_allocator.destroy(this);
|
||||
}
|
||||
```
|
||||
|
||||
## Creating a New Binding
|
||||
|
||||
1. Define interface in `.classes.ts`:
|
||||
|
||||
```typescript
|
||||
define({
|
||||
name: "MyClass",
|
||||
constructor: true,
|
||||
finalize: true,
|
||||
proto: {
|
||||
myMethod: { args: 1 },
|
||||
myProperty: { getter: true, cache: true },
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
2. Implement in `.zig`:
|
||||
|
||||
```zig
|
||||
pub const MyClass = struct {
|
||||
pub const js = JSC.Codegen.JSMyClass;
|
||||
pub const toJS = js.toJS;
|
||||
pub const fromJS = js.fromJS;
|
||||
|
||||
value: []const u8,
|
||||
|
||||
pub const new = bun.TrivialNew(@This());
|
||||
|
||||
pub fn constructor(globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!*MyClass {
|
||||
return MyClass.new(.{ .value = "" });
|
||||
}
|
||||
|
||||
pub fn myMethod(this: *MyClass, globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue {
|
||||
return JSC.JSValue.jsUndefined();
|
||||
}
|
||||
|
||||
pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue {
|
||||
return JSC.JSValue.jsString(globalObject, this.value);
|
||||
}
|
||||
|
||||
pub fn deinit(this: *MyClass) void {}
|
||||
|
||||
pub fn finalize(this: *MyClass) void {
|
||||
this.deinit();
|
||||
bun.destroy(this);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
3. Add to `src/bun.js/bindings/generated_classes_list.zig`
|
||||
|
||||
## Generated Components
|
||||
|
||||
- **C++ Classes**: `JSMyClass`, `JSMyClassPrototype`, `JSMyClassConstructor`
|
||||
- **Method Bindings**: `MyClassPrototype__myMethodCallback`
|
||||
- **Property Accessors**: `MyClassPrototype__myPropertyGetterWrap`
|
||||
- **Zig Bindings**: External function declarations, cached value accessors
|
||||
222
.claude/skills/writing-bundler-tests/SKILL.md
Normal file
222
.claude/skills/writing-bundler-tests/SKILL.md
Normal file
@@ -0,0 +1,222 @@
|
||||
---
|
||||
name: writing-bundler-tests
|
||||
description: Guides writing bundler tests using itBundled/expectBundled in test/bundler/. Use when creating or modifying bundler, transpiler, or code transformation tests.
|
||||
---
|
||||
|
||||
# Writing Bundler Tests
|
||||
|
||||
Bundler tests use `itBundled()` from `test/bundler/expectBundled.ts` to test Bun's bundler.
|
||||
|
||||
## Basic Usage
|
||||
|
||||
```typescript
|
||||
import { describe } from "bun:test";
|
||||
import { itBundled, dedent } from "./expectBundled";
|
||||
|
||||
describe("bundler", () => {
|
||||
itBundled("category/TestName", {
|
||||
files: {
|
||||
"index.js": `console.log("hello");`,
|
||||
},
|
||||
run: {
|
||||
stdout: "hello",
|
||||
},
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
Test ID format: `category/TestName` (e.g., `banner/CommentBanner`, `minify/Empty`)
|
||||
|
||||
## File Setup
|
||||
|
||||
```typescript
|
||||
{
|
||||
files: {
|
||||
"index.js": `console.log("test");`,
|
||||
"lib.ts": `export const foo = 123;`,
|
||||
"nested/file.js": `export default {};`,
|
||||
},
|
||||
entryPoints: ["index.js"], // defaults to first file
|
||||
runtimeFiles: { // written AFTER bundling
|
||||
"extra.js": `console.log("added later");`,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Bundler Options
|
||||
|
||||
```typescript
|
||||
{
|
||||
outfile: "/out.js",
|
||||
outdir: "/out",
|
||||
format: "esm" | "cjs" | "iife",
|
||||
target: "bun" | "browser" | "node",
|
||||
|
||||
// Minification
|
||||
minifyWhitespace: true,
|
||||
minifyIdentifiers: true,
|
||||
minifySyntax: true,
|
||||
|
||||
// Code manipulation
|
||||
banner: "// copyright",
|
||||
footer: "// end",
|
||||
define: { "PROD": "true" },
|
||||
external: ["lodash"],
|
||||
|
||||
// Advanced
|
||||
sourceMap: "inline" | "external",
|
||||
splitting: true,
|
||||
treeShaking: true,
|
||||
drop: ["console"],
|
||||
}
|
||||
```
|
||||
|
||||
## Runtime Verification
|
||||
|
||||
```typescript
|
||||
{
|
||||
run: {
|
||||
stdout: "expected output", // exact match
|
||||
stdout: /regex/, // pattern match
|
||||
partialStdout: "contains this", // substring
|
||||
stderr: "error output",
|
||||
exitCode: 1,
|
||||
env: { NODE_ENV: "production" },
|
||||
runtime: "bun" | "node",
|
||||
|
||||
// Runtime errors
|
||||
error: "ReferenceError: x is not defined",
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Bundle Errors/Warnings
|
||||
|
||||
```typescript
|
||||
{
|
||||
bundleErrors: {
|
||||
"/file.js": ["error message 1", "error message 2"],
|
||||
},
|
||||
bundleWarnings: {
|
||||
"/file.js": ["warning message"],
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Dead Code Elimination (DCE)
|
||||
|
||||
Add markers in source code:
|
||||
|
||||
```javascript
|
||||
// KEEP - this should survive
|
||||
const used = 1;
|
||||
|
||||
// REMOVE - this should be eliminated
|
||||
const unused = 2;
|
||||
```
|
||||
|
||||
```typescript
|
||||
{
|
||||
dce: true,
|
||||
dceKeepMarkerCount: 5, // expected KEEP markers
|
||||
}
|
||||
```
|
||||
|
||||
## Capture Pattern
|
||||
|
||||
Verify exact transpilation with `capture()`:
|
||||
|
||||
```typescript
|
||||
itBundled("string/Folding", {
|
||||
files: {
|
||||
"index.ts": `capture(\`\${1 + 1}\`);`,
|
||||
},
|
||||
capture: ['"2"'], // expected captured value
|
||||
minifySyntax: true,
|
||||
});
|
||||
```
|
||||
|
||||
## Post-Bundle Assertions
|
||||
|
||||
```typescript
|
||||
{
|
||||
onAfterBundle(api) {
|
||||
api.expectFile("out.js").toContain("console.log");
|
||||
api.assertFileExists("out.js");
|
||||
|
||||
const content = api.readFile("out.js");
|
||||
expect(content).toMatchSnapshot();
|
||||
|
||||
const values = api.captureFile("out.js");
|
||||
expect(values).toEqual(["2"]);
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Common Patterns
|
||||
|
||||
**Simple output verification:**
|
||||
|
||||
```typescript
|
||||
itBundled("banner/Comment", {
|
||||
banner: "// copyright",
|
||||
files: { "a.js": `console.log("Hello")` },
|
||||
onAfterBundle(api) {
|
||||
api.expectFile("out.js").toContain("// copyright");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
**Multi-file CJS/ESM interop:**
|
||||
|
||||
```typescript
|
||||
itBundled("cjs/ImportSyntax", {
|
||||
files: {
|
||||
"entry.js": `import lib from './lib.cjs'; console.log(lib);`,
|
||||
"lib.cjs": `exports.foo = 'bar';`,
|
||||
},
|
||||
run: { stdout: '{"foo":"bar"}' },
|
||||
});
|
||||
```
|
||||
|
||||
**Error handling:**
|
||||
|
||||
```typescript
|
||||
itBundled("edgecase/InvalidLoader", {
|
||||
files: { "index.js": `...` },
|
||||
bundleErrors: {
|
||||
"index.js": ["Unsupported loader type"],
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Test Organization
|
||||
|
||||
```text
|
||||
test/bundler/
|
||||
├── bundler_banner.test.ts
|
||||
├── bundler_string.test.ts
|
||||
├── bundler_minify.test.ts
|
||||
├── bundler_cjs.test.ts
|
||||
├── bundler_edgecase.test.ts
|
||||
├── bundler_splitting.test.ts
|
||||
├── css/
|
||||
├── transpiler/
|
||||
└── expectBundled.ts
|
||||
```
|
||||
|
||||
## Running Tests
|
||||
|
||||
```bash
|
||||
bun bd test test/bundler/bundler_banner.test.ts
|
||||
BUN_BUNDLER_TEST_FILTER="banner/Comment" bun bd test bundler_banner.test.ts
|
||||
BUN_BUNDLER_TEST_DEBUG=1 bun bd test bundler_minify.test.ts
|
||||
```
|
||||
|
||||
## Key Points
|
||||
|
||||
- Use `dedent` for readable multi-line code
|
||||
- File paths are relative (e.g., `/index.js`)
|
||||
- Use `capture()` to verify exact transpilation results
|
||||
- Use `.toMatchSnapshot()` for complex outputs
|
||||
- Pass array to `run` for multiple test scenarios
|
||||
94
.claude/skills/writing-dev-server-tests/SKILL.md
Normal file
94
.claude/skills/writing-dev-server-tests/SKILL.md
Normal file
@@ -0,0 +1,94 @@
|
||||
---
|
||||
name: writing-dev-server-tests
|
||||
description: Guides writing HMR/Dev Server tests in test/bake/. Use when creating or modifying dev server, hot reloading, or bundling tests.
|
||||
---
|
||||
|
||||
# Writing HMR/Dev Server Tests
|
||||
|
||||
Dev server tests validate hot-reloading robustness and reliability.
|
||||
|
||||
## File Structure
|
||||
|
||||
- `test/bake/bake-harness.ts` - shared utilities: `devTest`, `prodTest`, `devAndProductionTest`, `Dev` class, `Client` class
|
||||
- `test/bake/client-fixture.mjs` - subprocess for `Client` (page loading, IPC queries)
|
||||
- `test/bake/dev/*.test.ts` - dev server and hot reload tests
|
||||
- `test/bake/dev-and-prod.ts` - tests running on both dev and production mode
|
||||
|
||||
## Test Categories
|
||||
|
||||
- `bundle.test.ts` - DevServer-specific bundling bugs
|
||||
- `css.test.ts` - CSS bundling issues
|
||||
- `plugins.test.ts` - development mode plugins
|
||||
- `ecosystem.test.ts` - library compatibility (prefer concrete bugs over full package tests)
|
||||
- `esm.test.ts` - ESM features in development
|
||||
- `html.test.ts` - HTML file handling
|
||||
- `react-spa.test.ts` - React, react-refresh transform, server components
|
||||
- `sourcemap.test.ts` - source map correctness
|
||||
|
||||
## devTest Basics
|
||||
|
||||
```ts
|
||||
import { devTest, emptyHtmlFile } from "../bake-harness";
|
||||
|
||||
devTest("html file is watched", {
|
||||
files: {
|
||||
"index.html": emptyHtmlFile({
|
||||
scripts: ["/script.ts"],
|
||||
body: "<h1>Hello</h1>",
|
||||
}),
|
||||
"script.ts": `console.log("hello");`,
|
||||
},
|
||||
async test(dev) {
|
||||
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
|
||||
await dev.patch("index.html", { find: "Hello", replace: "World" });
|
||||
await dev.fetch("/").expect.toInclude("<h1>World</h1>");
|
||||
|
||||
await using c = await dev.client("/");
|
||||
await c.expectMessage("hello");
|
||||
|
||||
await c.expectReload(async () => {
|
||||
await dev.patch("index.html", { find: "World", replace: "Bar" });
|
||||
});
|
||||
await c.expectMessage("hello");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Key APIs
|
||||
|
||||
- **`files`**: Initial filesystem state
|
||||
- **`dev.fetch()`**: HTTP requests
|
||||
- **`dev.client()`**: Opens browser instance
|
||||
- **`dev.write/patch/delete`**: Filesystem mutations (wait for hot-reload automatically)
|
||||
- **`c.expectMessage()`**: Assert console.log output
|
||||
- **`c.expectReload()`**: Wrap code that causes hard reload
|
||||
|
||||
**Important**: Use `dev.write/patch/delete` instead of `node:fs` - they wait for hot-reload.
|
||||
|
||||
## Testing Errors
|
||||
|
||||
```ts
|
||||
devTest("import then create", {
|
||||
files: {
|
||||
"index.html": `<!DOCTYPE html><html><head></head><body><script type="module" src="/script.ts"></script></body></html>`,
|
||||
"script.ts": `import data from "./data"; console.log(data);`,
|
||||
},
|
||||
async test(dev) {
|
||||
const c = await dev.client("/", {
|
||||
errors: ['script.ts:1:18: error: Could not resolve: "./data"'],
|
||||
});
|
||||
await c.expectReload(async () => {
|
||||
await dev.write("data.ts", "export default 'data';");
|
||||
});
|
||||
await c.expectMessage("data");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Specify expected errors with the `errors` option:
|
||||
|
||||
```ts
|
||||
await dev.delete("other.ts", {
|
||||
errors: ['index.ts:1:16: error: Could not resolve: "./other"'],
|
||||
});
|
||||
```
|
||||
268
.claude/skills/zig-system-calls/SKILL.md
Normal file
268
.claude/skills/zig-system-calls/SKILL.md
Normal file
@@ -0,0 +1,268 @@
|
||||
---
|
||||
name: zig-system-calls
|
||||
description: Guides using bun.sys for system calls and file I/O in Zig. Use when implementing file operations instead of std.fs or std.posix.
|
||||
---
|
||||
|
||||
# System Calls & File I/O in Zig
|
||||
|
||||
Use `bun.sys` instead of `std.fs` or `std.posix` for cross-platform syscalls with proper error handling.
|
||||
|
||||
## bun.sys.File (Preferred)
|
||||
|
||||
For most file operations, use the `bun.sys.File` wrapper:
|
||||
|
||||
```zig
|
||||
const File = bun.sys.File;
|
||||
|
||||
const file = switch (File.open(path, bun.O.RDWR, 0o644)) {
|
||||
.result => |f| f,
|
||||
.err => |err| return .{ .err = err },
|
||||
};
|
||||
defer file.close();
|
||||
|
||||
// Read/write
|
||||
_ = try file.read(buffer).unwrap();
|
||||
_ = try file.writeAll(data).unwrap();
|
||||
|
||||
// Get file info
|
||||
const stat = try file.stat().unwrap();
|
||||
const size = try file.getEndPos().unwrap();
|
||||
|
||||
// std.io compatible
|
||||
const reader = file.reader();
|
||||
const writer = file.writer();
|
||||
```
|
||||
|
||||
### Complete Example
|
||||
|
||||
```zig
|
||||
const File = bun.sys.File;
|
||||
|
||||
pub fn writeFile(path: [:0]const u8, data: []const u8) File.WriteError!void {
|
||||
const file = switch (File.open(path, bun.O.WRONLY | bun.O.CREAT | bun.O.TRUNC, 0o664)) {
|
||||
.result => |f| f,
|
||||
.err => |err| return err.toError(),
|
||||
};
|
||||
defer file.close();
|
||||
|
||||
_ = switch (file.writeAll(data)) {
|
||||
.result => {},
|
||||
.err => |err| return err.toError(),
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
## Why bun.sys?
|
||||
|
||||
| Aspect | bun.sys | std.fs/std.posix |
|
||||
| ----------- | -------------------------------- | ------------------- |
|
||||
| Return Type | `Maybe(T)` with detailed Error | Generic error union |
|
||||
| Windows | Full support with libuv fallback | Limited/POSIX-only |
|
||||
| Error Info | errno, syscall tag, path, fd | errno only |
|
||||
| EINTR | Automatic retry | Manual handling |
|
||||
|
||||
## Error Handling with Maybe(T)
|
||||
|
||||
`bun.sys` functions return `Maybe(T)` - a tagged union:
|
||||
|
||||
```zig
|
||||
const sys = bun.sys;
|
||||
|
||||
// Pattern 1: Switch on result/error
|
||||
switch (sys.read(fd, buffer)) {
|
||||
.result => |bytes_read| {
|
||||
// use bytes_read
|
||||
},
|
||||
.err => |err| {
|
||||
// err.errno, err.syscall, err.fd, err.path
|
||||
if (err.getErrno() == .AGAIN) {
|
||||
// handle EAGAIN
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
// Pattern 2: Unwrap with try (converts to Zig error)
|
||||
const bytes = try sys.read(fd, buffer).unwrap();
|
||||
|
||||
// Pattern 3: Unwrap with default
|
||||
const value = sys.stat(path).unwrapOr(default_stat);
|
||||
```
|
||||
|
||||
## Low-Level File Operations
|
||||
|
||||
Only use these when `bun.sys.File` doesn't meet your needs.
|
||||
|
||||
### Opening Files
|
||||
|
||||
```zig
|
||||
const sys = bun.sys;
|
||||
|
||||
// Use bun.O flags (cross-platform normalized)
|
||||
const fd = switch (sys.open(path, bun.O.RDONLY, 0)) {
|
||||
.result => |fd| fd,
|
||||
.err => |err| return .{ .err = err },
|
||||
};
|
||||
defer fd.close();
|
||||
|
||||
// Common flags
|
||||
bun.O.RDONLY, bun.O.WRONLY, bun.O.RDWR
|
||||
bun.O.CREAT, bun.O.TRUNC, bun.O.APPEND
|
||||
bun.O.NONBLOCK, bun.O.DIRECTORY
|
||||
```
|
||||
|
||||
### Reading & Writing
|
||||
|
||||
```zig
|
||||
// Single read (may return less than buffer size)
|
||||
switch (sys.read(fd, buffer)) {
|
||||
.result => |n| { /* n bytes read */ },
|
||||
.err => |err| { /* handle error */ },
|
||||
}
|
||||
|
||||
// Read until EOF or buffer full
|
||||
const total = try sys.readAll(fd, buffer).unwrap();
|
||||
|
||||
// Position-based read/write
|
||||
sys.pread(fd, buffer, offset)
|
||||
sys.pwrite(fd, data, offset)
|
||||
|
||||
// Vector I/O
|
||||
sys.readv(fd, iovecs)
|
||||
sys.writev(fd, iovecs)
|
||||
```
|
||||
|
||||
### File Info
|
||||
|
||||
```zig
|
||||
sys.stat(path) // Follow symlinks
|
||||
sys.lstat(path) // Don't follow symlinks
|
||||
sys.fstat(fd) // From file descriptor
|
||||
sys.fstatat(fd, path)
|
||||
|
||||
// Linux-only: faster selective stat
|
||||
sys.statx(path, &.{ .size, .mtime })
|
||||
```
|
||||
|
||||
### Path Operations
|
||||
|
||||
```zig
|
||||
sys.unlink(path)
|
||||
sys.unlinkat(dir_fd, path)
|
||||
sys.rename(from, to)
|
||||
sys.renameat(from_dir, from, to_dir, to)
|
||||
sys.readlink(path, buf)
|
||||
sys.readlinkat(fd, path, buf)
|
||||
sys.link(T, src, dest)
|
||||
sys.linkat(src_fd, src, dest_fd, dest)
|
||||
sys.symlink(target, dest)
|
||||
sys.symlinkat(target, dirfd, dest)
|
||||
sys.mkdir(path, mode)
|
||||
sys.mkdirat(dir_fd, path, mode)
|
||||
sys.rmdir(path)
|
||||
```
|
||||
|
||||
### Permissions
|
||||
|
||||
```zig
|
||||
sys.chmod(path, mode)
|
||||
sys.fchmod(fd, mode)
|
||||
sys.fchmodat(fd, path, mode, flags)
|
||||
sys.chown(path, uid, gid)
|
||||
sys.fchown(fd, uid, gid)
|
||||
```
|
||||
|
||||
### Closing File Descriptors
|
||||
|
||||
Close is on `bun.FD`:
|
||||
|
||||
```zig
|
||||
fd.close(); // Asserts on error (use in defer)
|
||||
|
||||
// Or if you need error info:
|
||||
if (fd.closeAllowingBadFileDescriptor(null)) |err| {
|
||||
// handle error
|
||||
}
|
||||
```
|
||||
|
||||
## Directory Operations
|
||||
|
||||
```zig
|
||||
var buf: bun.PathBuffer = undefined;
|
||||
const cwd = try sys.getcwd(&buf).unwrap();
|
||||
const cwdZ = try sys.getcwdZ(&buf).unwrap(); // Zero-terminated
|
||||
sys.chdir(path, destination)
|
||||
```
|
||||
|
||||
### Directory Iteration
|
||||
|
||||
Use `bun.DirIterator` instead of `std.fs.Dir.Iterator`:
|
||||
|
||||
```zig
|
||||
var iter = bun.iterateDir(dir_fd);
|
||||
while (true) {
|
||||
switch (iter.next()) {
|
||||
.result => |entry| {
|
||||
if (entry) |e| {
|
||||
const name = e.name.slice();
|
||||
const kind = e.kind; // .file, .directory, .sym_link, etc.
|
||||
} else {
|
||||
break; // End of directory
|
||||
}
|
||||
},
|
||||
.err => |err| return .{ .err = err },
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Socket Operations
|
||||
|
||||
**Important**: `bun.sys` has limited socket support. For network I/O:
|
||||
|
||||
- **Non-blocking sockets**: Use `uws.Socket` (libuwebsockets) exclusively
|
||||
- **Pipes/blocking I/O**: Use `PipeReader.zig` and `PipeWriter.zig`
|
||||
|
||||
Available in bun.sys:
|
||||
|
||||
```zig
|
||||
sys.setsockopt(fd, level, optname, value)
|
||||
sys.socketpair(domain, socktype, protocol, nonblocking_status)
|
||||
```
|
||||
|
||||
Do NOT use `bun.sys` for socket read/write - use `uws.Socket` instead.
|
||||
|
||||
## Other Operations
|
||||
|
||||
```zig
|
||||
sys.ftruncate(fd, size)
|
||||
sys.lseek(fd, offset, whence)
|
||||
sys.dup(fd)
|
||||
sys.dupWithFlags(fd, flags)
|
||||
sys.fcntl(fd, cmd, arg)
|
||||
sys.pipe()
|
||||
sys.mmap(...)
|
||||
sys.munmap(memory)
|
||||
sys.access(path, mode)
|
||||
sys.futimens(fd, atime, mtime)
|
||||
sys.utimens(path, atime, mtime)
|
||||
```
|
||||
|
||||
## Error Type
|
||||
|
||||
```zig
|
||||
const err: bun.sys.Error = ...;
|
||||
err.errno // Raw errno value
|
||||
err.getErrno() // As std.posix.E enum
|
||||
err.syscall // Which syscall failed (Tag enum)
|
||||
err.fd // Optional: file descriptor
|
||||
err.path // Optional: path string
|
||||
```
|
||||
|
||||
## Key Points
|
||||
|
||||
- Prefer `bun.sys.File` wrapper for most file operations
|
||||
- Use low-level `bun.sys` functions only when needed
|
||||
- Use `bun.O.*` flags instead of `std.os.O.*`
|
||||
- Handle `Maybe(T)` with switch or `.unwrap()`
|
||||
- Use `defer fd.close()` for cleanup
|
||||
- EINTR is handled automatically in most functions
|
||||
- For sockets, use `uws.Socket` not `bun.sys`
|
||||
@@ -1,5 +1,9 @@
|
||||
language: en-US
|
||||
|
||||
issue_enrichment:
|
||||
auto_enrich:
|
||||
enabled: false
|
||||
|
||||
reviews:
|
||||
profile: assertive
|
||||
request_changes_workflow: false
|
||||
|
||||
@@ -1,41 +0,0 @@
|
||||
---
|
||||
description:
|
||||
globs: src/**/*.cpp,src/**/*.zig
|
||||
alwaysApply: false
|
||||
---
|
||||
|
||||
### Build Commands
|
||||
|
||||
- **Build debug version**: `bun bd` or `bun run build:debug`
|
||||
- Creates a debug build at `./build/debug/bun-debug`
|
||||
- Compilation takes ~2.5 minutes
|
||||
- **Run tests with your debug build**: `bun bd test <test-file>`
|
||||
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
|
||||
- **Run any command with debug build**: `bun bd <command>`
|
||||
|
||||
### Run a file
|
||||
|
||||
To run a file, use:
|
||||
|
||||
```sh
|
||||
bun bd <file> <...args>
|
||||
```
|
||||
|
||||
**CRITICAL**: Never use `bun <file>` directly. It will not have your changes.
|
||||
|
||||
### Logging
|
||||
|
||||
`BUN_DEBUG_$(SCOPE)=1` enables debug logs for a specific debug log scope.
|
||||
|
||||
Debug logs look like this:
|
||||
|
||||
```zig
|
||||
const log = bun.Output.scoped(.${SCOPE}, .hidden);
|
||||
|
||||
// ...later
|
||||
log("MY DEBUG LOG", .{})
|
||||
```
|
||||
|
||||
### Code Generation
|
||||
|
||||
Code generation happens automatically as part of the build process. There are no commands to run.
|
||||
@@ -1,139 +0,0 @@
|
||||
---
|
||||
description: Writing HMR/Dev Server tests
|
||||
globs: test/bake/*
|
||||
---
|
||||
|
||||
# Writing HMR/Dev Server tests
|
||||
|
||||
Dev server tests validate that hot-reloading is robust, correct, and reliable. Remember to write thorough, yet concise tests.
|
||||
|
||||
## File Structure
|
||||
|
||||
- `test/bake/bake-harness.ts` - shared utilities and test harness
|
||||
- primary test functions `devTest` / `prodTest` / `devAndProductionTest`
|
||||
- class `Dev` (controls subprocess for dev server)
|
||||
- class `Client` (controls a happy-dom subprocess for having the page open)
|
||||
- more helpers
|
||||
- `test/bake/client-fixture.mjs` - subprocess for what `Client` controls. it loads a page and uses IPC to query parts of the page, run javascript, and much more.
|
||||
- `test/bake/dev/*.test.ts` - these call `devTest` to test dev server and hot reloading
|
||||
- `test/bake/dev-and-prod.ts` - these use `devAndProductionTest` to run the same test on dev and production mode. these tests cannot really test hot reloading for obvious reasons.
|
||||
|
||||
## Categories
|
||||
|
||||
bundle.test.ts - Bundle tests are tests concerning bundling bugs that only occur in DevServer.
|
||||
css.test.ts - CSS tests concern bundling bugs with CSS files
|
||||
plugins.test.ts - Plugin tests concern plugins in development mode.
|
||||
ecosystem.test.ts - These tests involve ensuring certain libraries are correct. It is preferred to test more concrete bugs than testing entire packages.
|
||||
esm.test.ts - ESM tests are about various esm features in development mode.
|
||||
html.test.ts - HTML tests are tests relating to HTML files themselves.
|
||||
react-spa.test.ts - Tests relating to React, our react-refresh transform, and basic server component transforms.
|
||||
sourcemap.test.ts - Tests verifying source-maps are correct.
|
||||
|
||||
## `devTest` Basics
|
||||
|
||||
A test takes in two primary inputs: `files` and `async test(dev) {`
|
||||
|
||||
```ts
|
||||
import { devTest, emptyHtmlFile } from "../bake-harness";
|
||||
|
||||
devTest("html file is watched", {
|
||||
files: {
|
||||
"index.html": emptyHtmlFile({
|
||||
scripts: ["/script.ts"],
|
||||
body: "<h1>Hello</h1>",
|
||||
}),
|
||||
"script.ts": `
|
||||
console.log("hello");
|
||||
`,
|
||||
},
|
||||
async test(dev) {
|
||||
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
|
||||
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
|
||||
await dev.patch("index.html", {
|
||||
find: "Hello",
|
||||
replace: "World",
|
||||
});
|
||||
await dev.fetch("/").expect.toInclude("<h1>World</h1>");
|
||||
|
||||
// Works
|
||||
await using c = await dev.client("/");
|
||||
await c.expectMessage("hello");
|
||||
|
||||
// Editing HTML reloads
|
||||
await c.expectReload(async () => {
|
||||
await dev.patch("index.html", {
|
||||
find: "World",
|
||||
replace: "Hello",
|
||||
});
|
||||
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
|
||||
});
|
||||
await c.expectMessage("hello");
|
||||
|
||||
await c.expectReload(async () => {
|
||||
await dev.patch("index.html", {
|
||||
find: "Hello",
|
||||
replace: "Bar",
|
||||
});
|
||||
await dev.fetch("/").expect.toInclude("<h1>Bar</h1>");
|
||||
});
|
||||
await c.expectMessage("hello");
|
||||
|
||||
await c.expectReload(async () => {
|
||||
await dev.patch("script.ts", {
|
||||
find: "hello",
|
||||
replace: "world",
|
||||
});
|
||||
});
|
||||
await c.expectMessage("world");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
`files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance to the code.
|
||||
|
||||
Functions `dev.write` and `dev.patch` and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server ones are hooked to wait for hot-reload, and all connected clients to receive changes.
|
||||
|
||||
When a change performs a hard-reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; all other hard reloads automatically fail the test.
|
||||
|
||||
Clients have `console.log` instrumented, so that any unasserted logs fail the test. This makes it more obvious when an extra reload or re-evaluation occurs. Messages are awaited via `c.expectMessage("log")`, or with multiple arguments if there are multiple logs.
|
||||
|
||||
## Testing for bundling errors
|
||||
|
||||
By default, a client opening a page to an error will fail the test. This makes testing errors explicit.
|
||||
|
||||
```ts
|
||||
devTest("import then create", {
|
||||
files: {
|
||||
"index.html": `
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head></head>
|
||||
<body>
|
||||
<script type="module" src="/script.ts"></script>
|
||||
</body>
|
||||
</html>
|
||||
`,
|
||||
"script.ts": `
|
||||
import data from "./data";
|
||||
console.log(data);
|
||||
`,
|
||||
},
|
||||
async test(dev) {
|
||||
const c = await dev.client("/", {
|
||||
errors: ['script.ts:1:18: error: Could not resolve: "./data"'],
|
||||
});
|
||||
await c.expectReload(async () => {
|
||||
await dev.write("data.ts", "export default 'data';");
|
||||
});
|
||||
await c.expectMessage("data");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Many functions take an options value that allows specifying the errors they are expected to produce. For example, this delete is going to cause a resolution failure.
|
||||
|
||||
```ts
|
||||
await dev.delete("other.ts", {
|
||||
errors: ['index.ts:1:16: error: Could not resolve: "./other"'],
|
||||
});
|
||||
```
|
||||
@@ -1,413 +0,0 @@
|
||||
---
|
||||
description: JavaScript class implemented in C++
|
||||
globs: *.cpp
|
||||
alwaysApply: false
|
||||
---
|
||||
|
||||
# Implementing JavaScript classes in C++
|
||||
|
||||
If there is a publicly accessible Constructor and Prototype, then there are 3 classes:
|
||||
|
||||
- IF there are C++ class members we need a destructor, so `class Foo : public JSC::DestructibleObject`, if no C++ class fields (only JS properties) then we don't need a class at all usually. We can instead use JSC::constructEmptyObject(vm, structure) and `putDirectOffset` like in [NodeFSStatBinding.cpp](mdc:src/bun.js/bindings/NodeFSStatBinding.cpp).
|
||||
- class FooPrototype : public JSC::JSNonFinalObject
|
||||
- class FooConstructor : public JSC::InternalFunction
|
||||
|
||||
If there is no publicly accessible Constructor, only the Prototype and the class are necessary. In some cases, we can avoid the prototype entirely (but that's rare).
|
||||
|
||||
If there are C++ fields on the Foo class, the Foo class will need an iso subspace added to [DOMClientIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h) and [DOMIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMIsoSubspaces.h). Prototype and Constructor do not need subspaces.
|
||||
|
||||
Usually you'll need to #include "root.h" at the top of C++ files or you'll get lint errors.
|
||||
|
||||
Generally, defining the subspace looks like this:
|
||||
|
||||
```c++
|
||||
|
||||
class Foo : public JSC::DestructibleObject {
|
||||
|
||||
// ...
|
||||
|
||||
template<typename MyClassT, JSC::SubspaceAccess mode>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
|
||||
{
|
||||
if constexpr (mode == JSC::SubspaceAccess::Concurrently)
|
||||
return nullptr;
|
||||
return WebCore::subspaceForImpl<MyClassT, WebCore::UseCustomHeapCellType::No>(
|
||||
vm,
|
||||
[](auto& spaces) { return spaces.m_clientSubspaceFor${MyClassT}.get(); },
|
||||
[](auto& spaces, auto&& space) { spaces.m_clientSubspaceFor${MyClassT} = std::forward<decltype(space)>(space); },
|
||||
[](auto& spaces) { return spaces.m_subspaceFor${MyClassT}.get(); },
|
||||
[](auto& spaces, auto&& space) { spaces.m_subspaceFor${MyClassT} = std::forward<decltype(space)>(space); });
|
||||
}
|
||||
|
||||
|
||||
```
|
||||
|
||||
It's better to put it in the .cpp file instead of the .h file, when possible.
|
||||
|
||||
## Defining properties
|
||||
|
||||
Define properties on the prototype. Use a const HashTableValues like this:
|
||||
|
||||
```C++
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckEmail);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckHost);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIP);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIssued);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckPrivateKey);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToLegacyObject);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToString);
|
||||
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncVerify);
|
||||
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_ca);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint256);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint512);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subject);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subjectAltName);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_infoAccess);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_keyUsage);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuer);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuerCertificate);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_publicKey);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_raw);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_serialNumber);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFrom);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validTo);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFromDate);
|
||||
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validToDate);
|
||||
|
||||
static const HashTableValue JSX509CertificatePrototypeTableValues[] = {
|
||||
{ "ca"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_ca, 0 } },
|
||||
{ "checkEmail"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckEmail, 2 } },
|
||||
{ "checkHost"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckHost, 2 } },
|
||||
{ "checkIP"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIP, 1 } },
|
||||
{ "checkIssued"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIssued, 1 } },
|
||||
{ "checkPrivateKey"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckPrivateKey, 1 } },
|
||||
{ "fingerprint"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint, 0 } },
|
||||
{ "fingerprint256"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint256, 0 } },
|
||||
{ "fingerprint512"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint512, 0 } },
|
||||
{ "infoAccess"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_infoAccess, 0 } },
|
||||
{ "issuer"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuer, 0 } },
|
||||
{ "issuerCertificate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuerCertificate, 0 } },
|
||||
{ "keyUsage"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_keyUsage, 0 } },
|
||||
{ "publicKey"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_publicKey, 0 } },
|
||||
{ "raw"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_raw, 0 } },
|
||||
{ "serialNumber"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_serialNumber, 0 } },
|
||||
{ "subject"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subject, 0 } },
|
||||
{ "subjectAltName"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subjectAltName, 0 } },
|
||||
{ "toJSON"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToJSON, 0 } },
|
||||
{ "toLegacyObject"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToLegacyObject, 0 } },
|
||||
{ "toString"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToString, 0 } },
|
||||
{ "validFrom"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFrom, 0 } },
|
||||
{ "validFromDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFromDate, 0 } },
|
||||
{ "validTo"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validTo, 0 } },
|
||||
{ "validToDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validToDate, 0 } },
|
||||
{ "verify"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncVerify, 1 } },
|
||||
};
|
||||
```
|
||||
|
||||
### Creating a prototype class
|
||||
|
||||
Follow a pattern like this:
|
||||
|
||||
```c++
|
||||
class JSX509CertificatePrototype final : public JSC::JSNonFinalObject {
|
||||
public:
|
||||
using Base = JSC::JSNonFinalObject;
|
||||
static constexpr unsigned StructureFlags = Base::StructureFlags;
|
||||
|
||||
static JSX509CertificatePrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure)
|
||||
{
|
||||
JSX509CertificatePrototype* prototype = new (NotNull, allocateCell<JSX509CertificatePrototype>(vm)) JSX509CertificatePrototype(vm, structure);
|
||||
prototype->finishCreation(vm);
|
||||
return prototype;
|
||||
}
|
||||
|
||||
template<typename, JSC::SubspaceAccess>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
|
||||
{
|
||||
return &vm.plainObjectSpace();
|
||||
}
|
||||
|
||||
DECLARE_INFO;
|
||||
|
||||
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
|
||||
{
|
||||
auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
|
||||
structure->setMayBePrototype(true);
|
||||
return structure;
|
||||
}
|
||||
|
||||
private:
|
||||
JSX509CertificatePrototype(JSC::VM& vm, JSC::Structure* structure)
|
||||
: Base(vm, structure)
|
||||
{
|
||||
}
|
||||
|
||||
void finishCreation(JSC::VM& vm);
|
||||
};
|
||||
|
||||
const ClassInfo JSX509CertificatePrototype::s_info = { "X509Certificate"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSX509CertificatePrototype) };
|
||||
|
||||
void JSX509CertificatePrototype::finishCreation(VM& vm)
|
||||
{
|
||||
Base::finishCreation(vm);
|
||||
reifyStaticProperties(vm, JSX509Certificate::info(), JSX509CertificatePrototypeTableValues, *this);
|
||||
JSC_TO_STRING_TAG_WITHOUT_TRANSITION();
|
||||
}
|
||||
|
||||
} // namespace Bun
|
||||
```
|
||||
|
||||
### Getter definition:
|
||||
|
||||
```C++
|
||||
|
||||
JSC_DEFINE_CUSTOM_GETTER(jsX509CertificateGetter_ca, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName))
|
||||
{
|
||||
VM& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
|
||||
JSX509Certificate* thisObject = jsDynamicCast<JSX509Certificate*>(JSValue::decode(thisValue));
|
||||
if (UNLIKELY(!thisObject)) {
|
||||
Bun::throwThisTypeError(*globalObject, scope, "JSX509Certificate"_s, "ca"_s);
|
||||
return {};
|
||||
}
|
||||
|
||||
return JSValue::encode(jsBoolean(thisObject->view().isCA()));
|
||||
}
|
||||
```
|
||||
|
||||
### Setter definition
|
||||
|
||||
```C++
|
||||
JSC_DEFINE_CUSTOM_SETTER(jsImportMetaObjectSetter_require, (JSGlobalObject * jsGlobalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue encodedValue, PropertyName propertyName))
|
||||
{
|
||||
ImportMetaObject* thisObject = jsDynamicCast<ImportMetaObject*>(JSValue::decode(thisValue));
|
||||
if (UNLIKELY(!thisObject))
|
||||
return false;
|
||||
|
||||
JSValue value = JSValue::decode(encodedValue);
|
||||
if (!value.isCell()) {
|
||||
// TODO:
|
||||
return true;
|
||||
}
|
||||
|
||||
thisObject->requireProperty.set(thisObject->vm(), thisObject, value.asCell());
|
||||
return true;
|
||||
}
|
||||
```
|
||||
|
||||
### Function definition
|
||||
|
||||
```C++
|
||||
JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
VM& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
auto *thisObject = jsDynamicCast<MyClassT*>(callFrame->thisValue());
|
||||
if (UNLIKELY(!thisObject)) {
|
||||
Bun::throwThisTypeError(*globalObject, scope, "MyClass"_s, "myFunctionName"_s);
|
||||
return {};
|
||||
}
|
||||
|
||||
return JSValue::encode(functionThatReturnsJSValue(vm, globalObject, thisObject));
|
||||
}
|
||||
```
|
||||
|
||||
### Constructor definition
|
||||
|
||||
```C++
|
||||
|
||||
JSC_DECLARE_HOST_FUNCTION(callStats);
|
||||
JSC_DECLARE_HOST_FUNCTION(constructStats);
|
||||
|
||||
class JSStatsConstructor final : public JSC::InternalFunction {
|
||||
public:
|
||||
using Base = JSC::InternalFunction;
|
||||
static constexpr unsigned StructureFlags = Base::StructureFlags;
|
||||
|
||||
static JSStatsConstructor* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSObject* prototype)
|
||||
{
|
||||
JSStatsConstructor* constructor = new (NotNull, JSC::allocateCell<JSStatsConstructor>(vm)) JSStatsConstructor(vm, structure);
|
||||
constructor->finishCreation(vm, prototype);
|
||||
return constructor;
|
||||
}
|
||||
|
||||
DECLARE_INFO;
|
||||
|
||||
template<typename CellType, JSC::SubspaceAccess>
|
||||
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
|
||||
{
|
||||
return &vm.internalFunctionSpace();
|
||||
}
|
||||
|
||||
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
|
||||
{
|
||||
return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info());
|
||||
}
|
||||
|
||||
private:
|
||||
JSStatsConstructor(JSC::VM& vm, JSC::Structure* structure)
|
||||
: Base(vm, structure, callStats, constructStats)
|
||||
{
|
||||
}
|
||||
|
||||
void finishCreation(JSC::VM& vm, JSC::JSObject* prototype)
|
||||
{
|
||||
Base::finishCreation(vm, 0, "Stats"_s);
|
||||
putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### Structure caching
|
||||
|
||||
If there's a class, prototype, and constructor:
|
||||
|
||||
1. Add the `JSC::LazyClassStructure` to [ZigGlobalObject.h](mdc:src/bun.js/bindings/ZigGlobalObject.h)
|
||||
2. Initialize the class structure in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::finishCreation(VM& vm)`
|
||||
3. Visit the class structure in visitChildren in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::visitChildrenImpl`
|
||||
|
||||
```c++#ZigGlobalObject.cpp
|
||||
void GlobalObject::finishCreation(VM& vm) {
|
||||
// ...
|
||||
m_JSStatsBigIntClassStructure.initLater(
|
||||
[](LazyClassStructure::Initializer& init) {
|
||||
// Call the function to initialize our class structure.
|
||||
Bun::initJSBigIntStatsClassStructure(init);
|
||||
});
|
||||
```
|
||||
|
||||
Then, implement the function that creates the structure:
|
||||
|
||||
```c++
|
||||
void setupX509CertificateClassStructure(LazyClassStructure::Initializer& init)
|
||||
{
|
||||
auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
|
||||
auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);
|
||||
|
||||
auto* constructorStructure = JSX509CertificateConstructor::createStructure(init.vm, init.global, init.global->functionPrototype());
|
||||
|
||||
auto* constructor = JSX509CertificateConstructor::create(init.vm, init.global, constructorStructure, prototype);
|
||||
|
||||
auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
|
||||
init.setPrototype(prototype);
|
||||
init.setStructure(structure);
|
||||
init.setConstructor(constructor);
|
||||
}
|
||||
```
|
||||
|
||||
If there's only a class, use `JSC::LazyProperty<JSGlobalObject, Structure>` instead of `JSC::LazyClassStructure`:
|
||||
|
||||
1. Add the `JSC::LazyProperty<JSGlobalObject, Structure>` to @ZigGlobalObject.h
|
||||
2. Initialize the class structure in @ZigGlobalObject.cpp in `void GlobalObject::finishCreation(VM& vm)`
|
||||
3. Visit the lazy property in visitChildren in @ZigGlobalObject.cpp in `void GlobalObject::visitChildrenImpl`
|
||||
void GlobalObject::finishCreation(VM& vm) {
|
||||
// ...
|
||||
this->m_myLazyProperty.initLater([](const JSC::LazyProperty<JSC::JSGlobalObject, JSC::Structure>::Initializer& init) {
|
||||
        init.set(Bun::initMyStructure(init.vm, reinterpret_cast<Zig::GlobalObject*>(init.owner)));
|
||||
});
|
||||
|
||||
```
|
||||
|
||||
Then, implement the function that creates the structure:
|
||||
```c++
|
||||
Structure* setupX509CertificateStructure(JSC::VM &vm, Zig::GlobalObject* globalObject)
|
||||
{
|
||||
// If there is a prototype:
|
||||
auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
|
||||
auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);
|
||||
|
||||
// If there is no prototype or it only has
|
||||
|
||||
auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
|
||||
init.setPrototype(prototype);
|
||||
init.setStructure(structure);
|
||||
init.setConstructor(constructor);
|
||||
}
|
||||
```
|
||||
|
||||
Then, use the structure by calling `globalObject.m_myStructureName.get(globalObject)`
|
||||
|
||||
```C++
|
||||
JSC_DEFINE_HOST_FUNCTION(x509CertificateConstructorConstruct, (JSGlobalObject * globalObject, CallFrame* callFrame))
|
||||
{
|
||||
VM& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
|
||||
if (!callFrame->argumentCount()) {
|
||||
Bun::throwError(globalObject, scope, ErrorCode::ERR_MISSING_ARGS, "X509Certificate constructor requires at least one argument"_s);
|
||||
return {};
|
||||
}
|
||||
|
||||
JSValue arg = callFrame->uncheckedArgument(0);
|
||||
if (!arg.isCell()) {
|
||||
Bun::throwError(globalObject, scope, ErrorCode::ERR_INVALID_ARG_TYPE, "X509Certificate constructor argument must be a Buffer, TypedArray, or string"_s);
|
||||
return {};
|
||||
}
|
||||
|
||||
auto* zigGlobalObject = defaultGlobalObject(globalObject);
|
||||
Structure* structure = zigGlobalObject->m_JSX509CertificateClassStructure.get(zigGlobalObject);
|
||||
JSValue newTarget = callFrame->newTarget();
|
||||
if (UNLIKELY(zigGlobalObject->m_JSX509CertificateClassStructure.constructor(zigGlobalObject) != newTarget)) {
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
if (!newTarget) {
|
||||
throwTypeError(globalObject, scope, "Class constructor X509Certificate cannot be invoked without 'new'"_s);
|
||||
return {};
|
||||
}
|
||||
|
||||
auto* functionGlobalObject = defaultGlobalObject(getFunctionRealm(globalObject, newTarget.getObject()));
|
||||
RETURN_IF_EXCEPTION(scope, {});
|
||||
        structure = InternalFunction::createSubclassStructure(globalObject, newTarget.getObject(), functionGlobalObject->m_JSX509CertificateClassStructure.get(functionGlobalObject));
|
||||
RETURN_IF_EXCEPTION(scope, {});
|
||||
}
|
||||
|
||||
return JSValue::encode(createX509Certificate(vm, globalObject, structure, arg));
|
||||
}
|
||||
```
|
||||
|
||||
### Expose to Zig
|
||||
|
||||
To expose the constructor to zig:
|
||||
|
||||
```c++
|
||||
extern "C" JSC::EncodedJSValue Bun__JSBigIntStatsObjectConstructor(Zig::GlobalObject* globalobject)
|
||||
{
|
||||
return JSValue::encode(globalobject->m_JSStatsBigIntClassStructure.constructor(globalobject));
|
||||
}
|
||||
```
|
||||
|
||||
Zig:
|
||||
|
||||
```zig
|
||||
extern "c" fn Bun__JSBigIntStatsObjectConstructor(*JSC.JSGlobalObject) JSC.JSValue;
|
||||
pub const getBigIntStatsConstructor = Bun__JSBigIntStatsObjectConstructor;
|
||||
```
|
||||
|
||||
To create an object (instance) of a JS class defined in C++ from Zig, follow the `__toJS` convention like this:
|
||||
|
||||
```c++
|
||||
// X509* is whatever we need to create the object
|
||||
extern "C" EncodedJSValue Bun__X509__toJS(Zig::GlobalObject* globalObject, X509* cert)
|
||||
{
|
||||
// ... implementation details
|
||||
auto* structure = globalObject->m_JSX509CertificateClassStructure.get(globalObject);
|
||||
return JSValue::encode(JSX509Certificate::create(globalObject->vm(), structure, globalObject, WTFMove(cert)));
|
||||
}
|
||||
```
|
||||
|
||||
And from Zig:
|
||||
|
||||
```zig
|
||||
const X509 = opaque {
|
||||
// ... class
|
||||
|
||||
extern fn Bun__X509__toJS(*JSC.JSGlobalObject, *X509) JSC.JSValue;
|
||||
|
||||
pub fn toJS(this: *X509, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
|
||||
return Bun__X509__toJS(globalObject, this);
|
||||
}
|
||||
};
|
||||
```
|
||||
@@ -1,203 +0,0 @@
|
||||
# Registering Functions, Objects, and Modules in Bun
|
||||
|
||||
This guide documents the process of adding new functionality to the Bun global object and runtime.
|
||||
|
||||
## Overview
|
||||
|
||||
Bun's architecture exposes functionality to JavaScript through a set of carefully registered functions, objects, and modules. Most core functionality is implemented in Zig, with JavaScript bindings that make these features accessible to users.
|
||||
|
||||
There are several key ways to expose functionality in Bun:
|
||||
|
||||
1. **Global Functions**: Direct methods on the `Bun` object (e.g., `Bun.serve()`)
|
||||
2. **Getter Properties**: Lazily initialized properties on the `Bun` object (e.g., `Bun.sqlite`)
|
||||
3. **Constructor Classes**: Classes available through the `Bun` object (e.g., `Bun.ValkeyClient`)
|
||||
4. **Global Modules**: Modules that can be imported directly (e.g., `import {X} from "bun:*"`)
|
||||
|
||||
## The Registration Process
|
||||
|
||||
Adding new functionality to Bun involves several coordinated steps across multiple files:
|
||||
|
||||
### 1. Implement the Core Functionality in Zig
|
||||
|
||||
First, implement your feature in Zig, typically in its own directory in `src/`. Examples:
|
||||
|
||||
- `src/valkey/` for Redis/Valkey client
|
||||
- `src/semver/` for SemVer functionality
|
||||
- `src/smtp/` for SMTP client
|
||||
|
||||
### 2. Create JavaScript Bindings
|
||||
|
||||
Create bindings that expose your Zig functionality to JavaScript:
|
||||
|
||||
- Create a class definition file (e.g., `js_bindings.classes.ts`) to define the JavaScript interface
|
||||
- Implement `JSYourFeature` struct in a file like `js_your_feature.zig`
|
||||
|
||||
Example from a class definition file:
|
||||
|
||||
```typescript
|
||||
// Example from a .classes.ts file
|
||||
import { define } from "../../codegen/class-definitions";
|
||||
|
||||
export default [
|
||||
define({
|
||||
name: "YourFeature",
|
||||
construct: true,
|
||||
finalize: true,
|
||||
hasPendingActivity: true,
|
||||
memoryCost: true,
|
||||
klass: {},
|
||||
JSType: "0b11101110",
|
||||
proto: {
|
||||
yourMethod: {
|
||||
fn: "yourZigMethod",
|
||||
length: 1,
|
||||
},
|
||||
property: {
|
||||
getter: "getProperty",
|
||||
},
|
||||
},
|
||||
values: ["cachedValues"],
|
||||
}),
|
||||
];
|
||||
```
|
||||
|
||||
### 3. Register with BunObject in `src/bun.js/bindings/BunObject+exports.h`
|
||||
|
||||
Add an entry to the `FOR_EACH_GETTER` macro:
|
||||
|
||||
```c
|
||||
// In BunObject+exports.h
|
||||
#define FOR_EACH_GETTER(macro) \
|
||||
macro(CSRF) \
|
||||
macro(CryptoHasher) \
|
||||
... \
|
||||
macro(YourFeature) \
|
||||
```
|
||||
|
||||
### 4. Create a Getter Function in `src/bun.js/api/BunObject.zig`
|
||||
|
||||
Implement a getter function in `BunObject.zig` that returns your feature:
|
||||
|
||||
```zig
|
||||
// In BunObject.zig
|
||||
pub const YourFeature = toJSGetter(Bun.getYourFeatureConstructor);
|
||||
|
||||
// In the exportAll() function:
|
||||
@export(&BunObject.YourFeature, .{ .name = getterName("YourFeature") });
|
||||
```
|
||||
|
||||
### 5. Implement the Getter Function in a Relevant Zig File
|
||||
|
||||
Implement the function that creates your object:
|
||||
|
||||
```zig
|
||||
// In your main module file (e.g., src/your_feature/your_feature.zig)
|
||||
pub fn getYourFeatureConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
|
||||
return JSC.API.YourFeature.getConstructor(globalThis);
|
||||
}
|
||||
```
|
||||
|
||||
### 6. Add to Build System
|
||||
|
||||
Ensure your files are included in the build system by adding them to the appropriate targets.
|
||||
|
||||
## Example: Adding a New Module
|
||||
|
||||
Here's a comprehensive example of adding a hypothetical SMTP module:
|
||||
|
||||
1. Create implementation files in `src/smtp/`:
|
||||
|
||||
- `index.zig`: Main entry point that exports everything
|
||||
- `SmtpClient.zig`: Core SMTP client implementation
|
||||
- `js_smtp.zig`: JavaScript bindings
|
||||
- `js_bindings.classes.ts`: Class definition
|
||||
|
||||
2. Define your JS class in `js_bindings.classes.ts`:
|
||||
|
||||
```typescript
|
||||
import { define } from "../../codegen/class-definitions";
|
||||
|
||||
export default [
|
||||
define({
|
||||
name: "EmailClient",
|
||||
construct: true,
|
||||
finalize: true,
|
||||
hasPendingActivity: true,
|
||||
configurable: false,
|
||||
memoryCost: true,
|
||||
klass: {},
|
||||
JSType: "0b11101110",
|
||||
proto: {
|
||||
send: {
|
||||
fn: "send",
|
||||
length: 1,
|
||||
},
|
||||
verify: {
|
||||
fn: "verify",
|
||||
length: 0,
|
||||
},
|
||||
close: {
|
||||
fn: "close",
|
||||
length: 0,
|
||||
},
|
||||
},
|
||||
values: ["connectionPromise"],
|
||||
}),
|
||||
];
|
||||
```
|
||||
|
||||
3. Add getter to `BunObject+exports.h`:
|
||||
|
||||
```c
|
||||
#define FOR_EACH_GETTER(macro) \
|
||||
macro(CSRF) \
|
||||
... \
|
||||
macro(SMTP) \
|
||||
```
|
||||
|
||||
4. Add getter function to `BunObject.zig`:
|
||||
|
||||
```zig
|
||||
pub const SMTP = toJSGetter(Bun.getSmtpConstructor);
|
||||
|
||||
// In exportAll:
|
||||
@export(&BunObject.SMTP, .{ .name = getterName("SMTP") });
|
||||
```
|
||||
|
||||
5. Implement getter in your module:
|
||||
|
||||
```zig
|
||||
pub fn getSmtpConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
|
||||
return JSC.API.JSEmailClient.getConstructor(globalThis);
|
||||
}
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Follow Naming Conventions**: Align your naming with existing patterns
|
||||
2. **Reference Existing Modules**: Study similar modules like Valkey or S3Client for guidance
|
||||
3. **Memory Management**: Be careful with memory management and reference counting
|
||||
4. **Error Handling**: Use `bun.JSError!JSValue` for proper error propagation
|
||||
5. **Documentation**: Add JSDoc comments to your JavaScript bindings
|
||||
6. **Testing**: Add tests for your new functionality
|
||||
|
||||
## Common Gotchas
|
||||
|
||||
- Be sure to handle reference counting properly with `ref()`/`deref()`
|
||||
- Always implement proper cleanup in `deinit()` and `finalize()`
|
||||
- For network operations, manage socket lifetimes correctly
|
||||
- Use `JSC.Codegen` correctly to generate necessary binding code
|
||||
|
||||
## Related Files
|
||||
|
||||
- `src/bun.js/bindings/BunObject+exports.h`: Registration of getters and functions
|
||||
- `src/bun.js/api/BunObject.zig`: Implementation of getters and object creation
|
||||
- `src/bun.js/api/BunObject.classes.ts`: Class definitions
|
||||
- `.cursor/rules/zig-javascriptcore-classes.mdc`: More details on class bindings
|
||||
|
||||
## Additional Resources
|
||||
|
||||
For more detailed information on specific topics:
|
||||
|
||||
- See `zig-javascriptcore-classes.mdc` for details on creating JS class bindings
|
||||
- Review existing modules like `valkey`, `sqlite`, or `s3` for real-world examples
|
||||
@@ -1,91 +0,0 @@
|
||||
---
|
||||
description: Writing tests for Bun
|
||||
globs:
|
||||
---
|
||||
# Writing tests for Bun
|
||||
|
||||
## Where tests are found
|
||||
|
||||
You'll find all of Bun's tests in the `test/` directory.
|
||||
|
||||
* `test/`
|
||||
* `cli/` - CLI command tests, like `bun install` or `bun init`
|
||||
* `js/` - JavaScript & TypeScript tests
|
||||
* `bun/` - `Bun` APIs tests, separated by category, for example: `glob/` for `Bun.Glob` tests
|
||||
* `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests
|
||||
* `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style)
|
||||
* `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests
|
||||
* `third_party/` - npm package tests, to validate that basic usage works in Bun
|
||||
* `napi/` - N-API tests
|
||||
* `v8/` - V8 C++ API tests
|
||||
* `bundler/` - Bundler, transpiler, CSS, and `bun build` tests
|
||||
* `regression/issue/[number]` - Regression tests, always make one when fixing a particular issue
|
||||
|
||||
## How tests are written
|
||||
|
||||
Bun's tests are written as JavaScript and TypeScript files with the Jest-style APIs, like `test`, `describe`, and `expect`. They are tested using Bun's own test runner, `bun test`.
|
||||
|
||||
```js
|
||||
import { describe, test, expect } from "bun:test";
|
||||
import assert, { AssertionError } from "assert";
|
||||
|
||||
describe("assert(expr)", () => {
|
||||
test.each([true, 1, "foo"])(`assert(%p) does not throw`, expr => {
|
||||
expect(() => assert(expr)).not.toThrow();
|
||||
});
|
||||
|
||||
test.each([false, 0, "", null, undefined])(`assert(%p) throws`, expr => {
|
||||
expect(() => assert(expr)).toThrow(AssertionError);
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Testing conventions
|
||||
|
||||
* See `test/harness.ts` for common test utilities and helpers
|
||||
* Be rigorous and test for edge-cases and unexpected inputs
|
||||
* Use data-driven tests, e.g. `test.each`, to reduce boilerplate when possible
|
||||
* When you need to test Bun as a CLI, use the following pattern:
|
||||
|
||||
```js
|
||||
import { test, expect } from "bun:test";
|
||||
import { spawn } from "bun";
|
||||
import { bunExe, bunEnv } from "harness";
|
||||
|
||||
test("bun --version", async () => {
|
||||
const { exited, stdout: stdoutStream, stderr: stderrStream } = spawn({
|
||||
cmd: [bunExe(), "--version"],
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
const [ exitCode, stdout, stderr ] = await Promise.all([
|
||||
exited,
|
||||
new Response(stdoutStream).text(),
|
||||
new Response(stderrStream).text(),
|
||||
]);
|
||||
expect({ exitCode, stdout, stderr }).toMatchObject({
|
||||
exitCode: 0,
|
||||
stdout: expect.stringContaining(Bun.version),
|
||||
stderr: "",
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Before writing a test
|
||||
|
||||
* If you are fixing a bug, write the test first and make sure it fails (as expected) with the canary version of Bun
|
||||
* If you are fixing a Node.js compatibility bug, create a throw-away snippet of code and test that it works as you expect in Node.js, then that it fails (as expected) with the canary version of Bun
|
||||
* When the expected behaviour is ambiguous, defer to matching what happens in Node.js
|
||||
* Always attempt to find related tests in an existing test file before creating a new test file
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,509 +0,0 @@
|
||||
---
|
||||
description: How Zig works with JavaScriptCore bindings generator
|
||||
globs:
|
||||
alwaysApply: false
|
||||
---
|
||||
|
||||
# Bun's JavaScriptCore Class Bindings Generator
|
||||
|
||||
This document explains how Bun's class bindings generator works to bridge Zig and JavaScript code through JavaScriptCore (JSC).
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
Bun's binding system creates a seamless bridge between JavaScript and Zig, allowing Zig implementations to be exposed as JavaScript classes. The system has several key components:
|
||||
|
||||
1. **Zig Implementation** (.zig files)
|
||||
2. **JavaScript Interface Definition** (.classes.ts files)
|
||||
3. **Generated Code** (C++/Zig files that connect everything)
|
||||
|
||||
## Class Definition Files
|
||||
|
||||
### JavaScript Interface (.classes.ts)
|
||||
|
||||
The `.classes.ts` files define the JavaScript API using a declarative approach:
|
||||
|
||||
```typescript
|
||||
// Example: encoding.classes.ts
|
||||
define({
|
||||
name: "TextDecoder",
|
||||
constructor: true,
|
||||
JSType: "object",
|
||||
finalize: true,
|
||||
proto: {
|
||||
decode: {
|
||||
// Function definition
|
||||
args: 1,
|
||||
},
|
||||
encoding: {
|
||||
// Getter with caching
|
||||
getter: true,
|
||||
cache: true,
|
||||
},
|
||||
fatal: {
|
||||
// Read-only property
|
||||
getter: true,
|
||||
},
|
||||
ignoreBOM: {
|
||||
// Read-only property
|
||||
getter: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Each class definition specifies:
|
||||
|
||||
- The class name
|
||||
- Whether it has a constructor
|
||||
- JavaScript type (object, function, etc.)
|
||||
- Properties and methods in the `proto` field
|
||||
- Caching strategy for properties
|
||||
- Finalization requirements
|
||||
|
||||
### Zig Implementation (.zig)
|
||||
|
||||
The Zig files implement the native functionality:
|
||||
|
||||
```zig
|
||||
// Example: TextDecoder.zig
|
||||
pub const TextDecoder = struct {
|
||||
// Expose generated bindings as `js` namespace with trait conversion methods
|
||||
pub const js = JSC.Codegen.JSTextDecoder;
|
||||
pub const toJS = js.toJS;
|
||||
pub const fromJS = js.fromJS;
|
||||
pub const fromJSDirect = js.fromJSDirect;
|
||||
|
||||
// Internal state
|
||||
encoding: []const u8,
|
||||
fatal: bool,
|
||||
ignoreBOM: bool,
|
||||
|
||||
// Constructor implementation - note use of globalObject
|
||||
pub fn constructor(
|
||||
globalObject: *JSGlobalObject,
|
||||
callFrame: *JSC.CallFrame,
|
||||
) bun.JSError!*TextDecoder {
|
||||
// Implementation
|
||||
|
||||
return bun.new(TextDecoder, .{
|
||||
// Fields
|
||||
});
|
||||
}
|
||||
|
||||
// Prototype methods - note return type includes JSError
|
||||
pub fn decode(
|
||||
this: *TextDecoder,
|
||||
globalObject: *JSGlobalObject,
|
||||
callFrame: *JSC.CallFrame,
|
||||
) bun.JSError!JSC.JSValue {
|
||||
// Implementation
|
||||
}
|
||||
|
||||
// Getters
|
||||
pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
|
||||
return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
|
||||
}
|
||||
|
||||
pub fn getFatal(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
|
||||
return JSC.JSValue.jsBoolean(this.fatal);
|
||||
}
|
||||
|
||||
// Cleanup - note standard pattern of using deinit/deref
|
||||
fn deinit(this: *TextDecoder) void {
|
||||
// Release any retained resources
|
||||
// Free the pointer at the end.
|
||||
bun.destroy(this);
|
||||
}
|
||||
|
||||
// Finalize - called by JS garbage collector. This should call deinit, or deref if reference counted.
|
||||
pub fn finalize(this: *TextDecoder) void {
|
||||
this.deinit();
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
Key components in the Zig file:
|
||||
|
||||
- The struct containing native state
|
||||
- `pub const js = JSC.Codegen.JS<ClassName>` to include generated code
|
||||
- Constructor and methods using `bun.JSError!JSValue` return type for proper error handling
|
||||
- Consistent use of `globalObject` parameter name instead of `ctx`
|
||||
- Methods matching the JavaScript interface
|
||||
- Getters/setters for properties
|
||||
- Proper resource cleanup pattern with `deinit()` and `finalize()`
|
||||
- Update `src/bun.js/bindings/generated_classes_list.zig` to include the new class
|
||||
|
||||
## Code Generation System
|
||||
|
||||
The binding generator produces C++ code that connects JavaScript and Zig:
|
||||
|
||||
1. **JSC Class Structure**: Creates C++ classes for the JS object, prototype, and constructor
|
||||
2. **Memory Management**: Handles GC integration through JSC's WriteBarrier
|
||||
3. **Method Binding**: Connects JS function calls to Zig implementations
|
||||
4. **Type Conversion**: Converts between JS values and Zig types
|
||||
5. **Property Caching**: Implements the caching system for properties
|
||||
|
||||
The generated C++ code includes:
|
||||
|
||||
- A JSC wrapper class (`JSTextDecoder`)
|
||||
- A prototype class (`JSTextDecoderPrototype`)
|
||||
- A constructor function (`JSTextDecoderConstructor`)
|
||||
- Function bindings (`TextDecoderPrototype__decodeCallback`)
|
||||
- Property getters/setters (`TextDecoderPrototype__encodingGetterWrap`)
|
||||
|
||||
## CallFrame Access
|
||||
|
||||
The `CallFrame` object provides access to JavaScript execution context:
|
||||
|
||||
```zig
|
||||
pub fn decode(
|
||||
this: *TextDecoder,
|
||||
globalObject: *JSGlobalObject,
|
||||
callFrame: *JSC.CallFrame
|
||||
) bun.JSError!JSC.JSValue {
|
||||
// Get arguments
|
||||
const input = callFrame.argument(0);
|
||||
const options = callFrame.argument(1);
|
||||
|
||||
// Get this value
|
||||
const thisValue = callFrame.thisValue();
|
||||
|
||||
// Implementation with error handling
|
||||
if (input.isUndefinedOrNull()) {
|
||||
return globalObject.throw("Input cannot be null or undefined", .{});
|
||||
}
|
||||
|
||||
// Return value or throw error
|
||||
return JSC.JSValue.jsString(globalObject, "result");
|
||||
}
|
||||
```
|
||||
|
||||
CallFrame methods include:
|
||||
|
||||
- `argument(i)`: Get the i-th argument
|
||||
- `argumentCount()`: Get the number of arguments
|
||||
- `thisValue()`: Get the `this` value
|
||||
- `callee()`: Get the function being called
|
||||
|
||||
## Property Caching and GC-Owned Values
|
||||
|
||||
The `cache: true` option in property definitions enables JSC's WriteBarrier to efficiently store values:
|
||||
|
||||
```typescript
|
||||
encoding: {
|
||||
getter: true,
|
||||
cache: true, // Enable caching
|
||||
}
|
||||
```
|
||||
|
||||
### C++ Implementation
|
||||
|
||||
In the generated C++ code, caching uses JSC's WriteBarrier:
|
||||
|
||||
```cpp
|
||||
JSC_DEFINE_CUSTOM_GETTER(TextDecoderPrototype__encodingGetterWrap, (...)) {
|
||||
auto& vm = JSC::getVM(lexicalGlobalObject);
|
||||
Zig::GlobalObject *globalObject = reinterpret_cast<Zig::GlobalObject*>(lexicalGlobalObject);
|
||||
auto throwScope = DECLARE_THROW_SCOPE(vm);
|
||||
JSTextDecoder* thisObject = jsCast<JSTextDecoder*>(JSValue::decode(encodedThisValue));
|
||||
JSC::EnsureStillAliveScope thisArg = JSC::EnsureStillAliveScope(thisObject);
|
||||
|
||||
// Check for cached value and return if present
|
||||
if (JSValue cachedValue = thisObject->m_encoding.get())
|
||||
return JSValue::encode(cachedValue);
|
||||
|
||||
// Get value from Zig implementation
|
||||
JSC::JSValue result = JSC::JSValue::decode(
|
||||
TextDecoderPrototype__getEncoding(thisObject->wrapped(), globalObject)
|
||||
);
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
|
||||
// Store in cache for future access
|
||||
thisObject->m_encoding.set(vm, thisObject, result);
|
||||
RELEASE_AND_RETURN(throwScope, JSValue::encode(result));
|
||||
}
|
||||
```
|
||||
|
||||
### Zig Accessor Functions
|
||||
|
||||
For each cached property, the generator creates Zig accessor functions that allow Zig code to work with these GC-owned values:
|
||||
|
||||
```zig
|
||||
// External function declarations
|
||||
extern fn TextDecoderPrototype__encodingSetCachedValue(JSC.JSValue, *JSC.JSGlobalObject, JSC.JSValue) callconv(JSC.conv) void;
|
||||
extern fn TextDecoderPrototype__encodingGetCachedValue(JSC.JSValue) callconv(JSC.conv) JSC.JSValue;
|
||||
|
||||
/// `TextDecoder.encoding` setter
|
||||
/// This value will be visited by the garbage collector.
|
||||
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void {
|
||||
JSC.markBinding(@src());
|
||||
TextDecoderPrototype__encodingSetCachedValue(thisValue, globalObject, value);
|
||||
}
|
||||
|
||||
/// `TextDecoder.encoding` getter
|
||||
/// This value will be visited by the garbage collector.
|
||||
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue {
|
||||
JSC.markBinding(@src());
|
||||
const result = TextDecoderPrototype__encodingGetCachedValue(thisValue);
|
||||
if (result == .zero)
|
||||
return null;
|
||||
|
||||
return result;
|
||||
}
|
||||
```
|
||||
|
||||
### Benefits of GC-Owned Values
|
||||
|
||||
This system provides several key benefits:
|
||||
|
||||
1. **Automatic Memory Management**: The JavaScriptCore GC tracks and manages these values
|
||||
2. **Proper Garbage Collection**: The WriteBarrier ensures values are properly visited during GC
|
||||
3. **Consistent Access**: Zig code can easily get/set these cached JS values
|
||||
4. **Performance**: Cached values avoid repeated computation or serialization
|
||||
|
||||
### Use Cases
|
||||
|
||||
GC-owned cached values are particularly useful for:
|
||||
|
||||
1. **Computed Properties**: Store expensive computation results
|
||||
2. **Lazily Created Objects**: Create objects only when needed, then cache them
|
||||
3. **References to Other Objects**: Store references to other JS objects that need GC tracking
|
||||
4. **Memoization**: Cache results based on input parameters
|
||||
|
||||
The WriteBarrier mechanism ensures that any JS values stored in this way are properly tracked by the garbage collector.
|
||||
|
||||
## Memory Management and Finalization
|
||||
|
||||
The binding system handles memory management across the JavaScript/Zig boundary:
|
||||
|
||||
1. **Object Creation**: JavaScript `new TextDecoder()` creates both a JS wrapper and a Zig struct
|
||||
2. **Reference Tracking**: JSC's GC tracks all JS references to the object
|
||||
3. **Finalization**: When the JS object is collected, the finalizer releases Zig resources
|
||||
|
||||
Bun uses a consistent pattern for resource cleanup:
|
||||
|
||||
```zig
|
||||
// Resource cleanup method - separate from finalization
|
||||
pub fn deinit(this: *TextDecoder) void {
|
||||
// Release resources like strings
|
||||
this._encoding.deref(); // String deref pattern
|
||||
|
||||
// Free any buffers
|
||||
if (this.buffer) |buffer| {
|
||||
bun.default_allocator.free(buffer);
|
||||
}
|
||||
}
|
||||
|
||||
// Called by the GC when object is collected
|
||||
pub fn finalize(this: *TextDecoder) void {
|
||||
JSC.markBinding(@src()); // For debugging
|
||||
this.deinit(); // Clean up resources
|
||||
bun.default_allocator.destroy(this); // Free the object itself
|
||||
}
|
||||
```
|
||||
|
||||
Some objects that hold references to other JS objects use `.deref()` instead:
|
||||
|
||||
```zig
|
||||
pub fn finalize(this: *SocketAddress) void {
|
||||
JSC.markBinding(@src());
|
||||
this._presentation.deref(); // Release references
|
||||
this.destroy();
|
||||
}
|
||||
```
|
||||
|
||||
## Error Handling with JSError
|
||||
|
||||
Bun uses `bun.JSError!JSValue` return type for proper error handling:
|
||||
|
||||
```zig
|
||||
pub fn decode(
|
||||
this: *TextDecoder,
|
||||
globalObject: *JSGlobalObject,
|
||||
callFrame: *JSC.CallFrame
|
||||
) bun.JSError!JSC.JSValue {
|
||||
// Throwing an error
|
||||
if (callFrame.argumentCount() < 1) {
|
||||
return globalObject.throw("Missing required argument", .{});
|
||||
}
|
||||
|
||||
// Or returning a success value
|
||||
return JSC.JSValue.jsString(globalObject, "Success!");
|
||||
}
|
||||
```
|
||||
|
||||
This pattern allows Zig functions to:
|
||||
|
||||
1. Return JavaScript values on success
|
||||
2. Throw JavaScript exceptions on error
|
||||
3. Propagate errors automatically through the call stack
|
||||
|
||||
## Type Safety and Error Handling
|
||||
|
||||
The binding system includes robust error handling:
|
||||
|
||||
```cpp
|
||||
// Example of type checking in generated code
|
||||
JSTextDecoder* thisObject = jsDynamicCast<JSTextDecoder*>(callFrame->thisValue());
|
||||
if (UNLIKELY(!thisObject)) {
|
||||
scope.throwException(lexicalGlobalObject,
|
||||
Bun::createInvalidThisError(lexicalGlobalObject, callFrame->thisValue(), "TextDecoder"_s));
|
||||
return {};
|
||||
}
|
||||
```
|
||||
|
||||
## Prototypal Inheritance
|
||||
|
||||
The binding system creates proper JavaScript prototype chains:
|
||||
|
||||
1. **Constructor**: JSTextDecoderConstructor with standard .prototype property
|
||||
2. **Prototype**: JSTextDecoderPrototype with methods and properties
|
||||
3. **Instances**: Each JSTextDecoder instance with `__proto__` pointing to prototype
|
||||
|
||||
This ensures JavaScript inheritance works as expected:
|
||||
|
||||
```cpp
|
||||
// From generated code
|
||||
void JSTextDecoderConstructor::finishCreation(VM& vm, JSC::JSGlobalObject* globalObject, JSTextDecoderPrototype* prototype)
|
||||
{
|
||||
Base::finishCreation(vm, 0, "TextDecoder"_s, PropertyAdditionMode::WithoutStructureTransition);
|
||||
|
||||
// Set up the prototype chain
|
||||
putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly);
|
||||
ASSERT(inherits(info()));
|
||||
}
|
||||
```
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
The binding system is optimized for performance:
|
||||
|
||||
1. **Direct Pointer Access**: JavaScript objects maintain a direct pointer to Zig objects
|
||||
2. **Property Caching**: WriteBarrier caching avoids repeated native calls for stable properties
|
||||
3. **Memory Management**: JSC garbage collection integrated with Zig memory management
|
||||
4. **Type Conversion**: Fast paths for common JavaScript/Zig type conversions
|
||||
|
||||
## Creating a New Class Binding
|
||||
|
||||
To create a new class binding in Bun:
|
||||
|
||||
1. **Define the class interface** in a `.classes.ts` file:
|
||||
|
||||
```typescript
|
||||
define({
|
||||
name: "MyClass",
|
||||
constructor: true,
|
||||
finalize: true,
|
||||
proto: {
|
||||
myMethod: {
|
||||
args: 1,
|
||||
},
|
||||
myProperty: {
|
||||
getter: true,
|
||||
cache: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
2. **Implement the native functionality** in a `.zig` file:
|
||||
|
||||
```zig
|
||||
pub const MyClass = struct {
|
||||
// Generated bindings
|
||||
pub const js = JSC.Codegen.JSMyClass;
|
||||
pub const toJS = js.toJS;
|
||||
pub const fromJS = js.fromJS;
|
||||
pub const fromJSDirect = js.fromJSDirect;
|
||||
|
||||
// State
|
||||
value: []const u8,
|
||||
|
||||
pub const new = bun.TrivialNew(@This());
|
||||
|
||||
// Constructor
|
||||
pub fn constructor(
|
||||
globalObject: *JSGlobalObject,
|
||||
callFrame: *JSC.CallFrame,
|
||||
) bun.JSError!*MyClass {
|
||||
const arg = callFrame.argument(0);
|
||||
// Implementation
|
||||
}
|
||||
|
||||
// Method
|
||||
pub fn myMethod(
|
||||
this: *MyClass,
|
||||
globalObject: *JSGlobalObject,
|
||||
callFrame: *JSC.CallFrame,
|
||||
) bun.JSError!JSC.JSValue {
|
||||
// Implementation
|
||||
}
|
||||
|
||||
// Getter
|
||||
pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue {
|
||||
return JSC.JSValue.jsString(globalObject, this.value);
|
||||
}
|
||||
|
||||
// Resource cleanup
|
||||
pub fn deinit(this: *MyClass) void {
|
||||
// Clean up resources
|
||||
}
|
||||
|
||||
pub fn finalize(this: *MyClass) void {
|
||||
this.deinit();
|
||||
bun.destroy(this);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
3. **The binding generator** creates all necessary C++ and Zig glue code to connect JavaScript and Zig, including:
|
||||
- C++ class definitions
|
||||
- Method and property bindings
|
||||
- Memory management utilities
|
||||
- GC integration code
|
||||
|
||||
## Generated Code Structure
|
||||
|
||||
The binding generator produces several components:
|
||||
|
||||
### 1. C++ Classes
|
||||
|
||||
For each Zig class, the system generates:
|
||||
|
||||
- **JS<Class>**: Main wrapper that holds a pointer to the Zig object (`JSTextDecoder`)
|
||||
- **JS<Class>Prototype**: Contains methods and properties (`JSTextDecoderPrototype`)
|
||||
- **JS<Class>Constructor**: Implementation of the JavaScript constructor (`JSTextDecoderConstructor`)
|
||||
|
||||
### 2. C++ Methods and Properties
|
||||
|
||||
- **Method Callbacks**: `TextDecoderPrototype__decodeCallback`
|
||||
- **Property Getters/Setters**: `TextDecoderPrototype__encodingGetterWrap`
|
||||
- **Initialization Functions**: `finishCreation` methods for setting up the class
|
||||
|
||||
### 3. Zig Bindings
|
||||
|
||||
- **External Function Declarations**:
|
||||
|
||||
```zig
|
||||
extern fn TextDecoderPrototype__decode(*TextDecoder, *JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSC.EncodedJSValue;
|
||||
```
|
||||
|
||||
- **Cached Value Accessors**:
|
||||
|
||||
```zig
|
||||
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue { ... }
|
||||
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { ... }
|
||||
```
|
||||
|
||||
- **Constructor Helpers**:
|
||||
```zig
|
||||
pub fn create(globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { ... }
|
||||
```
|
||||
|
||||
### 4. GC Integration
|
||||
|
||||
- **Memory Cost Calculation**: `estimatedSize` method
|
||||
- **Child Visitor Methods**: `visitChildrenImpl` and `visitAdditionalChildren`
|
||||
- **Heap Analysis**: `analyzeHeap` for debugging memory issues
|
||||
|
||||
This architecture makes it possible to implement high-performance native functionality in Zig while exposing a clean, idiomatic JavaScript API to users.
|
||||
66
.github/workflows/claude.yml
vendored
66
.github/workflows/claude.yml
vendored
@@ -1,66 +0,0 @@
|
||||
name: Claude Code
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
pull_request_review_comment:
|
||||
types: [created]
|
||||
issues:
|
||||
types: [opened, assigned]
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
|
||||
jobs:
|
||||
claude:
|
||||
if: |
|
||||
github.repository == 'oven-sh/bun' &&
|
||||
(
|
||||
(github.event_name == 'issue_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) ||
|
||||
(github.event_name == 'pull_request_review' && (github.event.review.author_association == 'MEMBER' || github.event.review.author_association == 'OWNER' || github.event.review.author_association == 'COLLABORATOR')) ||
|
||||
(github.event_name == 'issues' && (github.event.issue.author_association == 'MEMBER' || github.event.issue.author_association == 'OWNER' || github.event.issue.author_association == 'COLLABORATOR'))
|
||||
) &&
|
||||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
|
||||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
|
||||
runs-on: claude
|
||||
env:
|
||||
IS_SANDBOX: 1
|
||||
container:
|
||||
image: localhost:5000/claude-bun:latest
|
||||
options: --privileged --user 1000:1000
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
working-directory: /workspace/bun
|
||||
run: |
|
||||
git config --global user.email "claude-bot@bun.sh" && \
|
||||
git config --global user.name "Claude Bot" && \
|
||||
git config --global url."git@github.com:".insteadOf "https://github.com/" && \
|
||||
git config --global url."git@github.com:".insteadOf "http://github.com/" && \
|
||||
git config --global --add safe.directory /workspace/bun && \
|
||||
git config --global push.default current && \
|
||||
git config --global pull.rebase true && \
|
||||
git config --global init.defaultBranch main && \
|
||||
git config --global core.editor "vim" && \
|
||||
git config --global color.ui auto && \
|
||||
git config --global fetch.prune true && \
|
||||
git config --global diff.colorMoved zebra && \
|
||||
git config --global merge.conflictStyle diff3 && \
|
||||
git config --global rerere.enabled true && \
|
||||
git config --global core.autocrlf input
|
||||
git fetch origin ${{ github.event.pull_request.head.sha }}
|
||||
git checkout ${{ github.event.pull_request.head.ref }}
|
||||
git reset --hard origin/${{ github.event.pull_request.head.ref }}
|
||||
- name: Run Claude Code
|
||||
id: claude
|
||||
uses: anthropics/claude-code-action@v1
|
||||
with:
|
||||
timeout_minutes: "180"
|
||||
claude_args: |
|
||||
--dangerously-skip-permissions
|
||||
--system-prompt "You are working on the Bun codebase"
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
58
.github/workflows/codex-test-sync.yml
vendored
58
.github/workflows/codex-test-sync.yml
vendored
@@ -1,58 +0,0 @@
|
||||
name: Codex Test Sync
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [labeled, opened]
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.2.15"
|
||||
|
||||
jobs:
|
||||
sync-node-tests:
|
||||
runs-on: ubuntu-latest
|
||||
if: |
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'codex') ||
|
||||
(github.event.action == 'opened' && contains(github.event.pull_request.labels.*.name, 'codex')) ||
|
||||
contains(github.head_ref, 'codex')
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@v44
|
||||
with:
|
||||
files: |
|
||||
test/js/node/test/parallel/**/*.{js,mjs,ts}
|
||||
test/js/node/test/sequential/**/*.{js,mjs,ts}
|
||||
|
||||
- name: Sync tests
|
||||
if: steps.changed-files.outputs.any_changed == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Changed test files:"
|
||||
echo "${{ steps.changed-files.outputs.all_changed_files }}"
|
||||
|
||||
# Process each changed test file
|
||||
for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
|
||||
# Extract test name from file path
|
||||
test_name=$(basename "$file" | sed 's/\.[^.]*$//')
|
||||
echo "Syncing test: $test_name"
|
||||
bun node:test:cp "$test_name"
|
||||
done
|
||||
|
||||
- name: Commit changes
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "Sync Node.js tests with upstream"
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -9,6 +9,7 @@
|
||||
.ninja_deps
|
||||
.ninja_log
|
||||
.npm
|
||||
.npmrc
|
||||
.npm.gz
|
||||
.parcel-cache
|
||||
.swcrc
|
||||
|
||||
16
.vscode/settings.json
vendored
16
.vscode/settings.json
vendored
@@ -27,18 +27,22 @@
|
||||
"git.ignoreLimitWarning": true,
|
||||
|
||||
// Zig
|
||||
"zig.initialSetupDone": true,
|
||||
"zig.buildOption": "build",
|
||||
// "zig.initialSetupDone": true,
|
||||
// "zig.buildOption": "build",
|
||||
"zig.zls.zigLibPath": "${workspaceFolder}/vendor/zig/lib",
|
||||
"zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen", "--watch", "-fincremental"],
|
||||
"zig.zls.buildOnSaveStep": "check",
|
||||
"zig.buildOnSaveArgs": [
|
||||
"-Dgenerated-code=./build/debug/codegen",
|
||||
"--watch",
|
||||
"-fincremental"
|
||||
],
|
||||
// "zig.zls.buildOnSaveStep": "check",
|
||||
// "zig.zls.enableBuildOnSave": true,
|
||||
// "zig.buildOnSave": true,
|
||||
"zig.buildFilePath": "${workspaceFolder}/build.zig",
|
||||
// "zig.buildFilePath": "${workspaceFolder}/build.zig",
|
||||
"zig.path": "${workspaceFolder}/vendor/zig/zig.exe",
|
||||
"zig.zls.path": "${workspaceFolder}/vendor/zig/zls.exe",
|
||||
"zig.formattingProvider": "zls",
|
||||
"zig.zls.enableInlayHints": false,
|
||||
// "zig.zls.enableInlayHints": false,
|
||||
"[zig]": {
|
||||
"editor.tabSize": 4,
|
||||
"editor.useTabStops": false,
|
||||
|
||||
@@ -6,7 +6,7 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed
|
||||
|
||||
- **Build Bun**: `bun bd`
|
||||
- Creates a debug build at `./build/debug/bun-debug`
|
||||
- **CRITICAL**: no need for a timeout, the build is really fast!
|
||||
- **CRITICAL**: do not set a timeout when running `bun bd`
|
||||
- **Run tests with your debug build**: `bun bd test <test-file>`
|
||||
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
|
||||
- **Run any command with debug build**: `bun bd <command>`
|
||||
@@ -94,7 +94,7 @@ test("(multi-file test) my feature", async () => {
|
||||
|
||||
- Always use `port: 0`. Do not hardcode ports. Do not use your own random port number function.
|
||||
- Use `normalizeBunSnapshot` to normalize snapshot output of the test.
|
||||
- NEVER write tests that check for no "panic" or "uncaught exception" or similar in the test output. That is NOT a valid test.
|
||||
- NEVER write tests that check for no "panic" or "uncaught exception" or similar in the test output. These tests will never fail in CI.
|
||||
- Use `tempDir` from `"harness"` to create a temporary directory. **Do not** use `tmpdirSync` or `fs.mkdtempSync` to create temporary directories.
|
||||
- When spawning processes, tests should expect(stdout).toBe(...) BEFORE expect(exitCode).toBe(0). This gives you a more useful error message on test failure.
|
||||
- **CRITICAL**: Do not write flaky tests. Do not use `setTimeout` in tests. Instead, `await` the condition to be met. You are not testing the TIME PASSING, you are testing the CONDITION.
|
||||
|
||||
@@ -47,15 +47,7 @@ include(SetupEsbuild)
|
||||
include(SetupZig)
|
||||
include(SetupRust)
|
||||
|
||||
find_program(SCCACHE_PROGRAM sccache)
|
||||
if(SCCACHE_PROGRAM AND NOT DEFINED ENV{NO_SCCACHE})
|
||||
include(SetupSccache)
|
||||
else()
|
||||
find_program(CCACHE_PROGRAM ccache)
|
||||
if(CCACHE_PROGRAM)
|
||||
include(SetupCcache)
|
||||
endif()
|
||||
endif()
|
||||
include(SetupCcache)
|
||||
|
||||
# Generate dependency versions header
|
||||
include(GenerateDependencyVersions)
|
||||
|
||||
@@ -23,7 +23,7 @@ Using your system's package manager, install Bun's dependencies:
|
||||
{% codetabs group="os" %}
|
||||
|
||||
```bash#macOS (Homebrew)
|
||||
$ brew install automake cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby sccache
|
||||
$ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby
|
||||
```
|
||||
|
||||
```bash#Ubuntu/Debian
|
||||
@@ -65,43 +65,28 @@ $ brew install bun
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### Optional: Install `sccache`
|
||||
### Optional: Install `ccache`
|
||||
|
||||
sccache is used to cache compilation artifacts, significantly speeding up builds. It must be installed with S3 support:
|
||||
ccache is used to cache compilation artifacts, significantly speeding up builds:
|
||||
|
||||
```bash
|
||||
# For macOS
|
||||
$ brew install sccache
|
||||
$ brew install ccache
|
||||
|
||||
# For Linux. Note that the version in your package manager may not have S3 support.
|
||||
$ cargo install sccache --features=s3
|
||||
# For Ubuntu/Debian
|
||||
$ sudo apt install ccache
|
||||
|
||||
# For Arch
|
||||
$ sudo pacman -S ccache
|
||||
|
||||
# For Fedora
|
||||
$ sudo dnf install ccache
|
||||
|
||||
# For openSUSE
|
||||
$ sudo zypper install ccache
|
||||
```
|
||||
|
||||
This will install `sccache` with S3 support. Our build scripts will automatically detect and use `sccache` with our shared S3 cache. **Note**: Not all versions of `sccache` are compiled with S3 support, hence we recommend installing it via `cargo`.
|
||||
|
||||
#### Registering AWS Credentials for `sccache` (Core Developers Only)
|
||||
|
||||
Core developers have write access to the shared S3 cache. To enable write access, you must log in with AWS credentials. The easiest way to do this is to use the [`aws` CLI](https://aws.amazon.com/cli/) and invoke [`aws configure` to provide your AWS security info](https://docs.aws.amazon.com/cli/latest/reference/configure/).
|
||||
|
||||
The `cmake` scripts should automatically detect your AWS credentials from the environment or the `~/.aws/credentials` file.
|
||||
|
||||
<details>
|
||||
<summary>Logging in to the `aws` CLI</summary>
|
||||
|
||||
1. Install the AWS CLI by following [the official guide](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html).
|
||||
2. Log in to your AWS account console. A team member should provide you with your credentials.
|
||||
3. Click your name in the top right > Security credentials.
|
||||
4. Scroll to "Access keys" and create a new access key.
|
||||
5. Run `aws configure` in your terminal and provide the access key ID and secret access key when prompted.
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Common Issues You May Encounter</summary>
|
||||
|
||||
- To confirm that the cache is being used, you can use the `sccache --show-stats` command right after a build. This will expose very useful statistics, including cache hits/misses.
|
||||
- If you have multiple AWS profiles configured, ensure that the correct profile is set in the `AWS_PROFILE` environment variable.
|
||||
- `sccache` follows a server-client model. If you run into weird issues where `sccache` refuses to use S3, even though you have AWS credentials configured, try killing any running `sccache` servers with `sccache --stop-server` and then re-running the build.
|
||||
</details>
|
||||
Our build scripts will automatically detect and use `ccache` if available. You can check cache statistics with `ccache --show-stats`.
|
||||
|
||||
## Install LLVM
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"configVersion": 0,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "bench",
|
||||
@@ -22,6 +23,7 @@
|
||||
"react-dom": "^18.3.1",
|
||||
"string-width": "7.1.0",
|
||||
"strip-ansi": "^7.1.0",
|
||||
"tar": "^7.4.3",
|
||||
"tinycolor2": "^1.6.0",
|
||||
"zx": "^7.2.3",
|
||||
},
|
||||
@@ -107,6 +109,8 @@
|
||||
|
||||
"@fastify/proxy-addr": ["@fastify/proxy-addr@5.0.0", "", { "dependencies": { "@fastify/forwarded": "^3.0.0", "ipaddr.js": "^2.1.0" } }, "sha512-37qVVA1qZ5sgH7KpHkkC4z9SK6StIsIcOmpjvMPXNb3vx2GQxhZocogVYbr2PbbeLCQxYIPDok307xEvRZOzGA=="],
|
||||
|
||||
"@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "^7.0.4" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="],
|
||||
|
||||
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.1.1", "", { "dependencies": { "@jridgewell/set-array": "^1.0.0", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w=="],
|
||||
|
||||
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.0", "", {}, "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w=="],
|
||||
@@ -181,6 +185,8 @@
|
||||
|
||||
"chalk": ["chalk@5.3.0", "", {}, "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w=="],
|
||||
|
||||
"chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="],
|
||||
|
||||
"color": ["color@4.2.3", "", { "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" } }, "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A=="],
|
||||
|
||||
"color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
|
||||
@@ -361,6 +367,10 @@
|
||||
|
||||
"minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="],
|
||||
|
||||
"minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="],
|
||||
|
||||
"minizlib": ["minizlib@3.1.0", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw=="],
|
||||
|
||||
"mitata": ["mitata@1.0.25", "", {}, "sha512-0v5qZtVW5vwj9FDvYfraR31BMDcRLkhSFWPTLaxx/Z3/EvScfVtAAWtMI2ArIbBcwh7P86dXh0lQWKiXQPlwYA=="],
|
||||
|
||||
"ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="],
|
||||
@@ -457,6 +467,8 @@
|
||||
|
||||
"supports-color": ["supports-color@5.5.0", "", { "dependencies": { "has-flag": "^3.0.0" } }, "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow=="],
|
||||
|
||||
"tar": ["tar@7.5.2", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg=="],
|
||||
|
||||
"thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="],
|
||||
|
||||
"through": ["through@2.3.8", "", {}, "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg=="],
|
||||
@@ -481,7 +493,7 @@
|
||||
|
||||
"which": ["which@3.0.1", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "bin/which.js" } }, "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg=="],
|
||||
|
||||
"yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="],
|
||||
"yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="],
|
||||
|
||||
"yaml": ["yaml@2.3.4", "", {}, "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA=="],
|
||||
|
||||
@@ -501,6 +513,8 @@
|
||||
|
||||
"light-my-request/process-warning": ["process-warning@4.0.1", "", {}, "sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q=="],
|
||||
|
||||
"lru-cache/yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="],
|
||||
|
||||
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
|
||||
|
||||
"ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"configVersion": 0,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "installbench",
|
||||
@@ -12,7 +13,7 @@
|
||||
"@trpc/server": "^11.0.0",
|
||||
"drizzle-orm": "^0.41.0",
|
||||
"esbuild": "^0.25.11",
|
||||
"next": "^15.2.3",
|
||||
"next": "15.5.7",
|
||||
"next-auth": "5.0.0-beta.25",
|
||||
"postgres": "^3.4.4",
|
||||
"react": "^19.0.0",
|
||||
@@ -175,23 +176,23 @@
|
||||
|
||||
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "3.1.2", "@jridgewell/sourcemap-codec": "1.5.5" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="],
|
||||
|
||||
"@next/env": ["@next/env@15.5.6", "", {}, "sha512-3qBGRW+sCGzgbpc5TS1a0p7eNxnOarGVQhZxfvTdnV0gFI61lX7QNtQ4V1TSREctXzYn5NetbUsLvyqwLFJM6Q=="],
|
||||
"@next/env": ["@next/env@15.5.7", "", {}, "sha512-4h6Y2NyEkIEN7Z8YxkA27pq6zTkS09bUSYC0xjd0NpwFxjnIKeZEeH591o5WECSmjpUhLn3H2QLJcDye3Uzcvg=="],
|
||||
|
||||
"@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@15.5.6", "", { "os": "darwin", "cpu": "arm64" }, "sha512-ES3nRz7N+L5Umz4KoGfZ4XX6gwHplwPhioVRc25+QNsDa7RtUF/z8wJcbuQ2Tffm5RZwuN2A063eapoJ1u4nPg=="],
|
||||
"@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@15.5.7", "", { "os": "darwin", "cpu": "arm64" }, "sha512-IZwtxCEpI91HVU/rAUOOobWSZv4P2DeTtNaCdHqLcTJU4wdNXgAySvKa/qJCgR5m6KI8UsKDXtO2B31jcaw1Yw=="],
|
||||
|
||||
"@next/swc-darwin-x64": ["@next/swc-darwin-x64@15.5.6", "", { "os": "darwin", "cpu": "x64" }, "sha512-JIGcytAyk9LQp2/nuVZPAtj8uaJ/zZhsKOASTjxDug0SPU9LAM3wy6nPU735M1OqacR4U20LHVF5v5Wnl9ptTA=="],
|
||||
"@next/swc-darwin-x64": ["@next/swc-darwin-x64@15.5.7", "", { "os": "darwin", "cpu": "x64" }, "sha512-UP6CaDBcqaCBuiq/gfCEJw7sPEoX1aIjZHnBWN9v9qYHQdMKvCKcAVs4OX1vIjeE+tC5EIuwDTVIoXpUes29lg=="],
|
||||
|
||||
"@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@15.5.6", "", { "os": "linux", "cpu": "arm64" }, "sha512-qvz4SVKQ0P3/Im9zcS2RmfFL/UCQnsJKJwQSkissbngnB/12c6bZTCB0gHTexz1s6d/mD0+egPKXAIRFVS7hQg=="],
|
||||
"@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@15.5.7", "", { "os": "linux", "cpu": "arm64" }, "sha512-NCslw3GrNIw7OgmRBxHtdWFQYhexoUCq+0oS2ccjyYLtcn1SzGzeM54jpTFonIMUjNbHmpKpziXnpxhSWLcmBA=="],
|
||||
|
||||
"@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@15.5.6", "", { "os": "linux", "cpu": "arm64" }, "sha512-FsbGVw3SJz1hZlvnWD+T6GFgV9/NYDeLTNQB2MXoPN5u9VA9OEDy6fJEfePfsUKAhJufFbZLgp0cPxMuV6SV0w=="],
|
||||
"@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@15.5.7", "", { "os": "linux", "cpu": "arm64" }, "sha512-nfymt+SE5cvtTrG9u1wdoxBr9bVB7mtKTcj0ltRn6gkP/2Nu1zM5ei8rwP9qKQP0Y//umK+TtkKgNtfboBxRrw=="],
|
||||
|
||||
"@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@15.5.6", "", { "os": "linux", "cpu": "x64" }, "sha512-3QnHGFWlnvAgyxFxt2Ny8PTpXtQD7kVEeaFat5oPAHHI192WKYB+VIKZijtHLGdBBvc16tiAkPTDmQNOQ0dyrA=="],
|
||||
"@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@15.5.7", "", { "os": "linux", "cpu": "x64" }, "sha512-hvXcZvCaaEbCZcVzcY7E1uXN9xWZfFvkNHwbe/n4OkRhFWrs1J1QV+4U1BN06tXLdaS4DazEGXwgqnu/VMcmqw=="],
|
||||
|
||||
"@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@15.5.6", "", { "os": "linux", "cpu": "x64" }, "sha512-OsGX148sL+TqMK9YFaPFPoIaJKbFJJxFzkXZljIgA9hjMjdruKht6xDCEv1HLtlLNfkx3c5w2GLKhj7veBQizQ=="],
|
||||
"@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@15.5.7", "", { "os": "linux", "cpu": "x64" }, "sha512-4IUO539b8FmF0odY6/SqANJdgwn1xs1GkPO5doZugwZ3ETF6JUdckk7RGmsfSf7ws8Qb2YB5It33mvNL/0acqA=="],
|
||||
|
||||
"@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@15.5.6", "", { "os": "win32", "cpu": "arm64" }, "sha512-ONOMrqWxdzXDJNh2n60H6gGyKed42Ieu6UTVPZteXpuKbLZTH4G4eBMsr5qWgOBA+s7F+uB4OJbZnrkEDnZ5Fg=="],
|
||||
"@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@15.5.7", "", { "os": "win32", "cpu": "arm64" }, "sha512-CpJVTkYI3ZajQkC5vajM7/ApKJUOlm6uP4BknM3XKvJ7VXAvCqSjSLmM0LKdYzn6nBJVSjdclx8nYJSa3xlTgQ=="],
|
||||
|
||||
"@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@15.5.6", "", { "os": "win32", "cpu": "x64" }, "sha512-pxK4VIjFRx1MY92UycLOOw7dTdvccWsNETQ0kDHkBlcFH1GrTLUjSiHU1ohrznnux6TqRHgv5oflhfIWZwVROQ=="],
|
||||
"@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@15.5.7", "", { "os": "win32", "cpu": "x64" }, "sha512-gMzgBX164I6DN+9/PGA+9dQiwmTkE4TloBNx8Kv9UiGARsr9Nba7IpcBRA1iTV9vwlYnrE3Uy6I7Aj6qLjQuqw=="],
|
||||
|
||||
"@panva/hkdf": ["@panva/hkdf@1.2.1", "", {}, "sha512-6oclG6Y3PiDFcoyk8srjLfVKyMfVCKJ27JwNPViuXziFpmdz+MZnZN/aKY0JGXgYuO/VghU0jcOAZgWXZ1Dmrw=="],
|
||||
|
||||
@@ -323,7 +324,7 @@
|
||||
|
||||
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
|
||||
|
||||
"next": ["next@15.5.6", "", { "dependencies": { "@next/env": "15.5.6", "@swc/helpers": "0.5.15", "caniuse-lite": "1.0.30001752", "postcss": "8.4.31", "styled-jsx": "5.1.6" }, "optionalDependencies": { "@next/swc-darwin-arm64": "15.5.6", "@next/swc-darwin-x64": "15.5.6", "@next/swc-linux-arm64-gnu": "15.5.6", "@next/swc-linux-arm64-musl": "15.5.6", "@next/swc-linux-x64-gnu": "15.5.6", "@next/swc-linux-x64-musl": "15.5.6", "@next/swc-win32-arm64-msvc": "15.5.6", "@next/swc-win32-x64-msvc": "15.5.6", "sharp": "0.34.4" }, "peerDependencies": { "react": "19.2.0", "react-dom": "19.2.0" }, "bin": { "next": "dist/bin/next" } }, "sha512-zTxsnI3LQo3c9HSdSf91O1jMNsEzIXDShXd4wVdg9y5shwLqBXi4ZtUUJyB86KGVSJLZx0PFONvO54aheGX8QQ=="],
|
||||
"next": ["next@15.5.7", "", { "dependencies": { "@next/env": "15.5.7", "@swc/helpers": "0.5.15", "caniuse-lite": "^1.0.30001579", "postcss": "8.4.31", "styled-jsx": "5.1.6" }, "optionalDependencies": { "@next/swc-darwin-arm64": "15.5.7", "@next/swc-darwin-x64": "15.5.7", "@next/swc-linux-arm64-gnu": "15.5.7", "@next/swc-linux-arm64-musl": "15.5.7", "@next/swc-linux-x64-gnu": "15.5.7", "@next/swc-linux-x64-musl": "15.5.7", "@next/swc-win32-arm64-msvc": "15.5.7", "@next/swc-win32-x64-msvc": "15.5.7", "sharp": "^0.34.3" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", "@playwright/test": "^1.51.1", "babel-plugin-react-compiler": "*", "react": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "react-dom": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "sass": "^1.3.0" }, "optionalPeers": ["@opentelemetry/api", "@playwright/test", "babel-plugin-react-compiler", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-+t2/0jIJ48kUpGKkdlhgkv+zPTEOoXyr60qXe68eB/pl3CMJaLeIGjzp5D6Oqt25hCBiBTt8wEeeAzfJvUKnPQ=="],
|
||||
|
||||
"next-auth": ["next-auth@5.0.0-beta.25", "", { "dependencies": { "@auth/core": "0.37.2" }, "peerDependencies": { "next": "15.5.6", "react": "19.2.0" } }, "sha512-2dJJw1sHQl2qxCrRk+KTQbeH+izFbGFPuJj5eGgBZFYyiYYtvlrBeUw1E/OJJxTRjuxbSYGnCTkUIRsIIW0bog=="],
|
||||
|
||||
|
||||
@@ -26,7 +26,7 @@
|
||||
"@trpc/server": "^11.0.0",
|
||||
"drizzle-orm": "^0.41.0",
|
||||
"esbuild": "^0.25.11",
|
||||
"next": "^15.2.3",
|
||||
"next": "15.5.7",
|
||||
"next-auth": "5.0.0-beta.25",
|
||||
"postgres": "^3.4.4",
|
||||
"react": "^19.0.0",
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
"react-dom": "^18.3.1",
|
||||
"string-width": "7.1.0",
|
||||
"strip-ansi": "^7.1.0",
|
||||
"tar": "^7.4.3",
|
||||
"tinycolor2": "^1.6.0",
|
||||
"zx": "^7.2.3"
|
||||
},
|
||||
|
||||
477
bench/snippets/archive.mjs
Normal file
477
bench/snippets/archive.mjs
Normal file
@@ -0,0 +1,477 @@
|
||||
import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { Pack, Unpack } from "tar";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
// Check if Bun.Archive is available
|
||||
const hasBunArchive = typeof Bun !== "undefined" && typeof Bun.Archive !== "undefined";
|
||||
|
||||
// Test data sizes
|
||||
const smallContent = "Hello, World!";
|
||||
const mediumContent = Buffer.alloc(10 * 1024, "x").toString(); // 10KB
|
||||
const largeContent = Buffer.alloc(100 * 1024, "x").toString(); // 100KB
|
||||
|
||||
// Create test files for node-tar (it reads from filesystem)
|
||||
const setupDir = mkdtempSync(join(tmpdir(), "archive-bench-setup-"));
|
||||
|
||||
function setupNodeTarFiles(prefix, files) {
|
||||
const dir = join(setupDir, prefix);
|
||||
mkdirSync(dir, { recursive: true });
|
||||
for (const [name, content] of Object.entries(files)) {
|
||||
const filePath = join(dir, name);
|
||||
const fileDir = join(filePath, "..");
|
||||
mkdirSync(fileDir, { recursive: true });
|
||||
writeFileSync(filePath, content);
|
||||
}
|
||||
return dir;
|
||||
}
|
||||
|
||||
// Setup directories for different test cases
|
||||
const smallFilesDir = setupNodeTarFiles("small", {
|
||||
"file1.txt": smallContent,
|
||||
"file2.txt": smallContent,
|
||||
"file3.txt": smallContent,
|
||||
});
|
||||
|
||||
const mediumFilesDir = setupNodeTarFiles("medium", {
|
||||
"file1.txt": mediumContent,
|
||||
"file2.txt": mediumContent,
|
||||
"file3.txt": mediumContent,
|
||||
});
|
||||
|
||||
const largeFilesDir = setupNodeTarFiles("large", {
|
||||
"file1.txt": largeContent,
|
||||
"file2.txt": largeContent,
|
||||
"file3.txt": largeContent,
|
||||
});
|
||||
|
||||
const manyFilesEntries = {};
|
||||
for (let i = 0; i < 100; i++) {
|
||||
manyFilesEntries[`file${i}.txt`] = smallContent;
|
||||
}
|
||||
const manyFilesDir = setupNodeTarFiles("many", manyFilesEntries);
|
||||
|
||||
// Pre-create archives for extraction benchmarks
|
||||
let smallTarGzBuffer, mediumTarGzBuffer, largeTarGzBuffer, manyFilesTarGzBuffer;
|
||||
let smallTarBuffer, mediumTarBuffer, largeTarBuffer, manyFilesTarBuffer;
|
||||
let smallBunArchiveGz, mediumBunArchiveGz, largeBunArchiveGz, manyFilesBunArchiveGz;
|
||||
let smallBunArchive, mediumBunArchive, largeBunArchive, manyFilesBunArchive;
|
||||
|
||||
// Create tar buffer using node-tar (with optional gzip)
|
||||
async function createNodeTarBuffer(cwd, files, gzip = false) {
|
||||
return new Promise(resolve => {
|
||||
const pack = new Pack({ cwd, gzip });
|
||||
const bufs = [];
|
||||
pack.on("data", chunk => bufs.push(chunk));
|
||||
pack.on("end", () => resolve(Buffer.concat(bufs)));
|
||||
for (const file of files) {
|
||||
pack.add(file);
|
||||
}
|
||||
pack.end();
|
||||
});
|
||||
}
|
||||
|
||||
// Extract tar buffer using node-tar
|
||||
async function extractNodeTarBuffer(buffer, cwd) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const unpack = new Unpack({ cwd });
|
||||
unpack.on("end", resolve);
|
||||
unpack.on("error", reject);
|
||||
unpack.end(buffer);
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize gzipped archives
|
||||
smallTarGzBuffer = await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
mediumTarGzBuffer = await createNodeTarBuffer(mediumFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
largeTarGzBuffer = await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
manyFilesTarGzBuffer = await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), true);
|
||||
|
||||
// Initialize uncompressed archives
|
||||
smallTarBuffer = await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
mediumTarBuffer = await createNodeTarBuffer(mediumFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
largeTarBuffer = await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
manyFilesTarBuffer = await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), false);
|
||||
|
||||
const smallFiles = { "file1.txt": smallContent, "file2.txt": smallContent, "file3.txt": smallContent };
|
||||
const mediumFiles = { "file1.txt": mediumContent, "file2.txt": mediumContent, "file3.txt": mediumContent };
|
||||
const largeFiles = { "file1.txt": largeContent, "file2.txt": largeContent, "file3.txt": largeContent };
|
||||
|
||||
if (hasBunArchive) {
|
||||
smallBunArchiveGz = await Bun.Archive.from(smallFiles).bytes("gzip");
|
||||
mediumBunArchiveGz = await Bun.Archive.from(mediumFiles).bytes("gzip");
|
||||
largeBunArchiveGz = await Bun.Archive.from(largeFiles).bytes("gzip");
|
||||
manyFilesBunArchiveGz = await Bun.Archive.from(manyFilesEntries).bytes("gzip");
|
||||
|
||||
smallBunArchive = await Bun.Archive.from(smallFiles).bytes();
|
||||
mediumBunArchive = await Bun.Archive.from(mediumFiles).bytes();
|
||||
largeBunArchive = await Bun.Archive.from(largeFiles).bytes();
|
||||
manyFilesBunArchive = await Bun.Archive.from(manyFilesEntries).bytes();
|
||||
}
|
||||
|
||||
// Create reusable extraction directories (overwriting is fine)
|
||||
const extractDirNodeTar = mkdtempSync(join(tmpdir(), "archive-bench-extract-node-"));
|
||||
const extractDirBun = mkdtempSync(join(tmpdir(), "archive-bench-extract-bun-"));
|
||||
const writeDirNodeTar = mkdtempSync(join(tmpdir(), "archive-bench-write-node-"));
|
||||
const writeDirBun = mkdtempSync(join(tmpdir(), "archive-bench-write-bun-"));
|
||||
|
||||
// ============================================================================
|
||||
// Create .tar (uncompressed) benchmarks
|
||||
// ============================================================================
|
||||
|
||||
group("create .tar (3 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(smallFiles).bytes();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("create .tar (3 x 100KB files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(largeFiles).bytes();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("create .tar (100 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), false);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(manyFilesEntries).bytes();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Create .tar.gz (compressed) benchmarks
|
||||
// ============================================================================
|
||||
|
||||
group("create .tar.gz (3 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(smallFiles).bytes("gzip");
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("create .tar.gz (3 x 100KB files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(largeFiles).bytes("gzip");
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("create .tar.gz (100 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), true);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(manyFilesEntries).bytes("gzip");
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Extract .tar (uncompressed) benchmarks
|
||||
// ============================================================================
|
||||
|
||||
group("extract .tar (3 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await extractNodeTarBuffer(smallTarBuffer, extractDirNodeTar);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(smallBunArchive).extract(extractDirBun);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("extract .tar (3 x 100KB files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await extractNodeTarBuffer(largeTarBuffer, extractDirNodeTar);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(largeBunArchive).extract(extractDirBun);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("extract .tar (100 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await extractNodeTarBuffer(manyFilesTarBuffer, extractDirNodeTar);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(manyFilesBunArchive).extract(extractDirBun);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Extract .tar.gz (compressed) benchmarks
|
||||
// ============================================================================
|
||||
|
||||
group("extract .tar.gz (3 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await extractNodeTarBuffer(smallTarGzBuffer, extractDirNodeTar);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(smallBunArchiveGz).extract(extractDirBun);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("extract .tar.gz (3 x 100KB files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await extractNodeTarBuffer(largeTarGzBuffer, extractDirNodeTar);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(largeBunArchiveGz).extract(extractDirBun);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("extract .tar.gz (100 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await extractNodeTarBuffer(manyFilesTarGzBuffer, extractDirNodeTar);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive", async () => {
|
||||
await Bun.Archive.from(manyFilesBunArchiveGz).extract(extractDirBun);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Write .tar to disk benchmarks
|
||||
// ============================================================================
|
||||
|
||||
let writeCounter = 0;
|
||||
|
||||
group("write .tar to disk (3 small files)", () => {
|
||||
bench("node-tar + writeFileSync", async () => {
|
||||
const buffer = await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar`), buffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.write", async () => {
|
||||
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar`), smallFiles);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("write .tar to disk (3 x 100KB files)", () => {
|
||||
bench("node-tar + writeFileSync", async () => {
|
||||
const buffer = await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], false);
|
||||
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar`), buffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.write", async () => {
|
||||
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar`), largeFiles);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("write .tar to disk (100 small files)", () => {
|
||||
bench("node-tar + writeFileSync", async () => {
|
||||
const buffer = await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), false);
|
||||
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar`), buffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.write", async () => {
|
||||
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar`), manyFilesEntries);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Write .tar.gz to disk benchmarks
|
||||
// ============================================================================
|
||||
|
||||
group("write .tar.gz to disk (3 small files)", () => {
|
||||
bench("node-tar + writeFileSync", async () => {
|
||||
const buffer = await createNodeTarBuffer(smallFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar.gz`), buffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.write", async () => {
|
||||
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar.gz`), smallFiles, "gzip");
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("write .tar.gz to disk (3 x 100KB files)", () => {
|
||||
bench("node-tar + writeFileSync", async () => {
|
||||
const buffer = await createNodeTarBuffer(largeFilesDir, ["file1.txt", "file2.txt", "file3.txt"], true);
|
||||
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar.gz`), buffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.write", async () => {
|
||||
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar.gz`), largeFiles, "gzip");
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("write .tar.gz to disk (100 small files)", () => {
|
||||
bench("node-tar + writeFileSync", async () => {
|
||||
const buffer = await createNodeTarBuffer(manyFilesDir, Object.keys(manyFilesEntries), true);
|
||||
writeFileSync(join(writeDirNodeTar, `archive-${writeCounter++}.tar.gz`), buffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.write", async () => {
|
||||
await Bun.Archive.write(join(writeDirBun, `archive-${writeCounter++}.tar.gz`), manyFilesEntries, "gzip");
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Get files array from archive (files() method) benchmarks
|
||||
// ============================================================================
|
||||
|
||||
// Helper to get files array from node-tar (reads all entries into memory)
|
||||
async function getFilesArrayNodeTar(buffer) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const files = new Map();
|
||||
let pending = 0;
|
||||
let closed = false;
|
||||
|
||||
const maybeResolve = () => {
|
||||
if (closed && pending === 0) {
|
||||
resolve(files);
|
||||
}
|
||||
};
|
||||
|
||||
const unpack = new Unpack({
|
||||
onReadEntry: entry => {
|
||||
if (entry.type === "File") {
|
||||
pending++;
|
||||
const chunks = [];
|
||||
entry.on("data", chunk => chunks.push(chunk));
|
||||
entry.on("end", () => {
|
||||
const content = Buffer.concat(chunks);
|
||||
// Create a File-like object similar to Bun.Archive.files()
|
||||
files.set(entry.path, new Blob([content]));
|
||||
pending--;
|
||||
maybeResolve();
|
||||
});
|
||||
}
|
||||
entry.resume(); // Drain the entry
|
||||
},
|
||||
});
|
||||
unpack.on("close", () => {
|
||||
closed = true;
|
||||
maybeResolve();
|
||||
});
|
||||
unpack.on("error", reject);
|
||||
unpack.end(buffer);
|
||||
});
|
||||
}
|
||||
|
||||
group("files() - get all files as Map (3 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await getFilesArrayNodeTar(smallTarBuffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.files()", async () => {
|
||||
await Bun.Archive.from(smallBunArchive).files();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("files() - get all files as Map (3 x 100KB files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await getFilesArrayNodeTar(largeTarBuffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.files()", async () => {
|
||||
await Bun.Archive.from(largeBunArchive).files();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("files() - get all files as Map (100 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await getFilesArrayNodeTar(manyFilesTarBuffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.files()", async () => {
|
||||
await Bun.Archive.from(manyFilesBunArchive).files();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("files() - get all files as Map from .tar.gz (3 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await getFilesArrayNodeTar(smallTarGzBuffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.files()", async () => {
|
||||
await Bun.Archive.from(smallBunArchiveGz).files();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("files() - get all files as Map from .tar.gz (100 small files)", () => {
|
||||
bench("node-tar", async () => {
|
||||
await getFilesArrayNodeTar(manyFilesTarGzBuffer);
|
||||
});
|
||||
|
||||
if (hasBunArchive) {
|
||||
bench("Bun.Archive.files()", async () => {
|
||||
await Bun.Archive.from(manyFilesBunArchiveGz).files();
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
// Cleanup
|
||||
rmSync(setupDir, { recursive: true, force: true });
|
||||
rmSync(extractDirNodeTar, { recursive: true, force: true });
|
||||
rmSync(extractDirBun, { recursive: true, force: true });
|
||||
rmSync(writeDirNodeTar, { recursive: true, force: true });
|
||||
rmSync(writeDirBun, { recursive: true, force: true });
|
||||
335
bench/snippets/array-of.js
Normal file
335
bench/snippets/array-of.js
Normal file
@@ -0,0 +1,335 @@
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
let sink;
|
||||
|
||||
// Integers
|
||||
bench("int: Array.of(1,2,3,4,5)", () => {
|
||||
sink = Array.of(1, 2, 3, 4, 5);
|
||||
});
|
||||
|
||||
bench("int: Array.of(100 elements)", () => {
|
||||
sink = Array.of(
|
||||
0,
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
8,
|
||||
9,
|
||||
10,
|
||||
11,
|
||||
12,
|
||||
13,
|
||||
14,
|
||||
15,
|
||||
16,
|
||||
17,
|
||||
18,
|
||||
19,
|
||||
20,
|
||||
21,
|
||||
22,
|
||||
23,
|
||||
24,
|
||||
25,
|
||||
26,
|
||||
27,
|
||||
28,
|
||||
29,
|
||||
30,
|
||||
31,
|
||||
32,
|
||||
33,
|
||||
34,
|
||||
35,
|
||||
36,
|
||||
37,
|
||||
38,
|
||||
39,
|
||||
40,
|
||||
41,
|
||||
42,
|
||||
43,
|
||||
44,
|
||||
45,
|
||||
46,
|
||||
47,
|
||||
48,
|
||||
49,
|
||||
50,
|
||||
51,
|
||||
52,
|
||||
53,
|
||||
54,
|
||||
55,
|
||||
56,
|
||||
57,
|
||||
58,
|
||||
59,
|
||||
60,
|
||||
61,
|
||||
62,
|
||||
63,
|
||||
64,
|
||||
65,
|
||||
66,
|
||||
67,
|
||||
68,
|
||||
69,
|
||||
70,
|
||||
71,
|
||||
72,
|
||||
73,
|
||||
74,
|
||||
75,
|
||||
76,
|
||||
77,
|
||||
78,
|
||||
79,
|
||||
80,
|
||||
81,
|
||||
82,
|
||||
83,
|
||||
84,
|
||||
85,
|
||||
86,
|
||||
87,
|
||||
88,
|
||||
89,
|
||||
90,
|
||||
91,
|
||||
92,
|
||||
93,
|
||||
94,
|
||||
95,
|
||||
96,
|
||||
97,
|
||||
98,
|
||||
99,
|
||||
);
|
||||
});
|
||||
|
||||
// Doubles
|
||||
bench("double: Array.of(1.1,2.2,3.3,4.4,5.5)", () => {
|
||||
sink = Array.of(1.1, 2.2, 3.3, 4.4, 5.5);
|
||||
});
|
||||
|
||||
bench("double: Array.of(100 elements)", () => {
|
||||
sink = Array.of(
|
||||
0.1,
|
||||
1.1,
|
||||
2.1,
|
||||
3.1,
|
||||
4.1,
|
||||
5.1,
|
||||
6.1,
|
||||
7.1,
|
||||
8.1,
|
||||
9.1,
|
||||
10.1,
|
||||
11.1,
|
||||
12.1,
|
||||
13.1,
|
||||
14.1,
|
||||
15.1,
|
||||
16.1,
|
||||
17.1,
|
||||
18.1,
|
||||
19.1,
|
||||
20.1,
|
||||
21.1,
|
||||
22.1,
|
||||
23.1,
|
||||
24.1,
|
||||
25.1,
|
||||
26.1,
|
||||
27.1,
|
||||
28.1,
|
||||
29.1,
|
||||
30.1,
|
||||
31.1,
|
||||
32.1,
|
||||
33.1,
|
||||
34.1,
|
||||
35.1,
|
||||
36.1,
|
||||
37.1,
|
||||
38.1,
|
||||
39.1,
|
||||
40.1,
|
||||
41.1,
|
||||
42.1,
|
||||
43.1,
|
||||
44.1,
|
||||
45.1,
|
||||
46.1,
|
||||
47.1,
|
||||
48.1,
|
||||
49.1,
|
||||
50.1,
|
||||
51.1,
|
||||
52.1,
|
||||
53.1,
|
||||
54.1,
|
||||
55.1,
|
||||
56.1,
|
||||
57.1,
|
||||
58.1,
|
||||
59.1,
|
||||
60.1,
|
||||
61.1,
|
||||
62.1,
|
||||
63.1,
|
||||
64.1,
|
||||
65.1,
|
||||
66.1,
|
||||
67.1,
|
||||
68.1,
|
||||
69.1,
|
||||
70.1,
|
||||
71.1,
|
||||
72.1,
|
||||
73.1,
|
||||
74.1,
|
||||
75.1,
|
||||
76.1,
|
||||
77.1,
|
||||
78.1,
|
||||
79.1,
|
||||
80.1,
|
||||
81.1,
|
||||
82.1,
|
||||
83.1,
|
||||
84.1,
|
||||
85.1,
|
||||
86.1,
|
||||
87.1,
|
||||
88.1,
|
||||
89.1,
|
||||
90.1,
|
||||
91.1,
|
||||
92.1,
|
||||
93.1,
|
||||
94.1,
|
||||
95.1,
|
||||
96.1,
|
||||
97.1,
|
||||
98.1,
|
||||
99.1,
|
||||
);
|
||||
});
|
||||
|
||||
// Objects
|
||||
bench("object: Array.of(obj x5)", () => {
|
||||
sink = Array.of({ a: 1 }, { a: 2 }, { a: 3 }, { a: 4 }, { a: 5 });
|
||||
});
|
||||
|
||||
bench("object: Array.of(100 elements)", () => {
|
||||
sink = Array.of(
|
||||
{ a: 0 },
|
||||
{ a: 1 },
|
||||
{ a: 2 },
|
||||
{ a: 3 },
|
||||
{ a: 4 },
|
||||
{ a: 5 },
|
||||
{ a: 6 },
|
||||
{ a: 7 },
|
||||
{ a: 8 },
|
||||
{ a: 9 },
|
||||
{ a: 10 },
|
||||
{ a: 11 },
|
||||
{ a: 12 },
|
||||
{ a: 13 },
|
||||
{ a: 14 },
|
||||
{ a: 15 },
|
||||
{ a: 16 },
|
||||
{ a: 17 },
|
||||
{ a: 18 },
|
||||
{ a: 19 },
|
||||
{ a: 20 },
|
||||
{ a: 21 },
|
||||
{ a: 22 },
|
||||
{ a: 23 },
|
||||
{ a: 24 },
|
||||
{ a: 25 },
|
||||
{ a: 26 },
|
||||
{ a: 27 },
|
||||
{ a: 28 },
|
||||
{ a: 29 },
|
||||
{ a: 30 },
|
||||
{ a: 31 },
|
||||
{ a: 32 },
|
||||
{ a: 33 },
|
||||
{ a: 34 },
|
||||
{ a: 35 },
|
||||
{ a: 36 },
|
||||
{ a: 37 },
|
||||
{ a: 38 },
|
||||
{ a: 39 },
|
||||
{ a: 40 },
|
||||
{ a: 41 },
|
||||
{ a: 42 },
|
||||
{ a: 43 },
|
||||
{ a: 44 },
|
||||
{ a: 45 },
|
||||
{ a: 46 },
|
||||
{ a: 47 },
|
||||
{ a: 48 },
|
||||
{ a: 49 },
|
||||
{ a: 50 },
|
||||
{ a: 51 },
|
||||
{ a: 52 },
|
||||
{ a: 53 },
|
||||
{ a: 54 },
|
||||
{ a: 55 },
|
||||
{ a: 56 },
|
||||
{ a: 57 },
|
||||
{ a: 58 },
|
||||
{ a: 59 },
|
||||
{ a: 60 },
|
||||
{ a: 61 },
|
||||
{ a: 62 },
|
||||
{ a: 63 },
|
||||
{ a: 64 },
|
||||
{ a: 65 },
|
||||
{ a: 66 },
|
||||
{ a: 67 },
|
||||
{ a: 68 },
|
||||
{ a: 69 },
|
||||
{ a: 70 },
|
||||
{ a: 71 },
|
||||
{ a: 72 },
|
||||
{ a: 73 },
|
||||
{ a: 74 },
|
||||
{ a: 75 },
|
||||
{ a: 76 },
|
||||
{ a: 77 },
|
||||
{ a: 78 },
|
||||
{ a: 79 },
|
||||
{ a: 80 },
|
||||
{ a: 81 },
|
||||
{ a: 82 },
|
||||
{ a: 83 },
|
||||
{ a: 84 },
|
||||
{ a: 85 },
|
||||
{ a: 86 },
|
||||
{ a: 87 },
|
||||
{ a: 88 },
|
||||
{ a: 89 },
|
||||
{ a: 90 },
|
||||
{ a: 91 },
|
||||
{ a: 92 },
|
||||
{ a: 93 },
|
||||
{ a: 94 },
|
||||
{ a: 95 },
|
||||
{ a: 96 },
|
||||
{ a: 97 },
|
||||
{ a: 98 },
|
||||
{ a: 99 },
|
||||
);
|
||||
});
|
||||
|
||||
await run();
|
||||
4
bench/snippets/ipc-json-child.mjs
Normal file
4
bench/snippets/ipc-json-child.mjs
Normal file
@@ -0,0 +1,4 @@
|
||||
// Child process for IPC benchmarks - echoes messages back to parent
|
||||
process.on("message", message => {
|
||||
process.send(message);
|
||||
});
|
||||
45
bench/snippets/ipc-json.mjs
Normal file
45
bench/snippets/ipc-json.mjs
Normal file
@@ -0,0 +1,45 @@
|
||||
import { fork } from "node:child_process";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const childPath = path.join(__dirname, "ipc-json-child.mjs");
|
||||
|
||||
const smallMessage = { type: "ping", id: 1 };
|
||||
const largeString = Buffer.alloc(10 * 1024 * 1024, "A").toString();
|
||||
const largeMessage = { type: "ping", id: 1, data: largeString };
|
||||
|
||||
async function runBenchmark(message, count) {
|
||||
let received = 0;
|
||||
const { promise, resolve } = Promise.withResolvers();
|
||||
|
||||
const child = fork(childPath, [], {
|
||||
stdio: ["ignore", "ignore", "ignore", "ipc"],
|
||||
serialization: "json",
|
||||
});
|
||||
|
||||
child.on("message", () => {
|
||||
received++;
|
||||
if (received >= count) {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
child.send(message);
|
||||
}
|
||||
|
||||
await promise;
|
||||
child.kill();
|
||||
}
|
||||
|
||||
bench("ipc json - small messages (1000 roundtrips)", async () => {
|
||||
await runBenchmark(smallMessage, 1000);
|
||||
});
|
||||
|
||||
bench("ipc json - 10MB messages (10 roundtrips)", async () => {
|
||||
await runBenchmark(largeMessage, 10);
|
||||
});
|
||||
|
||||
await run();
|
||||
57
bench/snippets/object-hasown.mjs
Normal file
57
bench/snippets/object-hasown.mjs
Normal file
@@ -0,0 +1,57 @@
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const obj = { a: 1, b: 2, c: 3 };
|
||||
const objDeep = { a: 1, b: 2, c: 3, d: 4, e: 5, f: 6, g: 7, h: 8 };
|
||||
const sym = Symbol("test");
|
||||
const objWithSymbol = { [sym]: 1, a: 2 };
|
||||
|
||||
const objs = [
|
||||
{ f: 50 },
|
||||
{ f: 50, g: 70 },
|
||||
{ g: 50, f: 70 },
|
||||
{ h: 50, f: 70 },
|
||||
{ z: 50, f: 70 },
|
||||
{ k: 50, f: 70 },
|
||||
];
|
||||
|
||||
bench("Object.hasOwn - hit", () => {
|
||||
return Object.hasOwn(obj, "a");
|
||||
});
|
||||
|
||||
bench("Object.hasOwn - miss", () => {
|
||||
return Object.hasOwn(obj, "z");
|
||||
});
|
||||
|
||||
bench("Object.hasOwn - symbol hit", () => {
|
||||
return Object.hasOwn(objWithSymbol, sym);
|
||||
});
|
||||
|
||||
bench("Object.hasOwn - symbol miss", () => {
|
||||
return Object.hasOwn(objWithSymbol, Symbol("other"));
|
||||
});
|
||||
|
||||
bench("Object.hasOwn - multiple shapes", () => {
|
||||
let result = true;
|
||||
for (let i = 0; i < objs.length; i++) {
|
||||
result = Object.hasOwn(objs[i], "f") && result;
|
||||
}
|
||||
return result;
|
||||
});
|
||||
|
||||
bench("Object.prototype.hasOwnProperty - hit", () => {
|
||||
return obj.hasOwnProperty("a");
|
||||
});
|
||||
|
||||
bench("Object.prototype.hasOwnProperty - miss", () => {
|
||||
return obj.hasOwnProperty("z");
|
||||
});
|
||||
|
||||
bench("in operator - hit", () => {
|
||||
return "a" in obj;
|
||||
});
|
||||
|
||||
bench("in operator - miss", () => {
|
||||
return "z" in obj;
|
||||
});
|
||||
|
||||
await run();
|
||||
7
bench/snippets/promise-race.mjs
Normal file
7
bench/snippets/promise-race.mjs
Normal file
@@ -0,0 +1,7 @@
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("Promise.race([p1, p2])", async function () {
|
||||
return await Promise.race([Promise.resolve(1), Promise.resolve(2)]);
|
||||
});
|
||||
|
||||
await run();
|
||||
@@ -112,12 +112,40 @@ const obj = {
|
||||
},
|
||||
};
|
||||
|
||||
bench("Response.json(obj)", async () => {
|
||||
const smallObj = { id: 1, name: "test" };
|
||||
|
||||
const arrayObj = {
|
||||
items: Array.from({ length: 100 }, (_, i) => ({ id: i, value: `item-${i}` })),
|
||||
};
|
||||
|
||||
bench("Response.json(obj)", () => {
|
||||
return Response.json(obj);
|
||||
});
|
||||
|
||||
bench("Response.json(obj).json()", async () => {
|
||||
return await Response.json(obj).json();
|
||||
bench("new Response(JSON.stringify(obj))", () => {
|
||||
return new Response(JSON.stringify(obj), {
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
});
|
||||
|
||||
bench("Response.json(smallObj)", () => {
|
||||
return Response.json(smallObj);
|
||||
});
|
||||
|
||||
bench("new Response(JSON.stringify(smallObj))", () => {
|
||||
return new Response(JSON.stringify(smallObj), {
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
});
|
||||
|
||||
bench("Response.json(arrayObj)", () => {
|
||||
return Response.json(arrayObj);
|
||||
});
|
||||
|
||||
bench("new Response(JSON.stringify(arrayObj))", () => {
|
||||
return new Response(JSON.stringify(arrayObj), {
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
34
bench/snippets/string-includes.mjs
Normal file
34
bench/snippets/string-includes.mjs
Normal file
@@ -0,0 +1,34 @@
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const shortStr = "The quick brown fox jumps over the lazy dog";
|
||||
const longStr = shortStr.repeat(100);
|
||||
|
||||
bench("String.includes - short, hit (middle)", () => {
|
||||
return shortStr.includes("jumps");
|
||||
});
|
||||
|
||||
bench("String.includes - short, hit (start)", () => {
|
||||
return shortStr.includes("The");
|
||||
});
|
||||
|
||||
bench("String.includes - short, hit (end)", () => {
|
||||
return shortStr.includes("dog");
|
||||
});
|
||||
|
||||
bench("String.includes - short, miss", () => {
|
||||
return shortStr.includes("cat");
|
||||
});
|
||||
|
||||
bench("String.includes - long, hit (middle)", () => {
|
||||
return longStr.includes("jumps");
|
||||
});
|
||||
|
||||
bench("String.includes - long, miss", () => {
|
||||
return longStr.includes("cat");
|
||||
});
|
||||
|
||||
bench("String.includes - with position", () => {
|
||||
return shortStr.includes("fox", 10);
|
||||
});
|
||||
|
||||
await run();
|
||||
48
bench/snippets/urlpattern.js
Normal file
48
bench/snippets/urlpattern.js
Normal file
@@ -0,0 +1,48 @@
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
const patterns = [
|
||||
{ name: "string pattern", input: "https://(sub.)?example(.com/)foo" },
|
||||
{ name: "hostname IDN", input: { hostname: "xn--caf-dma.com" } },
|
||||
{
|
||||
name: "pathname + search + hash + baseURL",
|
||||
input: {
|
||||
pathname: "/foo",
|
||||
search: "bar",
|
||||
hash: "baz",
|
||||
baseURL: "https://example.com:8080",
|
||||
},
|
||||
},
|
||||
{ name: "pathname with regex", input: { pathname: "/([[a-z]--a])" } },
|
||||
{ name: "named groups", input: { pathname: "/users/:id/posts/:postId" } },
|
||||
{ name: "wildcard", input: { pathname: "/files/*" } },
|
||||
];
|
||||
|
||||
const testURL = "https://sub.example.com/foo";
|
||||
|
||||
group("URLPattern parse (constructor)", () => {
|
||||
for (const { name, input } of patterns) {
|
||||
bench(name, () => {
|
||||
return new URLPattern(input);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("URLPattern.test()", () => {
|
||||
for (const { name, input } of patterns) {
|
||||
const pattern = new URLPattern(input);
|
||||
bench(name, () => {
|
||||
return pattern.test(testURL);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group("URLPattern.exec()", () => {
|
||||
for (const { name, input } of patterns) {
|
||||
const pattern = new URLPattern(input);
|
||||
bench(name, () => {
|
||||
return pattern.exec(testURL);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
await run();
|
||||
@@ -607,7 +607,7 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
|
||||
obj.llvm_codegen_threads = opts.llvm_codegen_threads orelse 0;
|
||||
}
|
||||
|
||||
obj.no_link_obj = opts.os != .windows;
|
||||
obj.no_link_obj = opts.os != .windows and !opts.no_llvm;
|
||||
|
||||
|
||||
if (opts.enable_asan and !enableFastBuild(b)) {
|
||||
|
||||
3
bun.lock
3
bun.lock
@@ -36,6 +36,7 @@
|
||||
},
|
||||
"overrides": {
|
||||
"@types/bun": "workspace:packages/@types/bun",
|
||||
"@types/node": "25.0.0",
|
||||
"bun-types": "workspace:packages/bun-types",
|
||||
},
|
||||
"packages": {
|
||||
@@ -155,7 +156,7 @@
|
||||
|
||||
"@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],
|
||||
|
||||
"@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="],
|
||||
"@types/node": ["@types/node@25.0.0", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-rl78HwuZlaDIUSeUKkmogkhebA+8K1Hy7tddZuJ3D0xV8pZSfsYGTsliGUol1JPzu9EKnTxPC4L1fiWouStRew=="],
|
||||
|
||||
"aggregate-error": ["aggregate-error@3.1.0", "", { "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA=="],
|
||||
|
||||
|
||||
@@ -419,12 +419,9 @@ execute_process(
|
||||
--command=list-outputs
|
||||
--sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
|
||||
--codegen-path=${CODEGEN_PATH}
|
||||
RESULT_VARIABLE bindgen_result
|
||||
OUTPUT_VARIABLE bindgen_outputs
|
||||
COMMAND_ERROR_IS_FATAL ANY
|
||||
)
|
||||
if(${bindgen_result})
|
||||
message(FATAL_ERROR "bindgenv2/script.ts exited with non-zero status")
|
||||
endif()
|
||||
foreach(output IN LISTS bindgen_outputs)
|
||||
if(output MATCHES "\.cpp$")
|
||||
list(APPEND BUN_BINDGENV2_CPP_OUTPUTS ${output})
|
||||
@@ -872,6 +869,7 @@ target_include_directories(${bun} PRIVATE
|
||||
${CODEGEN_PATH}
|
||||
${VENDOR_PATH}
|
||||
${VENDOR_PATH}/picohttpparser
|
||||
${VENDOR_PATH}/zlib
|
||||
${NODEJS_HEADERS_PATH}/include
|
||||
${NODEJS_HEADERS_PATH}/include/node
|
||||
)
|
||||
@@ -1199,6 +1197,29 @@ set_target_properties(${bun} PROPERTIES LINK_DEPENDS ${BUN_SYMBOLS_PATH})
|
||||
|
||||
include(SetupWebKit)
|
||||
|
||||
if(BUN_LINK_ONLY)
|
||||
register_command(
|
||||
TARGET
|
||||
${bun}
|
||||
TARGET_PHASE
|
||||
POST_BUILD
|
||||
COMMENT
|
||||
"Uploading link metadata"
|
||||
COMMAND
|
||||
${CMAKE_COMMAND} -E env
|
||||
BUN_VERSION=${VERSION}
|
||||
WEBKIT_DOWNLOAD_URL=${WEBKIT_DOWNLOAD_URL}
|
||||
WEBKIT_VERSION=${WEBKIT_VERSION}
|
||||
ZIG_COMMIT=${ZIG_COMMIT}
|
||||
${BUN_EXECUTABLE} ${CWD}/scripts/create-link-metadata.mjs ${BUILD_PATH} ${bun}
|
||||
SOURCES
|
||||
${BUN_ZIG_OUTPUT}
|
||||
${BUN_CPP_OUTPUT}
|
||||
ARTIFACTS
|
||||
${BUILD_PATH}/link-metadata.json
|
||||
)
|
||||
endif()
|
||||
|
||||
if(WIN32)
|
||||
if(DEBUG)
|
||||
target_link_libraries(${bun} PRIVATE
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
c-ares/c-ares
|
||||
COMMIT
|
||||
d3a507e920e7af18a5efb7f9f1d8044ed4750013
|
||||
3ac47ee46edd8ea40370222f91613fc16c434853
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
|
||||
@@ -48,6 +48,9 @@ if(NOT BUILDKITE_BUILD_STATUS EQUAL 0)
|
||||
endif()
|
||||
|
||||
file(READ ${BUILDKITE_BUILD_PATH}/build.json BUILDKITE_BUILD)
|
||||
# Escape backslashes so CMake doesn't interpret JSON escape sequences (e.g., \n in commit messages)
|
||||
string(REPLACE "\\" "\\\\" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
|
||||
|
||||
string(JSON BUILDKITE_BUILD_UUID GET ${BUILDKITE_BUILD} id)
|
||||
string(JSON BUILDKITE_JOBS GET ${BUILDKITE_BUILD} jobs)
|
||||
string(JSON BUILDKITE_JOBS_COUNT LENGTH ${BUILDKITE_JOBS})
|
||||
|
||||
@@ -5,18 +5,12 @@ if(NOT ENABLE_CCACHE OR CACHE_STRATEGY STREQUAL "none")
|
||||
return()
|
||||
endif()
|
||||
|
||||
if (CI AND NOT APPLE)
|
||||
setenv(CCACHE_DISABLE 1)
|
||||
return()
|
||||
endif()
|
||||
|
||||
find_command(
|
||||
VARIABLE
|
||||
CCACHE_PROGRAM
|
||||
COMMAND
|
||||
ccache
|
||||
REQUIRED
|
||||
${CI}
|
||||
)
|
||||
|
||||
if(NOT CCACHE_PROGRAM)
|
||||
|
||||
@@ -1,123 +0,0 @@
|
||||
# Setup sccache as the C and C++ compiler launcher to speed up builds by caching
|
||||
if(CACHE_STRATEGY STREQUAL "none")
|
||||
return()
|
||||
endif()
|
||||
|
||||
set(SCCACHE_SHARED_CACHE_REGION "us-west-1")
|
||||
set(SCCACHE_SHARED_CACHE_BUCKET "bun-build-sccache-store")
|
||||
|
||||
# Function to check if the system AWS credentials have access to the sccache S3 bucket.
|
||||
function(check_aws_credentials OUT_VAR)
|
||||
# Install dependencies first
|
||||
execute_process(
|
||||
COMMAND ${BUN_EXECUTABLE} install --frozen-lockfile
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/scripts/build-cache
|
||||
RESULT_VARIABLE INSTALL_EXIT_CODE
|
||||
OUTPUT_VARIABLE INSTALL_OUTPUT
|
||||
ERROR_VARIABLE INSTALL_ERROR
|
||||
)
|
||||
|
||||
if(NOT INSTALL_EXIT_CODE EQUAL 0)
|
||||
message(FATAL_ERROR "Failed to install dependencies in scripts/build-cache\n"
|
||||
"Exit code: ${INSTALL_EXIT_CODE}\n"
|
||||
"Output: ${INSTALL_OUTPUT}\n"
|
||||
"Error: ${INSTALL_ERROR}")
|
||||
endif()
|
||||
|
||||
# Check AWS credentials
|
||||
execute_process(
|
||||
COMMAND
|
||||
${BUN_EXECUTABLE}
|
||||
run
|
||||
have-access.ts
|
||||
--bucket ${SCCACHE_SHARED_CACHE_BUCKET}
|
||||
--region ${SCCACHE_SHARED_CACHE_REGION}
|
||||
WORKING_DIRECTORY
|
||||
${CMAKE_SOURCE_DIR}/scripts/build-cache
|
||||
RESULT_VARIABLE HAVE_ACCESS_EXIT_CODE
|
||||
)
|
||||
|
||||
if(HAVE_ACCESS_EXIT_CODE EQUAL 0)
|
||||
set(HAS_CREDENTIALS TRUE)
|
||||
else()
|
||||
set(HAS_CREDENTIALS FALSE)
|
||||
endif()
|
||||
|
||||
set(${OUT_VAR} ${HAS_CREDENTIALS} PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
# Configure sccache to use the local cache only.
|
||||
function(sccache_configure_local_filesystem)
|
||||
unsetenv(SCCACHE_BUCKET)
|
||||
unsetenv(SCCACHE_REGION)
|
||||
setenv(SCCACHE_DIR "${CACHE_PATH}/sccache")
|
||||
endfunction()
|
||||
|
||||
# Configure sccache to use the distributed cache (S3 + local).
|
||||
function(sccache_configure_distributed)
|
||||
setenv(SCCACHE_BUCKET "${SCCACHE_SHARED_CACHE_BUCKET}")
|
||||
setenv(SCCACHE_REGION "${SCCACHE_SHARED_CACHE_REGION}")
|
||||
setenv(SCCACHE_DIR "${CACHE_PATH}/sccache")
|
||||
endfunction()
|
||||
|
||||
function(sccache_configure_environment_ci)
|
||||
if(CACHE_STRATEGY STREQUAL "auto" OR CACHE_STRATEGY STREQUAL "distributed")
|
||||
check_aws_credentials(HAS_AWS_CREDENTIALS)
|
||||
if(HAS_AWS_CREDENTIALS)
|
||||
sccache_configure_distributed()
|
||||
message(NOTICE "sccache: Using distributed cache strategy.")
|
||||
else()
|
||||
message(FATAL_ERROR "CI CACHE_STRATEGY is set to '${CACHE_STRATEGY}', but no valid AWS "
|
||||
"credentials were found. Note that 'auto' requires AWS credentials to access the shared "
|
||||
"cache in CI.")
|
||||
endif()
|
||||
elseif(CACHE_STRATEGY STREQUAL "local")
|
||||
# We disallow this because we want our CI runs to always used the shared cache to accelerate
|
||||
# builds.
|
||||
# none, distributed and auto are all okay.
|
||||
#
|
||||
# If local is configured, it's as good as "none", so this is probably user error.
|
||||
message(FATAL_ERROR "CI CACHE_STRATEGY is set to 'local', which is not allowed.")
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
function(sccache_configure_environment_developer)
|
||||
# Local environments can use any strategy they like. S3 is set up in such a way so as to clean
|
||||
# itself from old entries automatically.
|
||||
if (CACHE_STRATEGY STREQUAL "auto" OR CACHE_STRATEGY STREQUAL "local")
|
||||
# In the local environment, we prioritize using the local cache. This is because sccache takes
|
||||
# into consideration the whole absolute path of the files being compiled, and it's very
|
||||
# unlikely users will have the same absolute paths on their local machines.
|
||||
sccache_configure_local_filesystem()
|
||||
message(NOTICE "sccache: Using local cache strategy.")
|
||||
elseif(CACHE_STRATEGY STREQUAL "distributed")
|
||||
check_aws_credentials(HAS_AWS_CREDENTIALS)
|
||||
if(HAS_AWS_CREDENTIALS)
|
||||
sccache_configure_distributed()
|
||||
message(NOTICE "sccache: Using distributed cache strategy.")
|
||||
else()
|
||||
message(FATAL_ERROR "CACHE_STRATEGY is set to 'distributed', but no valid AWS credentials "
|
||||
"were found.")
|
||||
endif()
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
find_command(VARIABLE SCCACHE_PROGRAM COMMAND sccache REQUIRED ${CI})
|
||||
if(NOT SCCACHE_PROGRAM)
|
||||
message(WARNING "sccache not found. Your builds will be slower.")
|
||||
return()
|
||||
endif()
|
||||
|
||||
set(SCCACHE_ARGS CMAKE_C_COMPILER_LAUNCHER CMAKE_CXX_COMPILER_LAUNCHER)
|
||||
foreach(arg ${SCCACHE_ARGS})
|
||||
setx(${arg} ${SCCACHE_PROGRAM})
|
||||
list(APPEND CMAKE_ARGS -D${arg}=${${arg}})
|
||||
endforeach()
|
||||
|
||||
setenv(SCCACHE_LOG "info")
|
||||
|
||||
if (CI)
|
||||
sccache_configure_environment_ci()
|
||||
else()
|
||||
sccache_configure_environment_developer()
|
||||
endif()
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION 6d0f3aac0b817cc01a846b3754b21271adedac12)
|
||||
set(WEBKIT_VERSION 1d0216219a3c52cb85195f48f19ba7d5db747ff7)
|
||||
endif()
|
||||
|
||||
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
|
||||
@@ -28,6 +28,7 @@ if(WEBKIT_LOCAL)
|
||||
# make jsc-compile-debug jsc-copy-headers
|
||||
include_directories(
|
||||
${WEBKIT_PATH}
|
||||
${WEBKIT_PATH}/JavaScriptCore/Headers
|
||||
${WEBKIT_PATH}/JavaScriptCore/Headers/JavaScriptCore
|
||||
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders
|
||||
${WEBKIT_PATH}/bmalloc/Headers
|
||||
@@ -90,7 +91,14 @@ if(EXISTS ${WEBKIT_PATH}/package.json)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
file(DOWNLOAD ${WEBKIT_DOWNLOAD_URL} ${CACHE_PATH}/${WEBKIT_FILENAME} SHOW_PROGRESS)
|
||||
file(
|
||||
DOWNLOAD ${WEBKIT_DOWNLOAD_URL} ${CACHE_PATH}/${WEBKIT_FILENAME} SHOW_PROGRESS
|
||||
STATUS WEBKIT_DOWNLOAD_STATUS
|
||||
)
|
||||
if(NOT "${WEBKIT_DOWNLOAD_STATUS}" MATCHES "^0;")
|
||||
message(FATAL_ERROR "Failed to download WebKit: ${WEBKIT_DOWNLOAD_STATUS}")
|
||||
endif()
|
||||
|
||||
file(ARCHIVE_EXTRACT INPUT ${CACHE_PATH}/${WEBKIT_FILENAME} DESTINATION ${CACHE_PATH} TOUCH)
|
||||
file(REMOVE ${CACHE_PATH}/${WEBKIT_FILENAME})
|
||||
file(REMOVE_RECURSE ${WEBKIT_PATH})
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM debian:bookworm-slim AS build
|
||||
FROM debian:trixie-slim AS build
|
||||
|
||||
# https://github.com/oven-sh/bun/releases
|
||||
ARG BUN_VERSION=latest
|
||||
@@ -55,7 +55,7 @@ RUN apt-get update -qq \
|
||||
&& which bun \
|
||||
&& bun --version
|
||||
|
||||
FROM debian:bookworm-slim
|
||||
FROM debian:trixie-slim
|
||||
|
||||
# Disable the runtime transpiler cache by default inside Docker containers.
|
||||
# On ephemeral containers, the cache is not useful
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM debian:bookworm-slim AS build
|
||||
FROM debian:trixie-slim AS build
|
||||
|
||||
# https://github.com/oven-sh/bun/releases
|
||||
ARG BUN_VERSION=latest
|
||||
@@ -56,7 +56,7 @@ RUN apt-get update -qq \
|
||||
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
|
||||
&& chmod +x /usr/local/bin/bun
|
||||
|
||||
FROM debian:bookworm
|
||||
FROM debian:trixie
|
||||
|
||||
COPY docker-entrypoint.sh /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM debian:bookworm-slim AS build
|
||||
FROM debian:trixie-slim AS build
|
||||
|
||||
# https://github.com/oven-sh/bun/releases
|
||||
ARG BUN_VERSION=latest
|
||||
@@ -55,7 +55,7 @@ RUN apt-get update -qq \
|
||||
&& which bun \
|
||||
&& bun --version
|
||||
|
||||
FROM gcr.io/distroless/base-nossl-debian11
|
||||
FROM gcr.io/distroless/base-nossl-debian13
|
||||
|
||||
# Disable the runtime transpiler cache by default inside Docker containers.
|
||||
# On ephemeral containers, the cache is not useful
|
||||
@@ -71,6 +71,7 @@ ENV PATH "${PATH}:/usr/local/bun-node-fallback-bin"
|
||||
|
||||
# Temporarily use the `build`-stage image binaries to create a symlink:
|
||||
RUN --mount=type=bind,from=build,source=/usr/bin,target=/usr/bin \
|
||||
--mount=type=bind,from=build,source=/etc/alternatives/which,target=/etc/alternatives/which \
|
||||
--mount=type=bind,from=build,source=/bin,target=/bin \
|
||||
--mount=type=bind,from=build,source=/usr/lib,target=/usr/lib \
|
||||
--mount=type=bind,from=build,source=/lib,target=/lib \
|
||||
|
||||
@@ -65,6 +65,7 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
|
||||
| `--chunk-names` | `--chunk-naming` | Renamed for consistency with naming in JS API |
|
||||
| `--color` | n/a | Always enabled |
|
||||
| `--drop` | `--drop` | |
|
||||
| n/a | `--feature` | Bun-specific. Enable feature flags for compile-time dead-code elimination via `import { feature } from "bun:bundle"` |
|
||||
| `--entry-names` | `--entry-naming` | Renamed for consistency with naming in JS API |
|
||||
| `--global-name` | n/a | Not applicable, Bun does not support `iife` output at this time |
|
||||
| `--ignore-annotations` | `--ignore-dce-annotations` | |
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -427,8 +427,8 @@ This will allow you to use TailwindCSS utility classes in your HTML and CSS file
|
||||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<link rel="stylesheet" href="tailwindcss" />
|
||||
<!-- [!code ++] -->
|
||||
<link rel="stylesheet" href="tailwindcss" />
|
||||
</head>
|
||||
<!-- the rest of your HTML... -->
|
||||
</html>
|
||||
@@ -448,8 +448,8 @@ Alternatively, you can import TailwindCSS in your CSS file:
|
||||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<link rel="stylesheet" href="./style.css" />
|
||||
<!-- [!code ++] -->
|
||||
<link rel="stylesheet" href="./style.css" />
|
||||
</head>
|
||||
<!-- the rest of your HTML... -->
|
||||
</html>
|
||||
@@ -492,6 +492,28 @@ Bun will lazily resolve and load each plugin and use them to bundle your routes.
|
||||
the CLI.
|
||||
</Note>
|
||||
|
||||
## Inline Environment Variables
|
||||
|
||||
Bun can replace `process.env.*` references in your frontend JavaScript and TypeScript with their actual values at build time. Configure the `env` option in your `bunfig.toml`:
|
||||
|
||||
```toml title="bunfig.toml" icon="settings"
|
||||
[serve.static]
|
||||
env = "PUBLIC_*" # only inline env vars starting with PUBLIC_ (recommended)
|
||||
# env = "inline" # inline all environment variables
|
||||
# env = "disable" # disable env var replacement (default)
|
||||
```
|
||||
|
||||
<Note>
|
||||
This only works with literal `process.env.FOO` references, not `import.meta.env` or indirect access like `const env =
|
||||
process.env; env.FOO`.
|
||||
|
||||
If an environment variable is not set, you may see runtime errors like `ReferenceError: process
|
||||
is not defined` in the browser.
|
||||
|
||||
</Note>
|
||||
|
||||
See the [HTML & static sites documentation](/bundler/html-static#inline-environment-variables) for more details on build-time configuration and examples.
|
||||
|
||||
## How It Works
|
||||
|
||||
Bun uses `HTMLRewriter` to scan for `<script>` and `<link>` tags in HTML files, uses them as entrypoints for Bun's bundler, generates an optimized bundle for the JavaScript/TypeScript/TSX/JSX and CSS files, and serves the result.
|
||||
|
||||
@@ -262,6 +262,93 @@ Then, reference TailwindCSS in your HTML via `<link>` tag, `@import` in CSS, or
|
||||
|
||||
<Info>Only one of those are necessary, not all three.</Info>
|
||||
|
||||
## Inline environment variables
|
||||
|
||||
Bun can replace `process.env.*` references in your JavaScript and TypeScript with their actual values at build time. This is useful for injecting configuration like API URLs or feature flags into your frontend code.
|
||||
|
||||
### Dev server (runtime)
|
||||
|
||||
To inline environment variables when using `bun ./index.html`, configure the `env` option in your `bunfig.toml`:
|
||||
|
||||
```toml title="bunfig.toml" icon="settings"
|
||||
[serve.static]
|
||||
env = "PUBLIC_*" # only inline env vars starting with PUBLIC_ (recommended)
|
||||
# env = "inline" # inline all environment variables
|
||||
# env = "disable" # disable env var replacement (default)
|
||||
```
|
||||
|
||||
<Note>
|
||||
This only works with literal `process.env.FOO` references, not `import.meta.env` or indirect access like `const env =
|
||||
process.env; env.FOO`.
|
||||
|
||||
If an environment variable is not set, you may see runtime errors like `ReferenceError: process
|
||||
is not defined` in the browser.
|
||||
|
||||
</Note>
|
||||
|
||||
Then run the dev server:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
PUBLIC_API_URL=https://api.example.com bun ./index.html
|
||||
```
|
||||
|
||||
### Build for production
|
||||
|
||||
When building static HTML for production, use the `env` option to inline environment variables:
|
||||
|
||||
<Tabs>
|
||||
<Tab title="CLI">
|
||||
```bash terminal icon="terminal"
|
||||
# Inline all environment variables
|
||||
bun build ./index.html --outdir=dist --env=inline
|
||||
|
||||
# Only inline env vars with a specific prefix (recommended)
|
||||
bun build ./index.html --outdir=dist --env=PUBLIC_*
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="API">
|
||||
```ts title="build.ts" icon="/icons/typescript.svg"
|
||||
// Inline all environment variables
|
||||
await Bun.build({
|
||||
entrypoints: ["./index.html"],
|
||||
outdir: "./dist",
|
||||
env: "inline", // [!code highlight]
|
||||
});
|
||||
|
||||
// Only inline env vars with a specific prefix (recommended)
|
||||
await Bun.build({
|
||||
entrypoints: ["./index.html"],
|
||||
outdir: "./dist",
|
||||
env: "PUBLIC_*", // [!code highlight]
|
||||
});
|
||||
```
|
||||
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
### Example
|
||||
|
||||
Given this source file:
|
||||
|
||||
```ts title="app.ts" icon="/icons/typescript.svg"
|
||||
const apiUrl = process.env.PUBLIC_API_URL;
|
||||
console.log(`API URL: ${apiUrl}`);
|
||||
```
|
||||
|
||||
And running with `PUBLIC_API_URL=https://api.example.com`:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
PUBLIC_API_URL=https://api.example.com bun build ./index.html --outdir=dist --env=PUBLIC_*
|
||||
```
|
||||
|
||||
The bundled output will contain:
|
||||
|
||||
```js title="dist/app.js" icon="/icons/javascript.svg"
|
||||
const apiUrl = "https://api.example.com";
|
||||
console.log(`API URL: ${apiUrl}`);
|
||||
```
|
||||
|
||||
## Echo console logs from browser to terminal
|
||||
|
||||
Bun's dev server supports streaming console logs from the browser to the terminal.
|
||||
|
||||
@@ -220,6 +220,78 @@ An array of paths corresponding to the entrypoints of our application. One bundl
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
### files
|
||||
|
||||
A map of file paths to their contents for in-memory bundling. This allows you to bundle virtual files that don't exist on disk, or override the contents of files that do exist. This option is only available in the JavaScript API.
|
||||
|
||||
File contents can be provided as a `string`, `Blob`, `TypedArray`, or `ArrayBuffer`.
|
||||
|
||||
#### Bundle entirely from memory
|
||||
|
||||
You can bundle code without any files on disk by providing all sources via `files`:
|
||||
|
||||
```ts title="build.ts" icon="/icons/typescript.svg"
|
||||
const result = await Bun.build({
|
||||
entrypoints: ["/app/index.ts"],
|
||||
files: {
|
||||
"/app/index.ts": `
|
||||
import { greet } from "./greet.ts";
|
||||
console.log(greet("World"));
|
||||
`,
|
||||
"/app/greet.ts": `
|
||||
export function greet(name: string) {
|
||||
return "Hello, " + name + "!";
|
||||
}
|
||||
`,
|
||||
},
|
||||
});
|
||||
|
||||
const output = await result.outputs[0].text();
|
||||
console.log(output);
|
||||
```
|
||||
|
||||
When all entrypoints are in the `files` map, the current working directory is used as the root.
|
||||
|
||||
#### Override files on disk
|
||||
|
||||
In-memory files take priority over files on disk. This lets you override specific files while keeping the rest of your codebase unchanged:
|
||||
|
||||
```ts title="build.ts" icon="/icons/typescript.svg"
|
||||
// Assume ./src/config.ts exists on disk with development settings
|
||||
await Bun.build({
|
||||
entrypoints: ["./src/index.ts"],
|
||||
files: {
|
||||
// Override config.ts with production values
|
||||
"./src/config.ts": `
|
||||
export const API_URL = "https://api.production.com";
|
||||
export const DEBUG = false;
|
||||
`,
|
||||
},
|
||||
outdir: "./dist",
|
||||
});
|
||||
```
|
||||
|
||||
#### Mix disk and virtual files
|
||||
|
||||
Real files on disk can import virtual files, and virtual files can import real files:
|
||||
|
||||
```ts title="build.ts" icon="/icons/typescript.svg"
|
||||
// ./src/index.ts exists on disk and imports "./generated.ts"
|
||||
await Bun.build({
|
||||
entrypoints: ["./src/index.ts"],
|
||||
files: {
|
||||
// Provide a virtual file that index.ts imports
|
||||
"./src/generated.ts": `
|
||||
export const BUILD_ID = "${crypto.randomUUID()}";
|
||||
export const BUILD_TIME = ${Date.now()};
|
||||
`,
|
||||
},
|
||||
outdir: "./dist",
|
||||
});
|
||||
```
|
||||
|
||||
This is useful for code generation, injecting build-time constants, or testing with mock modules.
|
||||
|
||||
### outdir
|
||||
|
||||
The directory where output files will be written.
|
||||
@@ -1141,6 +1213,157 @@ Remove function calls from a bundle. For example, `--drop=console` will remove a
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
### features
|
||||
|
||||
Enable compile-time feature flags for dead-code elimination. This provides a way to conditionally include or exclude code paths at bundle time using `import { feature } from "bun:bundle"`.
|
||||
|
||||
```ts title="app.ts" icon="/icons/typescript.svg"
|
||||
import { feature } from "bun:bundle";
|
||||
|
||||
if (feature("PREMIUM")) {
|
||||
// Only included when PREMIUM flag is enabled
|
||||
initPremiumFeatures();
|
||||
}
|
||||
|
||||
if (feature("DEBUG")) {
|
||||
// Only included when DEBUG flag is enabled
|
||||
console.log("Debug mode");
|
||||
}
|
||||
```
|
||||
|
||||
<Tabs>
|
||||
<Tab title="JavaScript">
|
||||
```ts title="build.ts" icon="/icons/typescript.svg"
|
||||
await Bun.build({
|
||||
entrypoints: ['./app.ts'],
|
||||
outdir: './out',
|
||||
features: ["PREMIUM"], // PREMIUM=true, DEBUG=false
|
||||
})
|
||||
```
|
||||
</Tab>
|
||||
<Tab title="CLI">
|
||||
```bash terminal icon="terminal"
|
||||
bun build ./app.ts --outdir ./out --feature PREMIUM
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
The `feature()` function is replaced with `true` or `false` at bundle time. Combined with minification, unreachable code is eliminated:
|
||||
|
||||
```ts title="Input" icon="/icons/typescript.svg"
|
||||
import { feature } from "bun:bundle";
|
||||
const mode = feature("PREMIUM") ? "premium" : "free";
|
||||
```
|
||||
|
||||
```js title="Output (with --feature PREMIUM --minify)" icon="/icons/javascript.svg"
|
||||
var mode = "premium";
|
||||
```
|
||||
|
||||
```js title="Output (without --feature PREMIUM, with --minify)" icon="/icons/javascript.svg"
|
||||
var mode = "free";
|
||||
```
|
||||
|
||||
**Key behaviors:**
|
||||
|
||||
- `feature()` requires a string literal argument — dynamic values are not supported
|
||||
- The `bun:bundle` import is completely removed from the output
|
||||
- Works with `bun build`, `bun run`, and `bun test`
|
||||
- Multiple flags can be enabled: `--feature FLAG_A --feature FLAG_B`
|
||||
- For type safety, augment the `Registry` interface to restrict `feature()` to known flags (see below)
|
||||
|
||||
**Use cases:**
|
||||
|
||||
- Platform-specific code (`feature("SERVER")` vs `feature("CLIENT")`)
|
||||
- Environment-based features (`feature("DEVELOPMENT")`)
|
||||
- Gradual feature rollouts
|
||||
- A/B testing variants
|
||||
- Paid tier features
|
||||
|
||||
**Type safety:** By default, `feature()` accepts any string. To get autocomplete and catch typos at compile time, create an `env.d.ts` file (or add to an existing `.d.ts`) and augment the `Registry` interface:
|
||||
|
||||
```ts title="env.d.ts" icon="/icons/typescript.svg"
|
||||
declare module "bun:bundle" {
|
||||
interface Registry {
|
||||
features: "DEBUG" | "PREMIUM" | "BETA_FEATURES";
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Ensure the file is included in your `tsconfig.json` (e.g., `"include": ["src", "env.d.ts"]`). Now `feature()` only accepts those flags, and invalid strings like `feature("TYPO")` become type errors.
|
||||
|
||||
### metafile
|
||||
|
||||
Generate metadata about the build in a structured format. The metafile contains information about all input files, output files, their sizes, imports, and exports. This is useful for:
|
||||
|
||||
- **Bundle analysis**: Understand what's contributing to bundle size
|
||||
- **Visualization**: Feed into tools like [esbuild's bundle analyzer](https://esbuild.github.io/analyze/) or other visualization tools
|
||||
- **Dependency tracking**: See the full import graph of your application
|
||||
- **CI integration**: Track bundle size changes over time
|
||||
|
||||
<Tabs>
|
||||
<Tab title="JavaScript">
|
||||
```ts title="build.ts" icon="/icons/typescript.svg"
|
||||
const result = await Bun.build({
|
||||
entrypoints: ['./src/index.ts'],
|
||||
outdir: './dist',
|
||||
metafile: true,
|
||||
});
|
||||
|
||||
if (result.metafile) {
|
||||
// Analyze inputs
|
||||
for (const [path, meta] of Object.entries(result.metafile.inputs)) {
|
||||
console.log(`${path}: ${meta.bytes} bytes`);
|
||||
}
|
||||
|
||||
// Analyze outputs
|
||||
for (const [path, meta] of Object.entries(result.metafile.outputs)) {
|
||||
console.log(`${path}: ${meta.bytes} bytes`);
|
||||
}
|
||||
|
||||
// Save for external analysis tools
|
||||
await Bun.write('./dist/meta.json', JSON.stringify(result.metafile));
|
||||
}
|
||||
```
|
||||
|
||||
</Tab>
|
||||
<Tab title="CLI">
|
||||
```bash terminal icon="terminal"
|
||||
bun build ./src/index.ts --outdir ./dist --metafile ./dist/meta.json
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
The metafile structure contains:
|
||||
|
||||
```ts
|
||||
interface BuildMetafile {
|
||||
inputs: {
|
||||
[path: string]: {
|
||||
bytes: number;
|
||||
imports: Array<{
|
||||
path: string;
|
||||
kind: ImportKind;
|
||||
original?: string; // Original specifier before resolution
|
||||
external?: boolean;
|
||||
}>;
|
||||
format?: "esm" | "cjs" | "json" | "css";
|
||||
};
|
||||
};
|
||||
outputs: {
|
||||
[path: string]: {
|
||||
bytes: number;
|
||||
inputs: {
|
||||
[path: string]: { bytesInOutput: number };
|
||||
};
|
||||
imports: Array<{ path: string; kind: ImportKind }>;
|
||||
exports: string[];
|
||||
entryPoint?: string;
|
||||
cssBundle?: string; // Associated CSS file for JS entry points
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
## Outputs
|
||||
|
||||
The `Bun.build` function returns a `Promise<BuildOutput>`, defined as:
|
||||
@@ -1150,6 +1373,7 @@ interface BuildOutput {
|
||||
outputs: BuildArtifact[];
|
||||
success: boolean;
|
||||
logs: Array<object>; // see docs for details
|
||||
metafile?: BuildMetafile; // only when metafile: true
|
||||
}
|
||||
|
||||
interface BuildArtifact extends Blob {
|
||||
|
||||
@@ -121,6 +121,7 @@
|
||||
"/runtime/file-io",
|
||||
"/runtime/streams",
|
||||
"/runtime/binary-data",
|
||||
"/runtime/archive",
|
||||
"/runtime/sql",
|
||||
"/runtime/sqlite",
|
||||
"/runtime/s3",
|
||||
@@ -326,6 +327,7 @@
|
||||
"group": "Utilities",
|
||||
"icon": "wrench",
|
||||
"pages": [
|
||||
"/guides/util/upgrade",
|
||||
"/guides/util/detect-bun",
|
||||
"/guides/util/version",
|
||||
"/guides/util/hash-a-password",
|
||||
|
||||
@@ -74,6 +74,12 @@ export default defineNuxtConfig({
|
||||
});
|
||||
```
|
||||
|
||||
Alternatively, you can set the preset via environment variable:
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
NITRO_PRESET=bun bun run build
|
||||
```
|
||||
|
||||
<Note>
|
||||
Some packages provide Bun-specific exports that Nitro will not bundle correctly using the default preset. In this
|
||||
case, you need to use Bun preset so that the packages will work correctly in production builds.
|
||||
|
||||
@@ -33,7 +33,7 @@ Alternatively, you can create a PM2 configuration file. Create a file named `pm2
|
||||
|
||||
```js pm2.config.js icon="file-code"
|
||||
module.exports = {
|
||||
title: "app", // Name of your application
|
||||
name: "app", // Name of your application
|
||||
script: "index.ts", // Entry point of your application
|
||||
interpreter: "bun", // Bun interpreter
|
||||
env: {
|
||||
|
||||
@@ -4,63 +4,59 @@ sidebarTitle: "SolidStart with Bun"
|
||||
mode: center
|
||||
---
|
||||
|
||||
<Warning>
|
||||
SolidStart currently relies on Node.js APIs that Bun does not yet implement. The guide below uses Bun to initialize a
|
||||
project and install dependencies, but it uses Node.js to run the dev server.
|
||||
</Warning>
|
||||
|
||||
---
|
||||
|
||||
Initialize a SolidStart app with `create-solid`.
|
||||
Initialize a SolidStart app with `create-solid`. You can specify the `--solidstart` flag to create a SolidStart project, and `--ts` for TypeScript support. When prompted for a template, select `basic` for a minimal starter app.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun create solid my-app
|
||||
bun create solid my-app --solidstart --ts
|
||||
```
|
||||
|
||||
```txt
|
||||
create-solid version 0.2.31
|
||||
|
||||
Welcome to the SolidStart setup wizard!
|
||||
|
||||
There are definitely bugs and some feature might not work yet.
|
||||
If you encounter an issue, have a look at
|
||||
https://github.com/solidjs/solid-start/issues and open a new one,
|
||||
if it is not already tracked.
|
||||
|
||||
✔ Which template do you want to use? › todomvc
|
||||
✔ Server Side Rendering? … yes
|
||||
✔ Use TypeScript? … yes
|
||||
cloned solidjs/solid-start#main to /path/to/my-app/.solid-start
|
||||
✔ Copied project files
|
||||
┌
|
||||
Create-Solid v0.6.11
|
||||
│
|
||||
◇ Project Name
|
||||
│ my-app
|
||||
│
|
||||
◇ Which template would you like to use?
|
||||
│ basic
|
||||
│
|
||||
◇ Project created 🎉
|
||||
│
|
||||
◇ To get started, run: ─╮
|
||||
│ │
|
||||
│ cd my-app │
|
||||
│ bun install │
|
||||
│ bun dev │
|
||||
│ │
|
||||
├────────────────────────╯
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
As instructed by the `create-solid` CLI, let's install our dependencies.
|
||||
As instructed by the `create-solid` CLI, install the dependencies.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
cd my-app
|
||||
bun install
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Then run the development server.
|
||||
Then run the development server with `bun dev`.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun run dev
|
||||
# or, equivalently
|
||||
bunx solid-start dev
|
||||
bun dev
|
||||
```
|
||||
|
||||
---
|
||||
```txt
|
||||
$ vinxi dev
|
||||
vinxi v0.5.8
|
||||
vinxi starting dev server
|
||||
|
||||
➜ Local: http://localhost:3000/
|
||||
➜ Network: use --host to expose
|
||||
```
|
||||
|
||||
Open [localhost:3000](http://localhost:3000). Any changes you make to `src/routes/index.tsx` will be hot-reloaded automatically.
|
||||
|
||||
<Frame>
|
||||

|
||||
</Frame>
|
||||
|
||||
---
|
||||
|
||||
Refer to the [SolidStart website](https://start.solidjs.com/getting-started/what-is-solidstart) for complete framework documentation.
|
||||
Refer to the [SolidStart website](https://docs.solidjs.com/solid-start) for complete framework documentation.
|
||||
|
||||
@@ -9,18 +9,42 @@ In Bun, `fetch` supports sending requests through an HTTP or HTTPS proxy. This i
|
||||
```ts proxy.ts icon="/icons/typescript.svg"
|
||||
await fetch("https://example.com", {
|
||||
// The URL of the proxy server
|
||||
proxy: "https://usertitle:password@proxy.example.com:8080",
|
||||
proxy: "https://username:password@proxy.example.com:8080",
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
The `proxy` option is a URL string that specifies the proxy server. It can include the username and password if the proxy requires authentication. It can be `http://` or `https://`.
|
||||
The `proxy` option can be a URL string or an object with `url` and optional `headers`. The URL can include the username and password if the proxy requires authentication. It can be `http://` or `https://`.
|
||||
|
||||
---
|
||||
|
||||
## Custom proxy headers
|
||||
|
||||
To send custom headers to the proxy server (useful for proxy authentication tokens, custom routing, etc.), use the object format:
|
||||
|
||||
```ts proxy-headers.ts icon="/icons/typescript.svg"
|
||||
await fetch("https://example.com", {
|
||||
proxy: {
|
||||
url: "https://proxy.example.com:8080",
|
||||
headers: {
|
||||
"Proxy-Authorization": "Bearer my-token",
|
||||
"X-Proxy-Region": "us-east-1",
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
The `headers` property accepts a plain object or a `Headers` instance. These headers are sent directly to the proxy server in `CONNECT` requests (for HTTPS targets) or in the proxy request (for HTTP targets).
|
||||
|
||||
If you provide a `Proxy-Authorization` header, it will override any credentials specified in the proxy URL.
|
||||
|
||||
---
|
||||
|
||||
## Environment variables
|
||||
|
||||
You can also set the `$HTTP_PROXY` or `$HTTPS_PROXY` environment variable to the proxy URL. This is useful when you want to use the same proxy for all requests.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
HTTPS_PROXY=https://usertitle:password@proxy.example.com:8080 bun run index.ts
|
||||
HTTPS_PROXY=https://username:password@proxy.example.com:8080 bun run index.ts
|
||||
```
|
||||
|
||||
@@ -8,7 +8,9 @@ Unlike other npm clients, Bun does not execute arbitrary lifecycle scripts for i
|
||||
|
||||
<Note>
|
||||
Bun includes a default allowlist of popular packages containing `postinstall` scripts that are known to be safe. You
|
||||
can see this list [here](https://github.com/oven-sh/bun/blob/main/src/install/default-trusted-dependencies.txt).
|
||||
can see this list [here](https://github.com/oven-sh/bun/blob/main/src/install/default-trusted-dependencies.txt). This
|
||||
default list only applies to packages installed from npm. For packages from other sources (such as `file:`, `link:`,
|
||||
`git:`, or `github:` dependencies), you must explicitly add them to `trustedDependencies`.
|
||||
</Note>
|
||||
|
||||
---
|
||||
|
||||
@@ -14,16 +14,6 @@ process.on("SIGINT", () => {
|
||||
|
||||
---
|
||||
|
||||
If you don't know which signal to listen for, you listen to the umbrella `"exit"` event.
|
||||
|
||||
```ts
|
||||
process.on("exit", code => {
|
||||
console.log(`Process exited with code ${code}`);
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
If you don't know which signal to listen for, you listen to the [`"beforeExit"`](https://nodejs.org/api/process.html#event-beforeexit) and [`"exit"`](https://nodejs.org/api/process.html#event-exit) events.
|
||||
|
||||
```ts
|
||||
|
||||
@@ -60,7 +60,7 @@ test("random", async () => {
|
||||
|
||||
expect(random).toHaveBeenCalled();
|
||||
expect(random).toHaveBeenCalledTimes(3);
|
||||
expect(random.mock.args).toEqual([[1], [2], [3]]);
|
||||
expect(random.mock.calls).toEqual([[1], [2], [3]]);
|
||||
expect(random.mock.results[0]).toEqual({ type: "return", value: a });
|
||||
});
|
||||
```
|
||||
|
||||
@@ -76,7 +76,7 @@ declare module "bun:test" {
|
||||
|
||||
You should now be able to use Testing Library in your tests
|
||||
|
||||
```ts matchers.d.ts icon="/icons/typescript.svg"
|
||||
```tsx myComponent.test.tsx icon="/icons/typescript.svg"
|
||||
import { test, expect } from "bun:test";
|
||||
import { screen, render } from "@testing-library/react";
|
||||
import { MyComponent } from "./myComponent";
|
||||
|
||||
@@ -4,22 +4,25 @@ sidebarTitle: Detect Bun
|
||||
mode: center
|
||||
---
|
||||
|
||||
The recommended way to conditionally detect when code is being executed with `bun` is to check for the existence of the `Bun` global.
|
||||
|
||||
This is similar to how you'd check for the existence of the `window` variable to detect when code is being executed in a browser.
|
||||
|
||||
```ts
|
||||
if (typeof Bun !== "undefined") {
|
||||
// this code will only run when the file is run with Bun
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
In TypeScript environments, the previous approach will result in a type error unless `@types/bun` is installed. To avoid this, you can check `process.versions` instead.
|
||||
The recommended way to detect when code is being executed with Bun is to check `process.versions.bun`. This works in both JavaScript and TypeScript without requiring any additional type definitions.
|
||||
|
||||
```ts
|
||||
if (process.versions.bun) {
|
||||
// this code will only run when the file is run with Bun
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Alternatively, you can check for the existence of the `Bun` global. This is similar to how you'd check for the existence of the `window` variable to detect when code is being executed in a browser.
|
||||
|
||||
<Note>
|
||||
This approach will result in a type error in TypeScript unless `@types/bun` is installed. You can install it with `bun
|
||||
add -d @types/bun`.
|
||||
</Note>
|
||||
|
||||
```ts
|
||||
if (typeof Bun !== "undefined") {
|
||||
// this code will only run when the file is run with Bun
|
||||
}
|
||||
```
|
||||
|
||||
93
docs/guides/util/upgrade.mdx
Normal file
93
docs/guides/util/upgrade.mdx
Normal file
@@ -0,0 +1,93 @@
|
||||
---
|
||||
title: Upgrade Bun to the latest version
|
||||
sidebarTitle: Upgrade Bun
|
||||
mode: center
|
||||
---
|
||||
|
||||
Bun can upgrade itself using the built-in `bun upgrade` command. This is the fastest way to get the latest features and bug fixes.
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
bun upgrade
|
||||
```
|
||||
|
||||
This downloads and installs the latest stable version of Bun, replacing the currently installed version.
|
||||
|
||||
<Note>To see the current version of Bun, run `bun --version`.</Note>
|
||||
|
||||
---
|
||||
|
||||
## Verify the upgrade
|
||||
|
||||
After upgrading, verify the new version:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
bun --version
|
||||
# Output: 1.x.y
|
||||
|
||||
# See the exact commit of the Bun binary
|
||||
bun --revision
|
||||
# Output: 1.x.y+abc123def
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Upgrade to canary builds
|
||||
|
||||
Canary builds are automatically released on every commit to the `main` branch. These are untested but useful for trying new features or verifying bug fixes before they're released.
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
bun upgrade --canary
|
||||
```
|
||||
|
||||
<Warning>Canary builds are not recommended for production use. They may contain bugs or breaking changes.</Warning>
|
||||
|
||||
---
|
||||
|
||||
## Switch back to stable
|
||||
|
||||
If you're on a canary build and want to return to the latest stable release:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
bun upgrade --stable
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Install a specific version
|
||||
|
||||
To install a specific version of Bun, use the install script with a version tag:
|
||||
|
||||
<Tabs>
|
||||
<Tab title="macOS & Linux">
|
||||
```bash terminal icon="terminal"
|
||||
curl -fsSL https://bun.sh/install | bash -s "bun-v1.3.3"
|
||||
```
|
||||
</Tab>
|
||||
<Tab title="Windows">
|
||||
```powershell PowerShell icon="windows"
|
||||
iex "& {$(irm https://bun.sh/install.ps1)} -Version 1.3.3"
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
---
|
||||
|
||||
## Package manager users
|
||||
|
||||
If you installed Bun via a package manager, use that package manager to upgrade instead of `bun upgrade` to avoid conflicts.
|
||||
|
||||
<Tip>
|
||||
**Homebrew users** <br />
|
||||
To avoid conflicts with Homebrew, use `brew upgrade bun` instead.
|
||||
|
||||
**Scoop users** <br />
|
||||
To avoid conflicts with Scoop, use `scoop update bun` instead.
|
||||
|
||||
</Tip>
|
||||
|
||||
---
|
||||
|
||||
## See also
|
||||
|
||||
- [Installation](/installation) — Install Bun for the first time
|
||||
- [Update packages](/pm/cli/update) — Update dependencies to latest versions
|
||||
@@ -3,6 +3,8 @@ title: "bunx"
|
||||
description: "Run packages from npm"
|
||||
---
|
||||
|
||||
import Bunx from "/snippets/cli/bunx.mdx";
|
||||
|
||||
<Note>`bunx` is an alias for `bun x`. The `bunx` CLI will be auto-installed when you install `bun`.</Note>
|
||||
|
||||
Use `bunx` to auto-install and run packages from `npm`. It's Bun's equivalent of `npx` or `yarn dlx`.
|
||||
@@ -52,6 +54,8 @@ To pass additional command-line flags and arguments through to the executable, p
|
||||
bunx my-cli --foo bar
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Shebangs
|
||||
|
||||
By default, Bun respects shebangs. If an executable is marked with `#!/usr/bin/env node`, Bun will spin up a `node` process to execute the file. However, in some cases it may be desirable to run executables using Bun's runtime, even if the executable indicates otherwise. To do so, include the `--bun` flag.
|
||||
@@ -81,3 +85,7 @@ To force bun to always be used with a script, use a shebang.
|
||||
```js dist/index.js icon="/icons/javascript.svg"
|
||||
#!/usr/bin/env bun
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
<Bunx />
|
||||
|
||||
@@ -189,7 +189,7 @@ Isolated installs are conceptually similar to pnpm, so migration should be strai
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
# Remove pnpm files
|
||||
$ rm -rf node_modules pnpm-lock.yaml
|
||||
rm -rf node_modules pnpm-lock.yaml
|
||||
|
||||
# Install with Bun's isolated linker
|
||||
bun install --linker isolated
|
||||
|
||||
@@ -46,6 +46,13 @@ Once added to `trustedDependencies`, install/re-install the package. Bun will re
|
||||
|
||||
The top 500 npm packages with lifecycle scripts are allowed by default. You can see the full list [here](https://github.com/oven-sh/bun/blob/main/src/install/default-trusted-dependencies.txt).
|
||||
|
||||
<Note>
|
||||
The default trusted dependencies list only applies to packages installed from npm. For packages from other sources
|
||||
(such as `file:`, `link:`, `git:`, or `github:` dependencies), you must explicitly add them to `trustedDependencies`
|
||||
to run their lifecycle scripts, even if the package name matches an entry in the default list. This prevents malicious
|
||||
packages from spoofing trusted package names through local file paths or git repositories.
|
||||
</Note>
|
||||
|
||||
---
|
||||
|
||||
## `--ignore-scripts`
|
||||
|
||||
@@ -72,6 +72,7 @@ The following options are supported:
|
||||
- `username`
|
||||
- `_password` (base64 encoded password)
|
||||
- `_auth` (base64 encoded username:password, e.g. `btoa(username + ":" + password)`)
|
||||
- `email`
|
||||
|
||||
The equivalent `bunfig.toml` option is to add a key in [`install.scopes`](/runtime/bunfig#install-registry):
|
||||
|
||||
@@ -109,3 +110,136 @@ The equivalent `bunfig.toml` option is [`install.exact`](/runtime/bunfig#install
|
||||
[install]
|
||||
exact = true
|
||||
```
|
||||
|
||||
### `ignore-scripts`: Skip lifecycle scripts
|
||||
|
||||
Prevents running lifecycle scripts during installation:
|
||||
|
||||
```ini .npmrc icon="npm"
|
||||
ignore-scripts=true
|
||||
```
|
||||
|
||||
This is equivalent to using the `--ignore-scripts` flag with `bun install`.
|
||||
|
||||
### `dry-run`: Preview changes without installing
|
||||
|
||||
Shows what would be installed without actually installing:
|
||||
|
||||
```ini .npmrc icon="npm"
|
||||
dry-run=true
|
||||
```
|
||||
|
||||
The equivalent `bunfig.toml` option is [`install.dryRun`](/runtime/bunfig#install-dryrun):
|
||||
|
||||
```toml bunfig.toml icon="settings"
|
||||
[install]
|
||||
dryRun = true
|
||||
```
|
||||
|
||||
### `cache`: Configure cache directory
|
||||
|
||||
Set the cache directory path, or disable caching:
|
||||
|
||||
```ini .npmrc icon="npm"
|
||||
# set a custom cache directory
|
||||
cache=/path/to/cache
|
||||
|
||||
# or disable caching
|
||||
cache=false
|
||||
```
|
||||
|
||||
The equivalent `bunfig.toml` option is [`install.cache`](/runtime/bunfig#install-cache):
|
||||
|
||||
```toml bunfig.toml icon="settings"
|
||||
[install.cache]
|
||||
# set a custom cache directory
|
||||
dir = "/path/to/cache"
|
||||
|
||||
# or disable caching
|
||||
disable = true
|
||||
```
|
||||
|
||||
### `ca` and `cafile`: Configure CA certificates
|
||||
|
||||
Configure custom CA certificates for registry connections:
|
||||
|
||||
```ini .npmrc icon="npm"
|
||||
# single CA certificate
|
||||
ca="-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----"
|
||||
|
||||
# multiple CA certificates
|
||||
ca[]="-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----"
|
||||
ca[]="-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----"
|
||||
|
||||
# or specify a path to a CA file
|
||||
cafile=/path/to/ca-bundle.crt
|
||||
```
|
||||
|
||||
### `omit` and `include`: Control dependency types
|
||||
|
||||
Control which dependency types are installed:
|
||||
|
||||
```ini .npmrc icon="npm"
|
||||
# omit dev dependencies
|
||||
omit=dev
|
||||
|
||||
# omit multiple types
|
||||
omit[]=dev
|
||||
omit[]=optional
|
||||
|
||||
# include specific types (overrides omit)
|
||||
include=dev
|
||||
```
|
||||
|
||||
Valid values: `dev`, `peer`, `optional`
|
||||
|
||||
### `install-strategy` and `node-linker`: Installation strategy
|
||||
|
||||
Control how packages are installed in `node_modules`. Bun supports two different configuration options for compatibility with different package managers.
|
||||
|
||||
**npm's `install-strategy`:**
|
||||
|
||||
```ini .npmrc icon="npm"
|
||||
# flat node_modules structure (default)
|
||||
install-strategy=hoisted
|
||||
|
||||
# symlinked structure
|
||||
install-strategy=linked
|
||||
```
|
||||
|
||||
**pnpm/yarn's `node-linker`:**
|
||||
|
||||
The `node-linker` option controls the installation mode. Bun supports values from both pnpm and yarn:
|
||||
|
||||
| Value | Description | Accepted by |
|
||||
| -------------- | ----------------------------------------------- | ----------- |
|
||||
| `isolated` | Symlinked structure with isolated dependencies | pnpm |
|
||||
| `hoisted` | Flat node_modules structure | pnpm |
|
||||
| `pnpm` | Symlinked structure (same as `isolated`) | yarn |
|
||||
| `node-modules` | Flat node_modules structure (same as `hoisted`) | yarn |
|
||||
|
||||
```ini .npmrc icon="npm"
|
||||
# symlinked/isolated mode
|
||||
node-linker=isolated
|
||||
node-linker=pnpm
|
||||
|
||||
# flat/hoisted mode
|
||||
node-linker=hoisted
|
||||
node-linker=node-modules
|
||||
```
|
||||
|
||||
### `public-hoist-pattern` and `hoist-pattern`: Control hoisting
|
||||
|
||||
Control which packages are hoisted to the root `node_modules`:
|
||||
|
||||
```ini .npmrc icon="npm"
|
||||
# packages matching this pattern will be hoisted to the root
|
||||
public-hoist-pattern=*eslint*
|
||||
|
||||
# multiple patterns
|
||||
public-hoist-pattern[]=*eslint*
|
||||
public-hoist-pattern[]=*prettier*
|
||||
|
||||
# control general hoisting behavior
|
||||
hoist-pattern=*
|
||||
```
|
||||
|
||||
@@ -14,7 +14,7 @@ It is strongly recommended to use [PowerShell 7 (`pwsh.exe`)](https://learn.micr
|
||||
By default, running unverified scripts are blocked.
|
||||
|
||||
```ps1
|
||||
> Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted
|
||||
Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted
|
||||
```
|
||||
|
||||
### System Dependencies
|
||||
@@ -22,7 +22,7 @@ By default, running unverified scripts are blocked.
|
||||
Bun v1.1 or later. We use Bun to run it's own code generators.
|
||||
|
||||
```ps1
|
||||
> irm bun.sh/install.ps1 | iex
|
||||
irm bun.sh/install.ps1 | iex
|
||||
```
|
||||
|
||||
[Visual Studio](https://visualstudio.microsoft.com) with the "Desktop Development with C++" workload. While installing, make sure to install Git as well, if Git for Windows is not already installed.
|
||||
@@ -30,7 +30,7 @@ Bun v1.1 or later. We use Bun to run it's own code generators.
|
||||
Visual Studio can be installed graphically using the wizard or through WinGet:
|
||||
|
||||
```ps1
|
||||
> winget install "Visual Studio Community 2022" --override "--add Microsoft.VisualStudio.Workload.NativeDesktop Microsoft.VisualStudio.Component.Git " -s msstore
|
||||
winget install "Visual Studio Community 2022" --override "--add Microsoft.VisualStudio.Workload.NativeDesktop Microsoft.VisualStudio.Component.Git " -s msstore
|
||||
```
|
||||
|
||||
After Visual Studio, you need the following:
|
||||
@@ -48,10 +48,10 @@ After Visual Studio, you need the following:
|
||||
[Scoop](https://scoop.sh) can be used to install these remaining tools easily.
|
||||
|
||||
```ps1 Scoop
|
||||
> irm https://get.scoop.sh | iex
|
||||
> scoop install nodejs-lts go rust nasm ruby perl sccache
|
||||
irm https://get.scoop.sh | iex
|
||||
scoop install nodejs-lts go rust nasm ruby perl ccache
|
||||
# scoop seems to be buggy if you install llvm and the rest at the same time
|
||||
> scoop install llvm@19.1.7
|
||||
scoop install llvm@19.1.7
|
||||
```
|
||||
|
||||
<Note>
|
||||
@@ -63,19 +63,19 @@ After Visual Studio, you need the following:
|
||||
If you intend on building WebKit locally (optional), you should install these packages:
|
||||
|
||||
```ps1 Scoop
|
||||
> scoop install make cygwin python
|
||||
scoop install make cygwin python
|
||||
```
|
||||
|
||||
From here on out, it is **expected you use a PowerShell Terminal with `.\scripts\vs-shell.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it:
|
||||
|
||||
```ps1
|
||||
> .\scripts\vs-shell.ps1
|
||||
.\scripts\vs-shell.ps1
|
||||
```
|
||||
|
||||
To verify, you can check for an MSVC-only command line such as `mt.exe`
|
||||
|
||||
```ps1
|
||||
> Get-Command mt
|
||||
Get-Command mt
|
||||
```
|
||||
|
||||
<Note>
|
||||
@@ -86,16 +86,16 @@ To verify, you can check for an MSVC-only command line such as `mt.exe`
|
||||
## Building
|
||||
|
||||
```ps1
|
||||
> bun run build
|
||||
bun run build
|
||||
|
||||
# after the initial `bun run build` you can use the following to build
|
||||
> ninja -Cbuild/debug
|
||||
ninja -Cbuild/debug
|
||||
```
|
||||
|
||||
If this was successful, you should have a `bun-debug.exe` in the `build/debug` folder.
|
||||
|
||||
```ps1
|
||||
> .\build\debug\bun-debug.exe --revision
|
||||
.\build\debug\bun-debug.exe --revision
|
||||
```
|
||||
|
||||
You should add this to `$Env:PATH`. The simplest way to do so is to open the start menu, type "Path", and then navigate the environment variables menu to add `C:\.....\bun\build\debug` to the user environment variable `PATH`. You should then restart your editor (if it does not update still, log out and log back in).
|
||||
@@ -111,15 +111,15 @@ You can run the test suite either using `bun test <path>` or by using the wrappe
|
||||
|
||||
```ps1
|
||||
# Setup
|
||||
> bun i --cwd packages\bun-internal-test
|
||||
bun i --cwd packages\bun-internal-test
|
||||
|
||||
# Run the entire test suite with reporter
|
||||
# the package.json script "test" uses "build/debug/bun-debug.exe" by default
|
||||
> bun run test
|
||||
bun run test
|
||||
|
||||
# Run an individual test file:
|
||||
> bun-debug test node\fs
|
||||
> bun-debug test "C:\bun\test\js\bun\resolve\import-meta.test.js"
|
||||
bun-debug test node\fs
|
||||
bun-debug test "C:\bun\test\js\bun\resolve\import-meta.test.js"
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
@@ -28,23 +28,23 @@ Using your system's package manager, install Bun's dependencies:
|
||||
<CodeGroup>
|
||||
|
||||
```bash macOS (Homebrew)
|
||||
$ brew install automake cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby sccache
|
||||
brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby
|
||||
```
|
||||
|
||||
```bash Ubuntu/Debian
|
||||
$ sudo apt install curl wget lsb-release software-properties-common cargo cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
|
||||
sudo apt install curl wget lsb-release software-properties-common cargo cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
|
||||
```
|
||||
|
||||
```bash Arch
|
||||
$ sudo pacman -S base-devel cmake git go libiconv libtool make ninja pkg-config python rust sed unzip ruby
|
||||
sudo pacman -S base-devel cmake git go libiconv libtool make ninja pkg-config python rust sed unzip ruby
|
||||
```
|
||||
|
||||
```bash Fedora
|
||||
$ sudo dnf install cargo clang19 llvm19 lld19 cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
|
||||
sudo dnf install cargo clang19 llvm19 lld19 cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
|
||||
```
|
||||
|
||||
```bash openSUSE Tumbleweed
|
||||
$ sudo zypper install go cmake ninja automake git icu rustup && rustup toolchain install stable
|
||||
sudo zypper install go cmake ninja automake git icu rustup && rustup toolchain install stable
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
@@ -56,59 +56,42 @@ Before starting, you will need to already have a release build of Bun installed,
|
||||
<CodeGroup>
|
||||
|
||||
```bash Native
|
||||
$ curl -fsSL https://bun.com/install | bash
|
||||
curl -fsSL https://bun.com/install | bash
|
||||
```
|
||||
|
||||
```bash npm
|
||||
$ npm install -g bun
|
||||
npm install -g bun
|
||||
```
|
||||
|
||||
```bash Homebrew
|
||||
$ brew tap oven-sh/bun
|
||||
$ brew install bun
|
||||
brew tap oven-sh/bun
|
||||
brew install bun
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
### Optional: Install `sccache`
|
||||
### Optional: Install `ccache`
|
||||
|
||||
sccache is used to cache compilation artifacts, significantly speeding up builds. It must be installed with S3 support:
|
||||
ccache is used to cache compilation artifacts, significantly speeding up builds:
|
||||
|
||||
```bash
|
||||
# For macOS
|
||||
$ brew install sccache
|
||||
brew install ccache
|
||||
|
||||
# For Linux. Note that the version in your package manager may not have S3 support.
|
||||
$ cargo install sccache --features=s3
|
||||
# For Ubuntu/Debian
|
||||
sudo apt install ccache
|
||||
|
||||
# For Arch
|
||||
sudo pacman -S ccache
|
||||
|
||||
# For Fedora
|
||||
sudo dnf install ccache
|
||||
|
||||
# For openSUSE
|
||||
sudo zypper install ccache
|
||||
```
|
||||
|
||||
This will install `sccache` with S3 support. Our build scripts will automatically detect and use `sccache` with our shared S3 cache. **Note**: Not all versions of `sccache` are compiled with S3 support, hence we recommend installing it via `cargo`.
|
||||
|
||||
#### Registering AWS Credentials for `sccache` (Core Developers Only)
|
||||
|
||||
Core developers have write access to the shared S3 cache. To enable write access, you must log in with AWS credentials. The easiest way to do this is to use the [`aws` CLI](https://aws.amazon.com/cli/) and invoke [`aws configure` to provide your AWS security info](https://docs.aws.amazon.com/cli/latest/reference/configure/).
|
||||
|
||||
The `cmake` scripts should automatically detect your AWS credentials from the environment or the `~/.aws/credentials` file.
|
||||
|
||||
<details>
|
||||
<summary>Logging in to the `aws` CLI</summary>
|
||||
|
||||
1. Install the AWS CLI by following [the official guide](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html).
|
||||
2. Log in to your AWS account console. A team member should provide you with your credentials.
|
||||
3. Click your name in the top right > Security credentials.
|
||||
4. Scroll to "Access keys" and create a new access key.
|
||||
5. Run `aws configure` in your terminal and provide the access key ID and secret access key when prompted.
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Common Issues You May Encounter</summary>
|
||||
|
||||
- To confirm that the cache is being used, you can use the `sccache --show-stats` command right after a build. This will expose very useful statistics, including cache hits/misses.
|
||||
- If you have multiple AWS profiles configured, ensure that the correct profile is set in the `AWS_PROFILE` environment variable.
|
||||
- `sccache` follows a server-client model. If you run into weird issues where `sccache` refuses to use S3, even though you have AWS credentials configured, try killing any running `sccache` servers with `sccache --stop-server` and then re-running the build.
|
||||
|
||||
</details>
|
||||
Our build scripts will automatically detect and use `ccache` if available. You can check cache statistics with `ccache --show-stats`.
|
||||
|
||||
## Install LLVM
|
||||
|
||||
@@ -117,24 +100,24 @@ Bun requires LLVM 19 (`clang` is part of LLVM). This version requirement is to m
|
||||
<CodeGroup>
|
||||
|
||||
```bash macOS (Homebrew)
|
||||
$ brew install llvm@19
|
||||
brew install llvm@19
|
||||
```
|
||||
|
||||
```bash Ubuntu/Debian
|
||||
$ # LLVM has an automatic installation script that is compatible with all versions of Ubuntu
|
||||
$ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 19 all
|
||||
# LLVM has an automatic installation script that is compatible with all versions of Ubuntu
|
||||
wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 19 all
|
||||
```
|
||||
|
||||
```bash Arch
|
||||
$ sudo pacman -S llvm clang lld
|
||||
sudo pacman -S llvm clang lld
|
||||
```
|
||||
|
||||
```bash Fedora
|
||||
$ sudo dnf install llvm clang lld-devel
|
||||
sudo dnf install llvm clang lld-devel
|
||||
```
|
||||
|
||||
```bash openSUSE Tumbleweed
|
||||
$ sudo zypper install clang19 lld19 llvm19
|
||||
sudo zypper install clang19 lld19 llvm19
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
@@ -144,7 +127,7 @@ If none of the above solutions apply, you will have to install it [manually](htt
|
||||
Make sure Clang/LLVM 19 is in your path:
|
||||
|
||||
```bash
|
||||
$ which clang-19
|
||||
which clang-19
|
||||
```
|
||||
|
||||
If not, run this to manually add it:
|
||||
@@ -154,12 +137,12 @@ If not, run this to manually add it:
|
||||
```bash macOS (Homebrew)
|
||||
# use fish_add_path if you're using fish
|
||||
# use path+="$(brew --prefix llvm@19)/bin" if you are using zsh
|
||||
$ export PATH="$(brew --prefix llvm@19)/bin:$PATH"
|
||||
export PATH="$(brew --prefix llvm@19)/bin:$PATH"
|
||||
```
|
||||
|
||||
```bash Arch
|
||||
# use fish_add_path if you're using fish
|
||||
$ export PATH="$PATH:/usr/lib/llvm19/bin"
|
||||
export PATH="$PATH:/usr/lib/llvm19/bin"
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
@@ -179,7 +162,7 @@ bun run build
|
||||
The binary will be located at `./build/debug/bun-debug`. It is recommended to add this to your `$PATH`. To verify the build worked, let's print the version number on the development build of Bun.
|
||||
|
||||
```bash
|
||||
$ build/debug/bun-debug --version
|
||||
build/debug/bun-debug --version
|
||||
x.y.z_debug
|
||||
```
|
||||
|
||||
@@ -278,17 +261,17 @@ WebKit is not cloned by default (to save time and disk space). To clone and buil
|
||||
|
||||
```bash
|
||||
# Clone WebKit into ./vendor/WebKit
|
||||
$ git clone https://github.com/oven-sh/WebKit vendor/WebKit
|
||||
git clone https://github.com/oven-sh/WebKit vendor/WebKit
|
||||
|
||||
# Check out the commit hash specified in `set(WEBKIT_VERSION <commit_hash>)` in cmake/tools/SetupWebKit.cmake
|
||||
$ git -C vendor/WebKit checkout <commit_hash>
|
||||
git -C vendor/WebKit checkout <commit_hash>
|
||||
|
||||
# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
|
||||
# Optionally, you can use `bun run jsc:build` for a release build
|
||||
bun run jsc:build:debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
|
||||
|
||||
# After an initial run of `make jsc-debug`, you can rebuild JSC with:
|
||||
$ cmake --build vendor/WebKit/WebKitBuild/Debug --target jsc && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
|
||||
cmake --build vendor/WebKit/WebKitBuild/Debug --target jsc && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
|
||||
|
||||
# Build bun with the local JSC build
|
||||
bun run build:local
|
||||
@@ -339,20 +322,20 @@ is not able to compile a simple test program.
|
||||
To fix the error, we need to update the GCC version to 11. To do this, we'll need to check if the latest version is available in the distribution's official repositories or use a third-party repository that provides GCC 11 packages. Here are general steps:
|
||||
|
||||
```bash
|
||||
$ sudo apt update
|
||||
$ sudo apt install gcc-11 g++-11
|
||||
sudo apt update
|
||||
sudo apt install gcc-11 g++-11
|
||||
# If the above command fails with `Unable to locate package gcc-11` we need
|
||||
# to add the APT repository
|
||||
$ sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
|
||||
sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
|
||||
# Now run `apt install` again
|
||||
$ sudo apt install gcc-11 g++-11
|
||||
sudo apt install gcc-11 g++-11
|
||||
```
|
||||
|
||||
Now, we need to set GCC 11 as the default compiler:
|
||||
|
||||
```bash
|
||||
$ sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 100
|
||||
$ sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-11 100
|
||||
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 100
|
||||
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-11 100
|
||||
```
|
||||
|
||||
### libarchive
|
||||
@@ -360,7 +343,7 @@ $ sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-11 100
|
||||
If you see an error on macOS when compiling `libarchive`, run:
|
||||
|
||||
```bash
|
||||
$ brew install pkg-config
|
||||
brew install pkg-config
|
||||
```
|
||||
|
||||
### macOS `library not found for -lSystem`
|
||||
@@ -368,7 +351,7 @@ $ brew install pkg-config
|
||||
If you see this error when compiling, run:
|
||||
|
||||
```bash
|
||||
$ xcode-select --install
|
||||
xcode-select --install
|
||||
```
|
||||
|
||||
### Cannot find `libatomic.a`
|
||||
|
||||
452
docs/runtime/archive.mdx
Normal file
452
docs/runtime/archive.mdx
Normal file
@@ -0,0 +1,452 @@
|
||||
---
|
||||
title: Archive
|
||||
description: Create and extract tar archives with Bun's fast native implementation
|
||||
---
|
||||
|
||||
Bun provides a fast, native implementation for working with tar archives through `Bun.Archive`. It supports creating archives from in-memory data, extracting archives to disk, and reading archive contents without extraction.
|
||||
|
||||
## Quickstart
|
||||
|
||||
**Create an archive from files:**
|
||||
|
||||
```ts
|
||||
const archive = new Bun.Archive({
|
||||
"hello.txt": "Hello, World!",
|
||||
"data.json": JSON.stringify({ foo: "bar" }),
|
||||
"nested/file.txt": "Nested content",
|
||||
});
|
||||
|
||||
// Write to disk
|
||||
await Bun.write("bundle.tar", archive);
|
||||
```
|
||||
|
||||
**Extract an archive:**
|
||||
|
||||
```ts
|
||||
const tarball = await Bun.file("package.tar.gz").bytes();
|
||||
const archive = new Bun.Archive(tarball);
|
||||
const entryCount = await archive.extract("./output");
|
||||
console.log(`Extracted ${entryCount} entries`);
|
||||
```
|
||||
|
||||
**Read archive contents without extracting:**
|
||||
|
||||
```ts
|
||||
const tarball = await Bun.file("package.tar.gz").bytes();
|
||||
const archive = new Bun.Archive(tarball);
|
||||
const files = await archive.files();
|
||||
|
||||
for (const [path, file] of files) {
|
||||
console.log(`${path}: ${await file.text()}`);
|
||||
}
|
||||
```
|
||||
|
||||
## Creating Archives
|
||||
|
||||
Use `new Bun.Archive()` to create an archive from an object where keys are file paths and values are file contents. By default, archives are uncompressed:
|
||||
|
||||
```ts
|
||||
// Creates an uncompressed tar archive (default)
|
||||
const archive = new Bun.Archive({
|
||||
"README.md": "# My Project",
|
||||
"src/index.ts": "console.log('Hello');",
|
||||
"package.json": JSON.stringify({ name: "my-project" }),
|
||||
});
|
||||
```
|
||||
|
||||
File contents can be:
|
||||
|
||||
- **Strings** - Text content
|
||||
- **Blobs** - Binary data
|
||||
- **ArrayBufferViews** (e.g., `Uint8Array`) - Raw bytes
|
||||
- **ArrayBuffers** - Raw binary data
|
||||
|
||||
```ts
|
||||
const data = "binary data";
|
||||
const arrayBuffer = new ArrayBuffer(8);
|
||||
|
||||
const archive = new Bun.Archive({
|
||||
"text.txt": "Plain text",
|
||||
"blob.bin": new Blob([data]),
|
||||
"bytes.bin": new Uint8Array([1, 2, 3, 4]),
|
||||
"buffer.bin": arrayBuffer,
|
||||
});
|
||||
```
|
||||
|
||||
### Writing Archives to Disk
|
||||
|
||||
Use `Bun.write()` to write an archive to disk:
|
||||
|
||||
```ts
|
||||
// Write uncompressed tar (default)
|
||||
const archive = new Bun.Archive({
|
||||
"file1.txt": "content1",
|
||||
"file2.txt": "content2",
|
||||
});
|
||||
await Bun.write("output.tar", archive);
|
||||
|
||||
// Write gzipped tar
|
||||
const compressed = new Bun.Archive({ "src/index.ts": "console.log('Hello');" }, { compress: "gzip" });
|
||||
await Bun.write("output.tar.gz", compressed);
|
||||
```
|
||||
|
||||
### Getting Archive Bytes
|
||||
|
||||
Get the archive data as bytes or a Blob:
|
||||
|
||||
```ts
|
||||
const archive = new Bun.Archive({ "hello.txt": "Hello, World!" });
|
||||
|
||||
// As Uint8Array
|
||||
const bytes = await archive.bytes();
|
||||
|
||||
// As Blob
|
||||
const blob = await archive.blob();
|
||||
|
||||
// With gzip compression (set at construction)
|
||||
const gzipped = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip" });
|
||||
const gzippedBytes = await gzipped.bytes();
|
||||
const gzippedBlob = await gzipped.blob();
|
||||
```
|
||||
|
||||
## Extracting Archives
|
||||
|
||||
### From Existing Archive Data
|
||||
|
||||
Create an archive from existing tar/tar.gz data:
|
||||
|
||||
```ts
|
||||
// From a file
|
||||
const tarball = await Bun.file("package.tar.gz").bytes();
|
||||
const archiveFromFile = new Bun.Archive(tarball);
|
||||
```
|
||||
|
||||
```ts
|
||||
// From a fetch response
|
||||
const response = await fetch("https://example.com/archive.tar.gz");
|
||||
const archiveFromFetch = new Bun.Archive(await response.blob());
|
||||
```
|
||||
|
||||
### Extracting to Disk
|
||||
|
||||
Use `.extract()` to write all files to a directory:
|
||||
|
||||
```ts
|
||||
const tarball = await Bun.file("package.tar.gz").bytes();
|
||||
const archive = new Bun.Archive(tarball);
|
||||
const count = await archive.extract("./extracted");
|
||||
console.log(`Extracted ${count} entries`);
|
||||
```
|
||||
|
||||
The target directory is created automatically if it doesn't exist. Existing files are overwritten. The returned count includes files, directories, and symlinks (on POSIX systems).
|
||||
|
||||
**Note**: On Windows, symbolic links in archives are always skipped during extraction. Bun does not attempt to create them regardless of privilege level. On Linux and macOS, symlinks are extracted normally.
|
||||
|
||||
**Security note**: Bun.Archive validates paths during extraction, rejecting absolute paths (POSIX `/`, Windows drive letters like `C:\` or `C:/`, and UNC paths like `\\server\share`). Path traversal components (`..`) are normalized away (e.g., `dir/sub/../file` becomes `dir/file`) to prevent directory escape attacks.
|
||||
|
||||
### Filtering Extracted Files
|
||||
|
||||
Use glob patterns to extract only specific files. Patterns are matched against archive entry paths normalized to use forward slashes (`/`). Positive patterns specify what to include, and negative patterns (prefixed with `!`) specify what to exclude. Negative patterns are applied after positive patterns, so **using only negative patterns will match nothing** (you must include a positive pattern like `**` first):
|
||||
|
||||
```ts
|
||||
const tarball = await Bun.file("package.tar.gz").bytes();
|
||||
const archive = new Bun.Archive(tarball);
|
||||
|
||||
// Extract only TypeScript files
|
||||
const tsCount = await archive.extract("./extracted", { glob: "**/*.ts" });
|
||||
|
||||
// Extract files from multiple directories
|
||||
const multiCount = await archive.extract("./extracted", {
|
||||
glob: ["src/**", "lib/**"],
|
||||
});
|
||||
```
|
||||
|
||||
Use negative patterns (prefixed with `!`) to exclude files. When mixing positive and negative patterns, entries must match at least one positive pattern and not match any negative pattern:
|
||||
|
||||
```ts
|
||||
// Extract everything except node_modules
|
||||
const distCount = await archive.extract("./extracted", {
|
||||
glob: ["**", "!node_modules/**"],
|
||||
});
|
||||
|
||||
// Extract source files but exclude tests
|
||||
const srcCount = await archive.extract("./extracted", {
|
||||
glob: ["src/**", "!**/*.test.ts", "!**/__tests__/**"],
|
||||
});
|
||||
```
|
||||
|
||||
## Reading Archive Contents
|
||||
|
||||
### Get All Files
|
||||
|
||||
Use `.files()` to get archive contents as a `Map` of `File` objects without extracting to disk. Unlike `extract()` which processes all entry types, `files()` returns only regular files (no directories):
|
||||
|
||||
```ts
|
||||
const tarball = await Bun.file("package.tar.gz").bytes();
|
||||
const archive = new Bun.Archive(tarball);
|
||||
const files = await archive.files();
|
||||
|
||||
for (const [path, file] of files) {
|
||||
console.log(`${path}: ${file.size} bytes`);
|
||||
console.log(await file.text());
|
||||
}
|
||||
```
|
||||
|
||||
Each `File` object includes:
|
||||
|
||||
- `name` - The file path within the archive (always uses forward slashes `/` as separators)
|
||||
- `size` - File size in bytes
|
||||
- `lastModified` - Modification timestamp
|
||||
- Standard `Blob` methods: `text()`, `arrayBuffer()`, `stream()`, etc.
|
||||
|
||||
**Note**: `files()` loads file contents into memory. For large archives, consider using `extract()` to write directly to disk instead.
|
||||
|
||||
### Error Handling
|
||||
|
||||
Archive operations can fail due to corrupted data, I/O errors, or invalid paths. Use try/catch to handle these cases:
|
||||
|
||||
```ts
|
||||
try {
|
||||
const tarball = await Bun.file("package.tar.gz").bytes();
|
||||
const archive = new Bun.Archive(tarball);
|
||||
const count = await archive.extract("./output");
|
||||
console.log(`Extracted ${count} entries`);
|
||||
} catch (e: unknown) {
|
||||
if (e instanceof Error) {
|
||||
const error = e as Error & { code?: string };
|
||||
if (error.code === "EACCES") {
|
||||
console.error("Permission denied");
|
||||
} else if (error.code === "ENOSPC") {
|
||||
console.error("Disk full");
|
||||
} else {
|
||||
console.error("Archive error:", error.message);
|
||||
}
|
||||
} else {
|
||||
console.error("Archive error:", String(e));
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Common error scenarios:
|
||||
|
||||
- **Corrupted/truncated archives** - `new Archive()` loads the archive data; errors may be deferred until read/extract operations
|
||||
- **Permission denied** - `extract()` throws if the target directory is not writable
|
||||
- **Disk full** - `extract()` throws if there's insufficient space
|
||||
- **Invalid paths** - Operations throw for malformed file paths
|
||||
|
||||
The count returned by `extract()` includes all successfully written entries (files, directories, and symlinks on POSIX systems).
|
||||
|
||||
**Security note**: Bun.Archive automatically validates paths during extraction. Absolute paths (POSIX `/`, Windows drive letters, UNC paths) and unsafe symlink targets are rejected. Path traversal components (`..`) are normalized away to prevent directory escape.
|
||||
|
||||
For additional security with untrusted archives, you can enumerate and validate paths before extraction:
|
||||
|
||||
```ts
|
||||
const archive = new Bun.Archive(untrustedData);
|
||||
const files = await archive.files();
|
||||
|
||||
// Optional: Custom validation for additional checks
|
||||
for (const [path] of files) {
|
||||
// Example: Reject hidden files
|
||||
if (path.startsWith(".") || path.includes("/.")) {
|
||||
throw new Error(`Hidden file rejected: ${path}`);
|
||||
}
|
||||
// Example: Whitelist specific directories
|
||||
if (!path.startsWith("src/") && !path.startsWith("lib/")) {
|
||||
throw new Error(`Unexpected path: ${path}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Extract to a controlled destination
|
||||
await archive.extract("./safe-output");
|
||||
```
|
||||
|
||||
When using `files()` with a glob pattern, an empty `Map` is returned if no files match:
|
||||
|
||||
```ts
|
||||
const matches = await archive.files("*.nonexistent");
|
||||
if (matches.size === 0) {
|
||||
console.log("No matching files found");
|
||||
}
|
||||
```
|
||||
|
||||
### Filtering with Glob Patterns
|
||||
|
||||
Pass a glob pattern to filter which files are returned:
|
||||
|
||||
```ts
|
||||
// Get only TypeScript files
|
||||
const tsFiles = await archive.files("**/*.ts");
|
||||
|
||||
// Get files in src directory
|
||||
const srcFiles = await archive.files("src/*");
|
||||
|
||||
// Get all JSON files (recursive)
|
||||
const jsonFiles = await archive.files("**/*.json");
|
||||
|
||||
// Get multiple file types with array of patterns
|
||||
const codeFiles = await archive.files(["**/*.ts", "**/*.js"]);
|
||||
```
|
||||
|
||||
Supported glob patterns (subset of [Bun.Glob](/docs/api/glob) syntax):
|
||||
|
||||
- `*` - Match any characters except `/`
|
||||
- `**` - Match any characters including `/`
|
||||
- `?` - Match single character
|
||||
- `[abc]` - Match character set
|
||||
- `{a,b}` - Match alternatives
|
||||
- `!pattern` - Exclude files matching pattern (negation). Must be combined with positive patterns; using only negative patterns matches nothing.
|
||||
|
||||
See [Bun.Glob](/docs/api/glob) for the full glob syntax including escaping and advanced patterns.
|
||||
|
||||
## Compression
|
||||
|
||||
Bun.Archive creates uncompressed tar archives by default. Use `{ compress: "gzip" }` to enable gzip compression:
|
||||
|
||||
```ts
|
||||
// Default: uncompressed tar
|
||||
const archive = new Bun.Archive({ "hello.txt": "Hello, World!" });
|
||||
|
||||
// Reading: automatically detects gzip
|
||||
const gzippedTarball = await Bun.file("archive.tar.gz").bytes();
|
||||
const readArchive = new Bun.Archive(gzippedTarball);
|
||||
|
||||
// Enable gzip compression
|
||||
const compressed = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip" });
|
||||
|
||||
// Gzip with custom level (1-12)
|
||||
const maxCompression = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip", level: 12 });
|
||||
```
|
||||
|
||||
The options accept:
|
||||
|
||||
- No options or `undefined` - Uncompressed tar (default)
|
||||
- `{ compress: "gzip" }` - Enable gzip compression at level 6
|
||||
- `{ compress: "gzip", level: number }` - Gzip with custom level 1-12 (1 = fastest, 12 = smallest)
|
||||
|
||||
## Examples
|
||||
|
||||
### Bundle Project Files
|
||||
|
||||
```ts
|
||||
import { Glob } from "bun";
|
||||
|
||||
// Collect source files
|
||||
const files: Record<string, string> = {};
|
||||
const glob = new Glob("src/**/*.ts");
|
||||
|
||||
for await (const path of glob.scan(".")) {
|
||||
// Normalize path separators to forward slashes for cross-platform compatibility
|
||||
const archivePath = path.replaceAll("\\", "/");
|
||||
files[archivePath] = await Bun.file(path).text();
|
||||
}
|
||||
|
||||
// Add package.json
|
||||
files["package.json"] = await Bun.file("package.json").text();
|
||||
|
||||
// Create compressed archive and write to disk
|
||||
const archive = new Bun.Archive(files, { compress: "gzip" });
|
||||
await Bun.write("bundle.tar.gz", archive);
|
||||
```
|
||||
|
||||
### Extract and Process npm Package
|
||||
|
||||
```ts
|
||||
const response = await fetch("https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz");
|
||||
const archive = new Bun.Archive(await response.blob());
|
||||
|
||||
// Get package.json
|
||||
const files = await archive.files("package/package.json");
|
||||
const packageJson = files.get("package/package.json");
|
||||
|
||||
if (packageJson) {
|
||||
const pkg = JSON.parse(await packageJson.text());
|
||||
console.log(`Package: ${pkg.name}@${pkg.version}`);
|
||||
}
|
||||
```
|
||||
|
||||
### Create Archive from Directory
|
||||
|
||||
```ts
|
||||
import { readdir } from "node:fs/promises";
|
||||
import { join } from "node:path";
|
||||
|
||||
async function archiveDirectory(dir: string, compress = false): Promise<Bun.Archive> {
|
||||
const files: Record<string, Blob> = {};
|
||||
|
||||
async function walk(currentDir: string, prefix: string = "") {
|
||||
const entries = await readdir(currentDir, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = join(currentDir, entry.name);
|
||||
const archivePath = prefix ? `${prefix}/${entry.name}` : entry.name;
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await walk(fullPath, archivePath);
|
||||
} else {
|
||||
files[archivePath] = Bun.file(fullPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await walk(dir);
|
||||
return new Bun.Archive(files, compress ? { compress: "gzip" } : undefined);
|
||||
}
|
||||
|
||||
const archive = await archiveDirectory("./my-project", true);
|
||||
await Bun.write("my-project.tar.gz", archive);
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
> **Note**: The following type signatures are simplified for documentation purposes. See [`packages/bun-types/bun.d.ts`](https://github.com/oven-sh/bun/blob/main/packages/bun-types/bun.d.ts) for the full type definitions.
|
||||
|
||||
```ts
|
||||
type ArchiveInput =
|
||||
| Record<string, string | Blob | Bun.ArrayBufferView | ArrayBufferLike>
|
||||
| Blob
|
||||
| Bun.ArrayBufferView
|
||||
| ArrayBufferLike;
|
||||
|
||||
type ArchiveOptions = {
|
||||
/** Compression algorithm. Currently only "gzip" is supported. */
|
||||
compress?: "gzip";
|
||||
/** Compression level 1-12 (default 6 when gzip is enabled). */
|
||||
level?: number;
|
||||
};
|
||||
|
||||
interface ArchiveExtractOptions {
|
||||
/** Glob pattern(s) to filter extraction. Supports negative patterns with "!" prefix. */
|
||||
glob?: string | readonly string[];
|
||||
}
|
||||
|
||||
class Archive {
|
||||
/**
|
||||
* Create an Archive from input data
|
||||
* @param data - Files to archive (as object) or existing archive data (as bytes/blob)
|
||||
* @param options - Compression options. Uncompressed by default.
|
||||
* Pass { compress: "gzip" } to enable compression.
|
||||
*/
|
||||
constructor(data: ArchiveInput, options?: ArchiveOptions);
|
||||
|
||||
/**
|
||||
* Extract archive to a directory
|
||||
* @returns Number of entries extracted (files, directories, and symlinks)
|
||||
*/
|
||||
extract(path: string, options?: ArchiveExtractOptions): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get archive as a Blob (uses compression setting from constructor)
|
||||
*/
|
||||
blob(): Promise<Blob>;
|
||||
|
||||
/**
|
||||
* Get archive as a Uint8Array (uses compression setting from constructor)
|
||||
*/
|
||||
bytes(): Promise<Uint8Array<ArrayBuffer>>;
|
||||
|
||||
/**
|
||||
* Get archive contents as File objects (regular files only, no directories)
|
||||
*/
|
||||
files(glob?: string | readonly string[]): Promise<Map<string, File>>;
|
||||
}
|
||||
```
|
||||
@@ -107,12 +107,34 @@ Bun supports the following loaders:
|
||||
|
||||
### `telemetry`
|
||||
|
||||
The `telemetry` field permits enabling or disabling analytics records. Bun records bundle timings (so we can answer with data, "is Bun getting faster?") and feature usage (e.g., "are people actually using macros?"). The request body size is about 60 bytes, so it's not a lot of data. By default, telemetry is enabled. This is equivalent to the `DO_NOT_TRACK` environment variable.
|
||||
The `telemetry` field is used to enable/disable analytics. By default, telemetry is enabled. This is equivalent to the `DO_NOT_TRACK` environment variable.
|
||||
|
||||
Currently we do not collect telemetry and this setting is only used for enabling/disabling anonymous crash reports, but in the future we plan to collect information like which Bun APIs are used most or how long `bun build` takes.
|
||||
|
||||
```toml title="bunfig.toml" icon="settings"
|
||||
telemetry = false
|
||||
```
|
||||
|
||||
### `env`
|
||||
|
||||
Configure automatic `.env` file loading. By default, Bun automatically loads `.env` files. To disable this behavior:
|
||||
|
||||
```toml title="bunfig.toml" icon="settings"
|
||||
# Disable automatic .env file loading
|
||||
env = false
|
||||
```
|
||||
|
||||
You can also use object syntax with the `file` property:
|
||||
|
||||
```toml title="bunfig.toml" icon="settings"
|
||||
[env]
|
||||
file = false
|
||||
```
|
||||
|
||||
This is useful in production environments or CI/CD pipelines where you want to rely solely on system environment variables.
|
||||
|
||||
Note: Explicitly provided environment files via `--env-file` will still be loaded even when default loading is disabled.
|
||||
|
||||
### `console`
|
||||
|
||||
Configure console output behavior.
|
||||
|
||||
@@ -315,6 +315,109 @@ if (typeof Bun !== "undefined") {
|
||||
|
||||
---
|
||||
|
||||
## Terminal (PTY) support
|
||||
|
||||
For interactive terminal applications, you can spawn a subprocess with a pseudo-terminal (PTY) attached using the `terminal` option. This makes the subprocess think it's running in a real terminal, enabling features like colored output, cursor movement, and interactive prompts.
|
||||
|
||||
```ts
|
||||
const proc = Bun.spawn(["bash"], {
|
||||
terminal: {
|
||||
cols: 80,
|
||||
rows: 24,
|
||||
data(terminal, data) {
|
||||
// Called when data is received from the terminal
|
||||
process.stdout.write(data);
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Write to the terminal
|
||||
proc.terminal.write("echo hello\n");
|
||||
|
||||
// Wait for the process to exit
|
||||
await proc.exited;
|
||||
|
||||
// Close the terminal
|
||||
proc.terminal.close();
|
||||
```
|
||||
|
||||
When the `terminal` option is provided:
|
||||
|
||||
- The subprocess sees `process.stdout.isTTY` as `true`
|
||||
- `stdin`, `stdout`, and `stderr` are all connected to the terminal
|
||||
- `proc.stdin`, `proc.stdout`, and `proc.stderr` return `null` — use the terminal instead
|
||||
- Access the terminal via `proc.terminal`
|
||||
|
||||
### Terminal options
|
||||
|
||||
| Option | Description | Default |
|
||||
| ------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------ |
|
||||
| `cols` | Number of columns | `80` |
|
||||
| `rows` | Number of rows | `24` |
|
||||
| `name` | Terminal type for PTY configuration (set `TERM` env var separately via `env` option) | `"xterm-256color"` |
|
||||
| `data` | Callback when data is received `(terminal, data) => void` | — |
|
||||
| `exit` | Callback when PTY stream closes (EOF or error). `exitCode` is PTY lifecycle status (0=EOF, 1=error), not subprocess exit code. Use `proc.exited` for process exit. | — |
|
||||
| `drain` | Callback when ready for more data `(terminal) => void` | — |
|
||||
|
||||
### Terminal methods
|
||||
|
||||
The `Terminal` object returned by `proc.terminal` has the following methods:
|
||||
|
||||
```ts
|
||||
// Write data to the terminal
|
||||
proc.terminal.write("echo hello\n");
|
||||
|
||||
// Resize the terminal
|
||||
proc.terminal.resize(120, 40);
|
||||
|
||||
// Set raw mode (disable line buffering and echo)
|
||||
proc.terminal.setRawMode(true);
|
||||
|
||||
// Keep event loop alive while terminal is open
|
||||
proc.terminal.ref();
|
||||
proc.terminal.unref();
|
||||
|
||||
// Close the terminal
|
||||
proc.terminal.close();
|
||||
```
|
||||
|
||||
### Reusable Terminal
|
||||
|
||||
You can create a terminal independently and reuse it across multiple subprocesses:
|
||||
|
||||
```ts
|
||||
await using terminal = new Bun.Terminal({
|
||||
cols: 80,
|
||||
rows: 24,
|
||||
data(term, data) {
|
||||
process.stdout.write(data);
|
||||
},
|
||||
});
|
||||
|
||||
// Spawn first process
|
||||
const proc1 = Bun.spawn(["echo", "first"], { terminal });
|
||||
await proc1.exited;
|
||||
|
||||
// Reuse terminal for another process
|
||||
const proc2 = Bun.spawn(["echo", "second"], { terminal });
|
||||
await proc2.exited;
|
||||
|
||||
// Terminal is closed automatically by `await using`
|
||||
```
|
||||
|
||||
When passing an existing `Terminal` object:
|
||||
|
||||
- The terminal can be reused across multiple spawns
|
||||
- You control when to close the terminal
|
||||
- The `exit` callback fires when you call `terminal.close()`, not when each subprocess exits
|
||||
- Use `proc.exited` to detect individual subprocess exits
|
||||
|
||||
This is useful for running multiple commands in sequence through the same terminal session.
|
||||
|
||||
<Note>Terminal support is only available on POSIX systems (Linux, macOS). It is not available on Windows.</Note>
|
||||
|
||||
---
|
||||
|
||||
## Blocking API (`Bun.spawnSync()`)
|
||||
|
||||
Bun provides a synchronous equivalent of `Bun.spawn` called `Bun.spawnSync`. This is a blocking API that supports the same inputs and parameters as `Bun.spawn`. It returns a `SyncSubprocess` object, which differs from `Subprocess` in a few ways.
|
||||
@@ -407,6 +510,7 @@ namespace SpawnOptions {
|
||||
timeout?: number;
|
||||
killSignal?: string | number;
|
||||
maxBuffer?: number;
|
||||
terminal?: TerminalOptions; // PTY support (POSIX only)
|
||||
}
|
||||
|
||||
type Readable =
|
||||
@@ -435,10 +539,11 @@ namespace SpawnOptions {
|
||||
}
|
||||
|
||||
interface Subprocess extends AsyncDisposable {
|
||||
readonly stdin: FileSink | number | undefined;
|
||||
readonly stdout: ReadableStream<Uint8Array> | number | undefined;
|
||||
readonly stderr: ReadableStream<Uint8Array> | number | undefined;
|
||||
readonly readable: ReadableStream<Uint8Array> | number | undefined;
|
||||
readonly stdin: FileSink | number | undefined | null;
|
||||
readonly stdout: ReadableStream<Uint8Array<ArrayBuffer>> | number | undefined | null;
|
||||
readonly stderr: ReadableStream<Uint8Array<ArrayBuffer>> | number | undefined | null;
|
||||
readonly readable: ReadableStream<Uint8Array<ArrayBuffer>> | number | undefined | null;
|
||||
readonly terminal: Terminal | undefined;
|
||||
readonly pid: number;
|
||||
readonly exited: Promise<number>;
|
||||
readonly exitCode: number | null;
|
||||
@@ -465,6 +570,28 @@ interface SyncSubprocess {
|
||||
pid: number;
|
||||
}
|
||||
|
||||
interface TerminalOptions {
|
||||
cols?: number;
|
||||
rows?: number;
|
||||
name?: string;
|
||||
data?: (terminal: Terminal, data: Uint8Array<ArrayBuffer>) => void;
|
||||
/** Called when PTY stream closes (EOF or error). exitCode is PTY lifecycle status (0=EOF, 1=error), not subprocess exit code. */
|
||||
exit?: (terminal: Terminal, exitCode: number, signal: string | null) => void;
|
||||
drain?: (terminal: Terminal) => void;
|
||||
}
|
||||
|
||||
interface Terminal extends AsyncDisposable {
|
||||
readonly stdin: number;
|
||||
readonly stdout: number;
|
||||
readonly closed: boolean;
|
||||
write(data: string | BufferSource): number;
|
||||
resize(cols: number, rows: number): void;
|
||||
setRawMode(enabled: boolean): void;
|
||||
ref(): void;
|
||||
unref(): void;
|
||||
close(): void;
|
||||
}
|
||||
|
||||
interface ResourceUsage {
|
||||
contextSwitches: {
|
||||
voluntary: number;
|
||||
|
||||
@@ -358,6 +358,8 @@ Bun represents [pointers](<https://en.wikipedia.org/wiki/Pointer_(computer_progr
|
||||
|
||||
**Why not `BigInt`?** `BigInt` is slower. JavaScript engines allocate a separate `BigInt` which means they can't fit into a regular JavaScript value. If you pass a `BigInt` to a function, it will be converted to a `number`
|
||||
|
||||
**Windows Note**: The Windows API type HANDLE does not represent a virtual address, and using `ptr` for it will _not_ work as expected. Use `u64` to safely represent HANDLE values.
|
||||
|
||||
</Accordion>
|
||||
|
||||
To convert from a `TypedArray` to a pointer:
|
||||
|
||||
@@ -193,15 +193,17 @@ This is the maximum amount of time a connection is allowed to be idle before the
|
||||
Thus far, the examples on this page have used the explicit `Bun.serve` API. Bun also supports an alternate syntax.
|
||||
|
||||
```ts server.ts
|
||||
import { type Serve } from "bun";
|
||||
import type { Serve } from "bun";
|
||||
|
||||
export default {
|
||||
fetch(req) {
|
||||
return new Response("Bun!");
|
||||
},
|
||||
} satisfies Serve;
|
||||
} satisfies Serve.Options<undefined>;
|
||||
```
|
||||
|
||||
The type parameter `<undefined>` represents WebSocket data — if you add a `websocket` handler with custom data attached via `server.upgrade(req, { data: ... })`, replace `undefined` with your data type.
|
||||
|
||||
Instead of passing the server options into `Bun.serve`, `export default` it. This file can be executed as-is; when Bun sees a file with a `default` export containing a `fetch` handler, it passes it into `Bun.serve` under the hood.
|
||||
|
||||
---
|
||||
|
||||
@@ -51,7 +51,7 @@ const response = await fetch("http://example.com", {
|
||||
|
||||
### Proxying requests
|
||||
|
||||
To proxy a request, pass an object with the `proxy` property set to a URL.
|
||||
To proxy a request, pass an object with the `proxy` property set to a URL string:
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com", {
|
||||
@@ -59,6 +59,22 @@ const response = await fetch("http://example.com", {
|
||||
});
|
||||
```
|
||||
|
||||
You can also use an object format to send custom headers to the proxy server:
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com", {
|
||||
proxy: {
|
||||
url: "http://proxy.com",
|
||||
headers: {
|
||||
"Proxy-Authorization": "Bearer my-token",
|
||||
"X-Custom-Proxy-Header": "value",
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
The `headers` are sent directly to the proxy in `CONNECT` requests (for HTTPS targets) or in the proxy request (for HTTP targets). If you provide a `Proxy-Authorization` header, it overrides any credentials in the proxy URL.
|
||||
|
||||
### Custom headers
|
||||
|
||||
To set custom headers, pass an object with the `headers` property set to an object.
|
||||
|
||||
@@ -127,3 +127,54 @@ const socket = await Bun.udpSocket({
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Socket options
|
||||
|
||||
UDP sockets support setting various socket options:
|
||||
|
||||
```ts
|
||||
const socket = await Bun.udpSocket({});
|
||||
|
||||
// Enable broadcasting to send packets to a broadcast address
|
||||
socket.setBroadcast(true);
|
||||
|
||||
// Set the IP TTL (time to live) for outgoing packets
|
||||
socket.setTTL(64);
|
||||
```
|
||||
|
||||
### Multicast
|
||||
|
||||
Bun supports multicast operations for UDP sockets. Use `addMembership` and `dropMembership` to join and leave multicast groups:
|
||||
|
||||
```ts
|
||||
const socket = await Bun.udpSocket({});
|
||||
|
||||
// Join a multicast group
|
||||
socket.addMembership("224.0.0.1");
|
||||
|
||||
// Join with a specific interface
|
||||
socket.addMembership("224.0.0.1", "192.168.1.100");
|
||||
|
||||
// Leave a multicast group
|
||||
socket.dropMembership("224.0.0.1");
|
||||
```
|
||||
|
||||
Additional multicast options:
|
||||
|
||||
```ts
|
||||
// Set TTL for multicast packets (number of network hops)
|
||||
socket.setMulticastTTL(2);
|
||||
|
||||
// Control whether multicast packets loop back to the local socket
|
||||
socket.setMulticastLoopback(true);
|
||||
|
||||
// Specify which interface to use for outgoing multicast packets
|
||||
socket.setMulticastInterface("192.168.1.100");
|
||||
```
|
||||
|
||||
For source-specific multicast (SSM), use `addSourceSpecificMembership` and `dropSourceSpecificMembership`:
|
||||
|
||||
```ts
|
||||
socket.addSourceSpecificMembership("10.0.0.1", "232.0.0.1");
|
||||
socket.dropSourceSpecificMembership("10.0.0.1", "232.0.0.1");
|
||||
```
|
||||
|
||||
@@ -23,10 +23,11 @@ if (!githubToken) {
|
||||
name: "github-token",
|
||||
value: githubToken,
|
||||
});
|
||||
|
||||
console.log("GitHub token stored");
|
||||
}
|
||||
|
||||
const response = await fetch("https://api.github.com/name", {
|
||||
const response = await fetch("https://api.github.com/user", {
|
||||
headers: { Authorization: `token ${githubToken}` },
|
||||
});
|
||||
|
||||
|
||||
@@ -172,12 +172,25 @@ const query = db.query(`select "Hello world" as message`);
|
||||
```
|
||||
|
||||
<Note>
|
||||
Use the `.prepare()` method to prepare a query _without_ caching it on the `Database` instance.
|
||||
**What does "cached" mean?**
|
||||
|
||||
```ts
|
||||
// compile the prepared statement
|
||||
const query = db.prepare("SELECT * FROM foo WHERE bar = ?");
|
||||
```
|
||||
The caching refers to the **compiled prepared statement** (the SQL bytecode), not the query results. When you call `db.query()` with the same SQL string multiple times, Bun returns the same cached `Statement` object instead of recompiling the SQL.
|
||||
|
||||
It is completely safe to reuse a cached statement with different parameter values:
|
||||
|
||||
```ts
|
||||
const query = db.query("SELECT * FROM users WHERE id = ?");
|
||||
query.get(1); // ✓ Works
|
||||
query.get(2); // ✓ Also works - parameters are bound fresh each time
|
||||
query.get(3); // ✓ Still works
|
||||
```
|
||||
|
||||
Use `.prepare()` instead of `.query()` when you want a fresh `Statement` instance that isn't cached, for example if you're dynamically generating SQL and don't want to fill the cache with one-off queries.
|
||||
|
||||
```ts
|
||||
// compile the prepared statement without caching
|
||||
const query = db.prepare("SELECT * FROM foo WHERE bar = ?");
|
||||
```
|
||||
|
||||
</Note>
|
||||
|
||||
@@ -190,7 +203,7 @@ SQLite supports [write-ahead log mode](https://www.sqlite.org/wal.html) (WAL) wh
|
||||
To enable WAL mode, run this pragma query at the beginning of your application:
|
||||
|
||||
```ts db.ts icon="/icons/typescript.svg"
|
||||
db.exec("PRAGMA journal_mode = WAL;");
|
||||
db.run("PRAGMA journal_mode = WAL;");
|
||||
```
|
||||
|
||||
<Accordion title="What is WAL mode?">
|
||||
@@ -290,7 +303,7 @@ Internally, this calls [`sqlite3_reset`](https://www.sqlite.org/capi3ref.html#sq
|
||||
|
||||
### `.run()`
|
||||
|
||||
Use `.run()` to run a query and get back `undefined`. This is useful for schema-modifying queries (e.g. `CREATE TABLE`) or bulk write operations.
|
||||
Use `.run()` to run a query and get back an object with execution metadata. This is useful for schema-modifying queries (e.g. `CREATE TABLE`) or bulk write operations.
|
||||
|
||||
```ts db.ts icon="/icons/typescript.svg" highlight={2}
|
||||
const query = db.query(`create table foo;`);
|
||||
@@ -650,8 +663,8 @@ class Database {
|
||||
},
|
||||
);
|
||||
|
||||
prepare<ReturnType, Params>(sql: string): Statement<ReturnType, Params>;
|
||||
query<ReturnType, Params>(sql: string): Statement<ReturnType, Params>;
|
||||
query<ReturnType, ParamsType>(sql: string): Statement<ReturnType, ParamsType>;
|
||||
prepare<ReturnType, ParamsType>(sql: string): Statement<ReturnType, ParamsType>;
|
||||
run(sql: string, params?: SQLQueryBindings): { lastInsertRowid: number; changes: number };
|
||||
exec = this.run;
|
||||
|
||||
@@ -664,14 +677,14 @@ class Database {
|
||||
close(throwOnError?: boolean): void;
|
||||
}
|
||||
|
||||
class Statement<ReturnType, Params> {
|
||||
all(params: Params): ReturnType[];
|
||||
get(params: Params): ReturnType | undefined;
|
||||
run(params: Params): {
|
||||
class Statement<ReturnType, ParamsType> {
|
||||
all(...params: ParamsType[]): ReturnType[];
|
||||
get(...params: ParamsType[]): ReturnType | null;
|
||||
run(...params: ParamsType[]): {
|
||||
lastInsertRowid: number;
|
||||
changes: number;
|
||||
};
|
||||
values(params: Params): unknown[][];
|
||||
values(...params: ParamsType[]): unknown[][];
|
||||
|
||||
finalize(): void; // destroy statement and clean up resources
|
||||
toString(): string; // serialize to SQL
|
||||
@@ -682,7 +695,7 @@ class Statement<ReturnType, Params> {
|
||||
paramsCount: number; // the number of parameters expected by the statement
|
||||
native: any; // the native object representing the statement
|
||||
|
||||
as(Class: new () => ReturnType): this;
|
||||
as<T>(Class: new (...args: any[]) => T): Statement<T, ParamsType>;
|
||||
}
|
||||
|
||||
type SQLQueryBindings =
|
||||
|
||||
@@ -19,7 +19,7 @@ If you're looking to create a brand new empty project, use [`bun init`](/runtime
|
||||
`bun create ./MyComponent.tsx` turns an existing React component into a complete dev environment with hot reload and production builds in one command.
|
||||
|
||||
```bash
|
||||
$ bun create ./MyComponent.jsx # .tsx also supported
|
||||
bun create ./MyComponent.jsx # .tsx also supported
|
||||
```
|
||||
|
||||
<Frame>
|
||||
|
||||
49
docs/snippets/cli/bunx.mdx
Normal file
49
docs/snippets/cli/bunx.mdx
Normal file
@@ -0,0 +1,49 @@
|
||||
## Usage
|
||||
|
||||
```bash
|
||||
bunx [flags] <package>[@version] [flags and arguments for the package]
|
||||
```
|
||||
|
||||
Execute an npm package executable (CLI), automatically installing into a global shared cache if not installed in `node_modules`.
|
||||
|
||||
### Flags
|
||||
|
||||
<ParamField path="--bun" type="boolean">
|
||||
Force the command to run with Bun instead of Node.js, even if the executable contains a Node shebang (`#!/usr/bin/env
|
||||
node`)
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="-p, --package" type="string">
|
||||
Specify package to install when binary name differs from package name
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="--no-install" type="boolean">
|
||||
Skip installation if package is not already installed
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="--verbose" type="boolean">
|
||||
Enable verbose output during installation
|
||||
</ParamField>
|
||||
|
||||
<ParamField path="--silent" type="boolean">
|
||||
Suppress output during installation
|
||||
</ParamField>
|
||||
|
||||
### Examples
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
# Run Prisma migrations
|
||||
bunx prisma migrate
|
||||
|
||||
# Format a file with Prettier
|
||||
bunx prettier foo.js
|
||||
|
||||
# Run a specific version of a package
|
||||
bunx uglify-js@3.14.0 app.js
|
||||
|
||||
# Use --package when binary name differs from package name
|
||||
bunx -p @angular/cli ng new my-app
|
||||
|
||||
# Force running with Bun instead of Node.js, even if the executable contains a Node shebang
|
||||
bunx --bun vite dev foo.js
|
||||
```
|
||||
@@ -149,25 +149,25 @@ div.callout .code-block {
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
|
||||
.code-block[language="shellscript"] code span.line:not(:empty):has(span)::before {
|
||||
[language="shellscript"] code span.line:not(:empty):has(span)::before {
|
||||
content: "$ ";
|
||||
color: #6272a4;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.code-block[language="shellscript"] code span.line:has(> span:first-child[style*="color: rgb(98, 114, 164)"])::before,
|
||||
.code-block[language="shellscript"] code span.line:has(> span:first-child[style*="#6272A4"])::before {
|
||||
[language="shellscript"] code span.line:has(> span:first-child[style*="color: rgb(98, 114, 164)"])::before,
|
||||
[language="shellscript"] code span.line:has(> span:first-child[style*="#6272A4"])::before {
|
||||
content: "";
|
||||
}
|
||||
|
||||
.code-block[language="powershell"] code span.line:not(:empty):has(span)::before {
|
||||
[language="powershell"] code span.line:not(:empty):has(span)::before {
|
||||
content: "> ";
|
||||
color: #6272a4;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.code-block[language="powershell"] code span.line:has(> span:first-child[style*="color: rgb(98, 114, 164)"])::before,
|
||||
.code-block[language="powershell"] code span.line:has(> span:first-child[style*="#6272A4"])::before {
|
||||
[language="powershell"] code span.line:has(> span:first-child[style*="color: rgb(98, 114, 164)"])::before,
|
||||
[language="powershell"] code span.line:has(> span:first-child[style*="#6272A4"])::before {
|
||||
content: "";
|
||||
}
|
||||
|
||||
|
||||
@@ -376,16 +376,18 @@ timeout = 10000
|
||||
|
||||
## Environment Variables
|
||||
|
||||
You can also set environment variables in your configuration that affect test behavior:
|
||||
Environment variables for tests should be set using `.env` files. Bun automatically loads `.env` files from your project root. For test-specific variables, create a `.env.test` file:
|
||||
|
||||
```toml title="bunfig.toml" icon="settings"
|
||||
[env]
|
||||
NODE_ENV = "test"
|
||||
DATABASE_URL = "postgresql://localhost:5432/test_db"
|
||||
LOG_LEVEL = "error"
|
||||
```ini title=".env.test" icon="settings"
|
||||
NODE_ENV=test
|
||||
DATABASE_URL=postgresql://localhost:5432/test_db
|
||||
LOG_LEVEL=error
|
||||
```
|
||||
|
||||
[test]
|
||||
coverage = true
|
||||
Then load it with `--env-file`:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
bun test --env-file=.env.test
|
||||
```
|
||||
|
||||
## Complete Configuration Example
|
||||
@@ -398,13 +400,6 @@ Here's a comprehensive example showing all available test configuration options:
|
||||
registry = "https://registry.npmjs.org/"
|
||||
exact = true
|
||||
|
||||
[env]
|
||||
# Environment variables for tests
|
||||
NODE_ENV = "test"
|
||||
DATABASE_URL = "postgresql://localhost:5432/test_db"
|
||||
API_URL = "http://localhost:3001"
|
||||
LOG_LEVEL = "error"
|
||||
|
||||
[test]
|
||||
# Test discovery
|
||||
root = "src"
|
||||
|
||||
@@ -428,26 +428,26 @@ test("foo, bar, baz", () => {
|
||||
const barSpy = spyOn(barModule, "bar");
|
||||
const bazSpy = spyOn(bazModule, "baz");
|
||||
|
||||
// Original values
|
||||
expect(fooSpy).toBe("foo");
|
||||
expect(barSpy).toBe("bar");
|
||||
expect(bazSpy).toBe("baz");
|
||||
// Original implementations still work
|
||||
expect(fooModule.foo()).toBe("foo");
|
||||
expect(barModule.bar()).toBe("bar");
|
||||
expect(bazModule.baz()).toBe("baz");
|
||||
|
||||
// Mock implementations
|
||||
fooSpy.mockImplementation(() => 42);
|
||||
barSpy.mockImplementation(() => 43);
|
||||
bazSpy.mockImplementation(() => 44);
|
||||
|
||||
expect(fooSpy()).toBe(42);
|
||||
expect(barSpy()).toBe(43);
|
||||
expect(bazSpy()).toBe(44);
|
||||
expect(fooModule.foo()).toBe(42);
|
||||
expect(barModule.bar()).toBe(43);
|
||||
expect(bazModule.baz()).toBe(44);
|
||||
|
||||
// Restore all
|
||||
mock.restore();
|
||||
|
||||
expect(fooSpy()).toBe("foo");
|
||||
expect(barSpy()).toBe("bar");
|
||||
expect(bazSpy()).toBe("baz");
|
||||
expect(fooModule.foo()).toBe("foo");
|
||||
expect(barModule.bar()).toBe("bar");
|
||||
expect(bazModule.baz()).toBe("baz");
|
||||
});
|
||||
```
|
||||
|
||||
@@ -455,10 +455,10 @@ Using `mock.restore()` can reduce the amount of code in your tests by adding it
|
||||
|
||||
## Vitest Compatibility
|
||||
|
||||
For added compatibility with tests written for Vitest, Bun provides the `vi` global object as an alias for parts of the Jest mocking API:
|
||||
For added compatibility with tests written for Vitest, Bun provides the `vi` object as an alias for parts of the Jest mocking API:
|
||||
|
||||
```ts title="test.ts" icon="/icons/typescript.svg"
|
||||
import { test, expect } from "bun:test";
|
||||
import { test, expect, vi } from "bun:test";
|
||||
|
||||
// Using the 'vi' alias similar to Vitest
|
||||
test("vitest compatibility", () => {
|
||||
|
||||
@@ -40,7 +40,7 @@
|
||||
pkgs.cmake # Expected: 3.30+ on nixos-unstable as of 2025-10
|
||||
pkgs.ninja
|
||||
pkgs.pkg-config
|
||||
pkgs.sccache
|
||||
pkgs.ccache
|
||||
|
||||
# Compilers and toolchain - version pinned to LLVM 19
|
||||
clang
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "bun",
|
||||
"version": "1.3.4",
|
||||
"version": "1.3.6",
|
||||
"workspaces": [
|
||||
"./packages/bun-types",
|
||||
"./packages/@types/bun"
|
||||
@@ -23,7 +23,8 @@
|
||||
},
|
||||
"resolutions": {
|
||||
"bun-types": "workspace:packages/bun-types",
|
||||
"@types/bun": "workspace:packages/@types/bun"
|
||||
"@types/bun": "workspace:packages/@types/bun",
|
||||
"@types/node": "25.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "bun --silent run build:debug",
|
||||
@@ -87,7 +88,7 @@
|
||||
"node:test:cp": "bun ./scripts/fetch-node-test.ts ",
|
||||
"clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true",
|
||||
"machine:linux:ubuntu": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=ubuntu --release=25.04",
|
||||
"machine:linux:debian": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=debian --release=12",
|
||||
"machine:linux:debian": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=debian --release=13",
|
||||
"machine:linux:alpine": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=alpine --release=3.22",
|
||||
"machine:linux:amazonlinux": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=amazonlinux --release=2023",
|
||||
"machine:windows:2019": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=windows --release=2019",
|
||||
|
||||
@@ -615,7 +615,6 @@ const NativeStackFrame = ({
|
||||
<div
|
||||
title={StackFrameScope[scope]}
|
||||
className="BunError-StackFrame-identifier"
|
||||
// @ts-expect-error Custom CSS variables are not known by TypeScript
|
||||
style={{ "--max-length": `${maxLength}ch` }}
|
||||
>
|
||||
{getNativeStackFrameIdentifier(frame)}
|
||||
|
||||
995
packages/bun-types/bun.d.ts
vendored
995
packages/bun-types/bun.d.ts
vendored
File diff suppressed because it is too large
Load Diff
74
packages/bun-types/bundle.d.ts
vendored
Normal file
74
packages/bun-types/bundle.d.ts
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
/**
|
||||
* The `bun:bundle` module provides compile-time utilities for dead-code elimination.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { feature } from "bun:bundle";
|
||||
*
|
||||
* if (feature("SUPER_SECRET")) {
|
||||
* console.log("Secret feature enabled!");
|
||||
* } else {
|
||||
* console.log("Normal mode");
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* Enable feature flags via CLI:
|
||||
* ```bash
|
||||
* # During build
|
||||
* bun build --feature=SUPER_SECRET index.ts
|
||||
*
|
||||
* # At runtime
|
||||
* bun run --feature=SUPER_SECRET index.ts
|
||||
*
|
||||
* # In tests
|
||||
* bun test --feature=SUPER_SECRET
|
||||
* ```
|
||||
*
|
||||
* @module bun:bundle
|
||||
*/
|
||||
declare module "bun:bundle" {
|
||||
/**
|
||||
* Registry for type-safe feature flags.
|
||||
*
|
||||
* Augment this interface to get autocomplete and type checking for your feature flags:
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // env.d.ts
|
||||
* declare module "bun:bundle" {
|
||||
* interface Registry {
|
||||
* features: "DEBUG" | "PREMIUM" | "BETA";
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* Now `feature()` only accepts `"DEBUG"`, `"PREMIUM"`, or `"BETA"`:
|
||||
* ```ts
|
||||
* feature("DEBUG"); // OK
|
||||
* feature("TYPO"); // Type error
|
||||
* ```
|
||||
*/
|
||||
interface Registry {}
|
||||
|
||||
/**
|
||||
* Check if a feature flag is enabled at compile time.
|
||||
*
|
||||
* This function is replaced with a boolean literal (`true` or `false`) at bundle time,
|
||||
* enabling dead-code elimination. The bundler will remove unreachable branches.
|
||||
*
|
||||
* @param flag - The name of the feature flag to check
|
||||
* @returns `true` if the flag was passed via `--feature=FLAG`, `false` otherwise
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { feature } from "bun:bundle";
|
||||
*
|
||||
* // With --feature=DEBUG, this becomes: if (true) { ... }
|
||||
* // Without --feature=DEBUG, this becomes: if (false) { ... }
|
||||
* if (feature("DEBUG")) {
|
||||
* console.log("Debug mode enabled");
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
function feature(flag: Registry extends { features: infer Features extends string } ? Features : string): boolean;
|
||||
}
|
||||
91
packages/bun-types/globals.d.ts
vendored
91
packages/bun-types/globals.d.ts
vendored
@@ -1,9 +1,12 @@
|
||||
declare module "bun" {
|
||||
namespace __internal {
|
||||
type NodeCryptoWebcryptoSubtleCrypto = import("crypto").webcrypto.SubtleCrypto;
|
||||
type NodeCryptoWebcryptoCryptoKey = import("crypto").webcrypto.CryptoKey;
|
||||
type NodeCryptoWebcryptoCryptoKeyPair = import("crypto").webcrypto.CryptoKeyPair;
|
||||
|
||||
type LibEmptyOrNodeCryptoWebcryptoSubtleCrypto = LibDomIsLoaded extends true
|
||||
? {}
|
||||
: import("crypto").webcrypto.SubtleCrypto;
|
||||
|
||||
type LibWorkerOrBunWorker = LibDomIsLoaded extends true ? {} : Bun.Worker;
|
||||
type LibEmptyOrBunWebSocket = LibDomIsLoaded extends true ? {} : Bun.WebSocket;
|
||||
|
||||
@@ -14,7 +17,9 @@ declare module "bun" {
|
||||
? {}
|
||||
: import("node:stream/web").DecompressionStream;
|
||||
|
||||
type LibPerformanceOrNodePerfHooksPerformance = LibDomIsLoaded extends true ? {} : import("perf_hooks").Performance;
|
||||
type LibPerformanceOrNodePerfHooksPerformance = LibDomIsLoaded extends true
|
||||
? {}
|
||||
: import("node:perf_hooks").Performance;
|
||||
type LibEmptyOrPerformanceEntry = LibDomIsLoaded extends true ? {} : import("node:perf_hooks").PerformanceEntry;
|
||||
type LibEmptyOrPerformanceMark = LibDomIsLoaded extends true ? {} : import("node:perf_hooks").PerformanceMark;
|
||||
type LibEmptyOrPerformanceMeasure = LibDomIsLoaded extends true ? {} : import("node:perf_hooks").PerformanceMeasure;
|
||||
@@ -83,6 +88,24 @@ declare var WritableStream: Bun.__internal.UseLibDomIfAvailable<
|
||||
}
|
||||
>;
|
||||
|
||||
interface CompressionStream extends Bun.__internal.LibEmptyOrNodeStreamWebCompressionStream {}
|
||||
declare var CompressionStream: Bun.__internal.UseLibDomIfAvailable<
|
||||
"CompressionStream",
|
||||
{
|
||||
prototype: CompressionStream;
|
||||
new (format: Bun.CompressionFormat): CompressionStream;
|
||||
}
|
||||
>;
|
||||
|
||||
interface DecompressionStream extends Bun.__internal.LibEmptyOrNodeStreamWebDecompressionStream {}
|
||||
declare var DecompressionStream: Bun.__internal.UseLibDomIfAvailable<
|
||||
"DecompressionStream",
|
||||
{
|
||||
prototype: DecompressionStream;
|
||||
new (format: Bun.CompressionFormat): DecompressionStream;
|
||||
}
|
||||
>;
|
||||
|
||||
interface Worker extends Bun.__internal.LibWorkerOrBunWorker {}
|
||||
declare var Worker: Bun.__internal.UseLibDomIfAvailable<
|
||||
"Worker",
|
||||
@@ -206,7 +229,7 @@ interface TextEncoder extends Bun.__internal.LibEmptyOrNodeUtilTextEncoder {
|
||||
* @param src The text to encode.
|
||||
* @param dest The array to hold the encode result.
|
||||
*/
|
||||
encodeInto(src?: string, dest?: Bun.BufferSource): import("util").EncodeIntoResult;
|
||||
encodeInto(src?: string, dest?: Bun.BufferSource): import("node:util").TextEncoderEncodeIntoResult;
|
||||
}
|
||||
declare var TextEncoder: Bun.__internal.UseLibDomIfAvailable<
|
||||
"TextEncoder",
|
||||
@@ -278,30 +301,6 @@ declare var Event: {
|
||||
new (type: string, eventInitDict?: Bun.EventInit): Event;
|
||||
};
|
||||
|
||||
/**
|
||||
* Unimplemented in Bun
|
||||
*/
|
||||
interface CompressionStream extends Bun.__internal.LibEmptyOrNodeStreamWebCompressionStream {}
|
||||
/**
|
||||
* Unimplemented in Bun
|
||||
*/
|
||||
declare var CompressionStream: Bun.__internal.UseLibDomIfAvailable<
|
||||
"CompressionStream",
|
||||
typeof import("node:stream/web").CompressionStream
|
||||
>;
|
||||
|
||||
/**
|
||||
* Unimplemented in Bun
|
||||
*/
|
||||
interface DecompressionStream extends Bun.__internal.LibEmptyOrNodeStreamWebCompressionStream {}
|
||||
/**
|
||||
* Unimplemented in Bun
|
||||
*/
|
||||
declare var DecompressionStream: Bun.__internal.UseLibDomIfAvailable<
|
||||
"DecompressionStream",
|
||||
typeof import("node:stream/web").DecompressionStream
|
||||
>;
|
||||
|
||||
interface EventTarget {
|
||||
/**
|
||||
* Adds a new handler for the `type` event. Any given `listener` is added only once per `type` and per `capture` option value.
|
||||
@@ -958,7 +957,7 @@ declare function alert(message?: string): void;
|
||||
declare function confirm(message?: string): boolean;
|
||||
declare function prompt(message?: string, _default?: string): string | null;
|
||||
|
||||
interface SubtleCrypto extends Bun.__internal.NodeCryptoWebcryptoSubtleCrypto {}
|
||||
interface SubtleCrypto extends Bun.__internal.LibEmptyOrNodeCryptoWebcryptoSubtleCrypto {}
|
||||
declare var SubtleCrypto: {
|
||||
prototype: SubtleCrypto;
|
||||
new (): SubtleCrypto;
|
||||
@@ -1694,6 +1693,10 @@ declare var EventSource: Bun.__internal.UseLibDomIfAvailable<
|
||||
|
||||
interface Performance extends Bun.__internal.LibPerformanceOrNodePerfHooksPerformance {}
|
||||
declare var performance: Bun.__internal.UseLibDomIfAvailable<"performance", Performance>;
|
||||
declare var Performance: Bun.__internal.UseLibDomIfAvailable<
|
||||
"Performance",
|
||||
{ new (): Performance; prototype: Performance }
|
||||
>;
|
||||
|
||||
interface PerformanceEntry extends Bun.__internal.LibEmptyOrPerformanceEntry {}
|
||||
declare var PerformanceEntry: Bun.__internal.UseLibDomIfAvailable<
|
||||
@@ -1920,14 +1923,44 @@ interface BunFetchRequestInit extends RequestInit {
|
||||
* Override http_proxy or HTTPS_PROXY
|
||||
* This is a custom property that is not part of the Fetch API specification.
|
||||
*
|
||||
* Can be a string URL or an object with `url` and optional `headers`.
|
||||
*
|
||||
* @example
|
||||
* ```js
|
||||
* // String format
|
||||
* const response = await fetch("http://example.com", {
|
||||
* proxy: "https://username:password@127.0.0.1:8080"
|
||||
* });
|
||||
*
|
||||
* // Object format with custom headers sent to the proxy
|
||||
* const response = await fetch("http://example.com", {
|
||||
* proxy: {
|
||||
* url: "https://127.0.0.1:8080",
|
||||
* headers: {
|
||||
* "Proxy-Authorization": "Bearer token",
|
||||
* "X-Custom-Proxy-Header": "value"
|
||||
* }
|
||||
* }
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* If a `Proxy-Authorization` header is provided in `proxy.headers`, it takes
|
||||
* precedence over credentials parsed from the proxy URL.
|
||||
*/
|
||||
proxy?: string;
|
||||
proxy?:
|
||||
| string
|
||||
| {
|
||||
/**
|
||||
* The proxy URL
|
||||
*/
|
||||
url: string;
|
||||
/**
|
||||
* Custom headers to send to the proxy server.
|
||||
* These headers are sent in the CONNECT request (for HTTPS targets)
|
||||
* or in the proxy request (for HTTP targets).
|
||||
*/
|
||||
headers?: Bun.HeadersInit;
|
||||
};
|
||||
|
||||
/**
|
||||
* Override the default S3 options
|
||||
|
||||
1
packages/bun-types/index.d.ts
vendored
1
packages/bun-types/index.d.ts
vendored
@@ -23,6 +23,7 @@
|
||||
/// <reference path="./serve.d.ts" />
|
||||
/// <reference path="./sql.d.ts" />
|
||||
/// <reference path="./security.d.ts" />
|
||||
/// <reference path="./bundle.d.ts" />
|
||||
|
||||
/// <reference path="./bun.ns.d.ts" />
|
||||
|
||||
|
||||
21
packages/bun-types/overrides.d.ts
vendored
21
packages/bun-types/overrides.d.ts
vendored
@@ -86,7 +86,7 @@ declare global {
|
||||
reallyExit(code?: number): never;
|
||||
dlopen(module: { exports: any }, filename: string, flags?: number): void;
|
||||
_exiting: boolean;
|
||||
noDeprecation: boolean;
|
||||
noDeprecation?: boolean | undefined;
|
||||
|
||||
binding(m: "constants"): {
|
||||
os: typeof import("node:os").constants;
|
||||
@@ -308,11 +308,11 @@ declare global {
|
||||
}
|
||||
}
|
||||
|
||||
declare module "fs/promises" {
|
||||
declare module "node:fs/promises" {
|
||||
function exists(path: Bun.PathLike): Promise<boolean>;
|
||||
}
|
||||
|
||||
declare module "tls" {
|
||||
declare module "node:tls" {
|
||||
interface BunConnectionOptions extends Omit<ConnectionOptions, "key" | "ca" | "tls" | "cert"> {
|
||||
/**
|
||||
* Optionally override the trusted CA certificates. Default is to trust
|
||||
@@ -359,3 +359,18 @@ declare module "tls" {
|
||||
|
||||
function connect(options: BunConnectionOptions, secureConnectListener?: () => void): TLSSocket;
|
||||
}
|
||||
|
||||
declare module "console" {
|
||||
interface Console {
|
||||
/**
|
||||
* Asynchronously read lines from standard input (fd 0)
|
||||
*
|
||||
* ```ts
|
||||
* for await (const line of console) {
|
||||
* console.log(line);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
[Symbol.asyncIterator](): AsyncIterableIterator<string>;
|
||||
}
|
||||
}
|
||||
|
||||
68
packages/bun-types/s3.d.ts
vendored
68
packages/bun-types/s3.d.ts
vendored
@@ -11,9 +11,9 @@ declare module "bun" {
|
||||
* If the file descriptor is not writable yet, the data is buffered.
|
||||
*
|
||||
* @param chunk The data to write
|
||||
* @returns Number of bytes written
|
||||
* @returns Number of bytes written or, if the write is pending, a Promise resolving to the number of bytes
|
||||
*/
|
||||
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
|
||||
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number | Promise<number>;
|
||||
/**
|
||||
* Flush the internal buffer, committing the data to disk or the pipe.
|
||||
*
|
||||
@@ -78,9 +78,9 @@ declare module "bun" {
|
||||
* If the network is not writable yet, the data is buffered.
|
||||
*
|
||||
* @param chunk The data to write
|
||||
* @returns Number of bytes written
|
||||
* @returns Number of bytes written or, if the write is pending, a Promise resolving to the number of bytes
|
||||
*/
|
||||
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
|
||||
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number | Promise<number>;
|
||||
/**
|
||||
* Flush the internal buffer, committing the data to the network.
|
||||
*
|
||||
@@ -281,6 +281,24 @@ declare module "bun" {
|
||||
*/
|
||||
type?: string;
|
||||
|
||||
/**
|
||||
* The Content-Disposition header value.
|
||||
* Controls how the file is presented when downloaded.
|
||||
*
|
||||
* @example
|
||||
* // Setting attachment disposition with filename
|
||||
* const file = s3.file("report.pdf", {
|
||||
* contentDisposition: "attachment; filename=\"quarterly-report.pdf\""
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* // Setting inline disposition
|
||||
* await s3.write("image.png", imageData, {
|
||||
* contentDisposition: "inline"
|
||||
* });
|
||||
*/
|
||||
contentDisposition?: string | undefined;
|
||||
|
||||
/**
|
||||
* By default, Amazon S3 uses the STANDARD Storage Class to store newly created objects.
|
||||
*
|
||||
@@ -303,6 +321,30 @@ declare module "bun" {
|
||||
| "SNOW"
|
||||
| "STANDARD_IA";
|
||||
|
||||
/**
|
||||
* When set to `true`, confirms that the requester knows they will be charged
|
||||
* for the request and data transfer costs. Required for accessing objects
|
||||
* in Requester Pays buckets.
|
||||
*
|
||||
* @see https://docs.aws.amazon.com/AmazonS3/latest/userguide/RequesterPaysBuckets.html
|
||||
*
|
||||
* @example
|
||||
* // Accessing a file in a Requester Pays bucket
|
||||
* const file = s3.file("data.csv", {
|
||||
* bucket: "requester-pays-bucket",
|
||||
* requestPayer: true
|
||||
* });
|
||||
* const content = await file.text();
|
||||
*
|
||||
* @example
|
||||
* // Uploading to a Requester Pays bucket
|
||||
* await s3.write("output.json", data, {
|
||||
* bucket: "requester-pays-bucket",
|
||||
* requestPayer: true
|
||||
* });
|
||||
*/
|
||||
requestPayer?: boolean;
|
||||
|
||||
/**
|
||||
* @deprecated The size of the internal buffer in bytes. Defaults to 5 MiB. use `partSize` and `queueSize` instead.
|
||||
*/
|
||||
@@ -567,7 +609,17 @@ declare module "bun" {
|
||||
* });
|
||||
*/
|
||||
write(
|
||||
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile | S3File | Blob,
|
||||
data:
|
||||
| string
|
||||
| ArrayBufferView
|
||||
| ArrayBuffer
|
||||
| SharedArrayBuffer
|
||||
| Request
|
||||
| Response
|
||||
| BunFile
|
||||
| S3File
|
||||
| Blob
|
||||
| Archive,
|
||||
options?: S3Options,
|
||||
): Promise<number>;
|
||||
|
||||
@@ -878,7 +930,8 @@ declare module "bun" {
|
||||
| BunFile
|
||||
| S3File
|
||||
| Blob
|
||||
| File,
|
||||
| File
|
||||
| Archive,
|
||||
options?: S3Options,
|
||||
): Promise<number>;
|
||||
|
||||
@@ -928,7 +981,8 @@ declare module "bun" {
|
||||
| BunFile
|
||||
| S3File
|
||||
| Blob
|
||||
| File,
|
||||
| File
|
||||
| Archive,
|
||||
options?: S3Options,
|
||||
): Promise<number>;
|
||||
|
||||
|
||||
13
packages/bun-types/serve.d.ts
vendored
13
packages/bun-types/serve.d.ts
vendored
@@ -446,7 +446,7 @@ declare module "bun" {
|
||||
closeOnBackpressureLimit?: boolean;
|
||||
|
||||
/**
|
||||
* Sets the the number of seconds to wait before timing out a connection
|
||||
* Sets the number of seconds to wait before timing out a connection
|
||||
* due to no messages or pings.
|
||||
*
|
||||
* @default 120
|
||||
@@ -758,7 +758,7 @@ declare module "bun" {
|
||||
ipv6Only?: boolean;
|
||||
|
||||
/**
|
||||
* Sets the the number of seconds to wait before timing out a connection
|
||||
* Sets the number of seconds to wait before timing out a connection
|
||||
* due to inactivity.
|
||||
*
|
||||
* @default 10
|
||||
@@ -1082,6 +1082,15 @@ declare module "bun" {
|
||||
*/
|
||||
readonly hostname: string | undefined;
|
||||
|
||||
/**
|
||||
* The protocol the server is listening on.
|
||||
*
|
||||
* - "http" for normal servers
|
||||
* - "https" when TLS is enabled
|
||||
* - null for unix sockets or when unavailable
|
||||
*/
|
||||
readonly protocol: "http" | "https" | null;
|
||||
|
||||
/**
|
||||
* Is the server running in development mode?
|
||||
*
|
||||
|
||||
31
packages/bun-types/sqlite.d.ts
vendored
31
packages/bun-types/sqlite.d.ts
vendored
@@ -154,12 +154,6 @@ declare module "bun:sqlite" {
|
||||
* | `bigint` | `INTEGER` |
|
||||
* | `null` | `NULL` |
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* db.run("CREATE TABLE foo (bar TEXT)");
|
||||
* db.run("INSERT INTO foo VALUES (?)", ["baz"]);
|
||||
* ```
|
||||
*
|
||||
* Useful for queries like:
|
||||
* - `CREATE TABLE`
|
||||
* - `INSERT INTO`
|
||||
@@ -180,8 +174,14 @@ declare module "bun:sqlite" {
|
||||
*
|
||||
* @param sql The SQL query to run
|
||||
* @param bindings Optional bindings for the query
|
||||
* @returns A `Changes` object with `changes` and `lastInsertRowid` properties
|
||||
*
|
||||
* @returns `Database` instance
|
||||
* @example
|
||||
* ```ts
|
||||
* db.run("CREATE TABLE foo (bar TEXT)");
|
||||
* db.run("INSERT INTO foo VALUES (?)", ["baz"]);
|
||||
* // => { changes: 1, lastInsertRowid: 1 }
|
||||
* ```
|
||||
*/
|
||||
run<ParamsType extends SQLQueryBindings[]>(sql: string, ...bindings: ParamsType[]): Changes;
|
||||
|
||||
@@ -670,18 +670,19 @@ declare module "bun:sqlite" {
|
||||
* Execute the prepared statement.
|
||||
*
|
||||
* @param params optional values to bind to the statement. If omitted, the statement is run with the last bound values or no parameters if there are none.
|
||||
* @returns A `Changes` object with `changes` and `lastInsertRowid` properties
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const stmt = db.prepare("UPDATE foo SET bar = ?");
|
||||
* stmt.run("baz");
|
||||
* // => undefined
|
||||
* const insert = db.prepare("INSERT INTO users (name) VALUES (?)");
|
||||
* insert.run("Alice");
|
||||
* // => { changes: 1, lastInsertRowid: 1 }
|
||||
* insert.run("Bob");
|
||||
* // => { changes: 1, lastInsertRowid: 2 }
|
||||
*
|
||||
* stmt.run();
|
||||
* // => undefined
|
||||
*
|
||||
* stmt.run("foo");
|
||||
* // => undefined
|
||||
* const update = db.prepare("UPDATE users SET name = ? WHERE id = ?");
|
||||
* update.run("Charlie", 1);
|
||||
* // => { changes: 1, lastInsertRowid: 2 }
|
||||
* ```
|
||||
*
|
||||
* The following types can be used when binding parameters:
|
||||
|
||||
36
packages/bun-types/test.d.ts
vendored
36
packages/bun-types/test.d.ts
vendored
@@ -95,8 +95,15 @@ declare module "bun:test" {
|
||||
function fn<T extends (...args: any[]) => any>(func?: T): Mock<T>;
|
||||
function setSystemTime(now?: number | Date): void;
|
||||
function setTimeout(milliseconds: number): void;
|
||||
function useFakeTimers(): void;
|
||||
function useRealTimers(): void;
|
||||
function useFakeTimers(options?: { now?: number | Date }): typeof vi;
|
||||
function useRealTimers(): typeof vi;
|
||||
function advanceTimersByTime(milliseconds: number): typeof vi;
|
||||
function advanceTimersToNextTimer(): typeof vi;
|
||||
function runAllTimers(): typeof vi;
|
||||
function runOnlyPendingTimers(): typeof vi;
|
||||
function getTimerCount(): number;
|
||||
function clearAllTimers(): void;
|
||||
function isFakeTimers(): boolean;
|
||||
function spyOn<T extends object, K extends keyof T>(
|
||||
obj: T,
|
||||
methodOrPropertyValue: K,
|
||||
@@ -184,6 +191,13 @@ declare module "bun:test" {
|
||||
resetAllMocks: typeof jest.resetAllMocks;
|
||||
useFakeTimers: typeof jest.useFakeTimers;
|
||||
useRealTimers: typeof jest.useRealTimers;
|
||||
advanceTimersByTime: typeof jest.advanceTimersByTime;
|
||||
advanceTimersToNextTimer: typeof jest.advanceTimersToNextTimer;
|
||||
runAllTimers: typeof jest.runAllTimers;
|
||||
runOnlyPendingTimers: typeof jest.runOnlyPendingTimers;
|
||||
getTimerCount: typeof jest.getTimerCount;
|
||||
clearAllTimers: typeof jest.clearAllTimers;
|
||||
isFakeTimers: typeof jest.isFakeTimers;
|
||||
};
|
||||
|
||||
interface FunctionLike {
|
||||
@@ -414,6 +428,8 @@ declare module "bun:test" {
|
||||
}
|
||||
|
||||
namespace __internal {
|
||||
type IfNeverThenElse<T, Else> = [T] extends [never] ? Else : T;
|
||||
|
||||
type IsTuple<T> = T extends readonly unknown[]
|
||||
? number extends T["length"]
|
||||
? false // It's an array with unknown length, not a tuple
|
||||
@@ -1083,8 +1099,8 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainKey(expected: keyof T): void;
|
||||
toContainKey<X = T>(expected: NoInfer<keyof X>): void;
|
||||
toContainKey(expected: __internal.IfNeverThenElse<keyof T, PropertyKey>): void;
|
||||
toContainKey<X = T>(expected: __internal.IfNeverThenElse<NoInfer<keyof X>, PropertyKey>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contains all the provided keys.
|
||||
@@ -1100,8 +1116,8 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainAllKeys(expected: Array<keyof T>): void;
|
||||
toContainAllKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;
|
||||
toContainAllKeys(expected: Array<__internal.IfNeverThenElse<keyof T, PropertyKey>>): void;
|
||||
toContainAllKeys<X = T>(expected: Array<__internal.IfNeverThenElse<NoInfer<keyof X>, PropertyKey>>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contains at least one of the provided keys.
|
||||
@@ -1117,8 +1133,8 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainAnyKeys(expected: Array<keyof T>): void;
|
||||
toContainAnyKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;
|
||||
toContainAnyKeys(expected: Array<__internal.IfNeverThenElse<keyof T, PropertyKey>>): void;
|
||||
toContainAnyKeys<X = T>(expected: Array<__internal.IfNeverThenElse<NoInfer<keyof X>, PropertyKey>>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contain the provided value.
|
||||
@@ -1210,8 +1226,8 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainKeys(expected: Array<keyof T>): void;
|
||||
toContainKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;
|
||||
toContainKeys(expected: Array<__internal.IfNeverThenElse<keyof T, PropertyKey>>): void;
|
||||
toContainKeys<X = T>(expected: Array<__internal.IfNeverThenElse<NoInfer<keyof X>, PropertyKey>>): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value contains and equals what is expected.
|
||||
|
||||
4
packages/bun-types/wasm.d.ts
vendored
4
packages/bun-types/wasm.d.ts
vendored
@@ -100,8 +100,8 @@ declare module "bun" {
|
||||
|
||||
declare namespace WebAssembly {
|
||||
interface ValueTypeMap extends Bun.WebAssembly.ValueTypeMap {}
|
||||
interface GlobalDescriptor<T extends keyof ValueTypeMap = keyof ValueTypeMap>
|
||||
extends Bun.WebAssembly.GlobalDescriptor<T> {}
|
||||
interface GlobalDescriptor<T extends keyof ValueTypeMap = keyof ValueTypeMap> extends Bun.WebAssembly
|
||||
.GlobalDescriptor<T> {}
|
||||
interface MemoryDescriptor extends Bun.WebAssembly.MemoryDescriptor {}
|
||||
interface ModuleExportDescriptor extends Bun.WebAssembly.ModuleExportDescriptor {}
|
||||
interface ModuleImportDescriptor extends Bun.WebAssembly.ModuleImportDescriptor {}
|
||||
|
||||
@@ -54,8 +54,8 @@ void us_listen_socket_close(int ssl, struct us_listen_socket_t *ls) {
|
||||
s->next = loop->data.closed_head;
|
||||
loop->data.closed_head = s;
|
||||
|
||||
/* Any socket with prev = context is marked as closed */
|
||||
s->prev = (struct us_socket_t *) context;
|
||||
/* Mark the socket as closed */
|
||||
s->flags.is_closed = 1;
|
||||
}
|
||||
|
||||
/* We cannot immediately free a listen socket as we can be inside an accept loop */
|
||||
@@ -154,7 +154,9 @@ void us_internal_socket_context_unlink_connecting_socket(int ssl, struct us_sock
|
||||
|
||||
/* We always add in the top, so we don't modify any s.next */
|
||||
void us_internal_socket_context_link_listen_socket(int ssl, struct us_socket_context_t *context, struct us_listen_socket_t *ls) {
|
||||
|
||||
struct us_socket_t* s = &ls->s;
|
||||
if(us_socket_is_closed(ssl, s)) return;
|
||||
s->context = context;
|
||||
s->next = (struct us_socket_t *) context->head_listen_sockets;
|
||||
s->prev = 0;
|
||||
@@ -166,6 +168,8 @@ void us_internal_socket_context_link_listen_socket(int ssl, struct us_socket_con
|
||||
}
|
||||
|
||||
void us_internal_socket_context_link_connecting_socket(int ssl, struct us_socket_context_t *context, struct us_connecting_socket_t *c) {
|
||||
if(c->closed) return;
|
||||
|
||||
c->context = context;
|
||||
c->next_pending = context->head_connecting_sockets;
|
||||
c->prev_pending = 0;
|
||||
@@ -180,6 +184,8 @@ void us_internal_socket_context_link_connecting_socket(int ssl, struct us_socket
|
||||
|
||||
/* We always add in the top, so we don't modify any s.next */
|
||||
void us_internal_socket_context_link_socket(int ssl, struct us_socket_context_t *context, struct us_socket_t *s) {
|
||||
if(us_socket_is_closed(ssl,s)) return;
|
||||
|
||||
s->context = context;
|
||||
s->next = context->head_sockets;
|
||||
s->prev = 0;
|
||||
@@ -386,6 +392,9 @@ struct us_listen_socket_t *us_socket_context_listen(int ssl, struct us_socket_co
|
||||
s->flags.low_prio_state = 0;
|
||||
s->flags.is_paused = 0;
|
||||
s->flags.is_ipc = 0;
|
||||
s->flags.is_closed = 0;
|
||||
s->flags.adopted = 0;
|
||||
s->flags.is_tls = ssl;
|
||||
s->next = 0;
|
||||
s->flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
|
||||
us_internal_socket_context_link_listen_socket(ssl, context, ls);
|
||||
@@ -422,6 +431,9 @@ struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_sock
|
||||
s->flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
|
||||
s->flags.is_paused = 0;
|
||||
s->flags.is_ipc = 0;
|
||||
s->flags.is_closed = 0;
|
||||
s->flags.adopted = 0;
|
||||
s->flags.is_tls = ssl;
|
||||
s->next = 0;
|
||||
us_internal_socket_context_link_listen_socket(ssl, context, ls);
|
||||
|
||||
@@ -430,7 +442,7 @@ struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_sock
|
||||
return ls;
|
||||
}
|
||||
|
||||
struct us_socket_t* us_socket_context_connect_resolved_dns(struct us_socket_context_t *context, struct sockaddr_storage* addr, int options, int socket_ext_size) {
|
||||
struct us_socket_t* us_socket_context_connect_resolved_dns(int ssl, struct us_socket_context_t *context, struct sockaddr_storage* addr, int options, int socket_ext_size) {
|
||||
LIBUS_SOCKET_DESCRIPTOR connect_socket_fd = bsd_create_connect_socket(addr, options);
|
||||
if (connect_socket_fd == LIBUS_SOCKET_ERROR) {
|
||||
return NULL;
|
||||
@@ -453,6 +465,9 @@ struct us_socket_t* us_socket_context_connect_resolved_dns(struct us_socket_cont
|
||||
socket->flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
|
||||
socket->flags.is_paused = 0;
|
||||
socket->flags.is_ipc = 0;
|
||||
socket->flags.is_closed = 0;
|
||||
socket->flags.adopted = 0;
|
||||
socket->flags.is_tls = ssl;
|
||||
socket->connect_state = NULL;
|
||||
socket->connect_next = NULL;
|
||||
|
||||
@@ -514,7 +529,7 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
|
||||
struct sockaddr_storage addr;
|
||||
if (try_parse_ip(host, port, &addr)) {
|
||||
*has_dns_resolved = 1;
|
||||
return us_socket_context_connect_resolved_dns(context, &addr, options, socket_ext_size);
|
||||
return us_socket_context_connect_resolved_dns(ssl, context, &addr, options, socket_ext_size);
|
||||
}
|
||||
|
||||
struct addrinfo_request* ai_req;
|
||||
@@ -534,7 +549,7 @@ void *us_socket_context_connect(int ssl, struct us_socket_context_t *context, co
|
||||
struct sockaddr_storage addr;
|
||||
init_addr_with_port(&entries->info, port, &addr);
|
||||
*has_dns_resolved = 1;
|
||||
struct us_socket_t *s = us_socket_context_connect_resolved_dns(context, &addr, options, socket_ext_size);
|
||||
struct us_socket_t *s = us_socket_context_connect_resolved_dns(ssl, context, &addr, options, socket_ext_size);
|
||||
Bun__addrinfo_freeRequest(ai_req, s == NULL);
|
||||
return s;
|
||||
}
|
||||
@@ -583,6 +598,9 @@ int start_connections(struct us_connecting_socket_t *c, int count) {
|
||||
flags->allow_half_open = (c->options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
|
||||
flags->is_paused = 0;
|
||||
flags->is_ipc = 0;
|
||||
flags->is_closed = 0;
|
||||
flags->adopted = 0;
|
||||
flags->is_tls = c->ssl;
|
||||
/* Link it into context so that timeout fires properly */
|
||||
us_internal_socket_context_link_socket(0, context, s);
|
||||
|
||||
@@ -760,6 +778,9 @@ struct us_socket_t *us_socket_context_connect_unix(int ssl, struct us_socket_con
|
||||
connect_socket->flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
|
||||
connect_socket->flags.is_paused = 0;
|
||||
connect_socket->flags.is_ipc = 0;
|
||||
connect_socket->flags.is_closed = 0;
|
||||
connect_socket->flags.adopted = 0;
|
||||
connect_socket->flags.is_tls = ssl;
|
||||
connect_socket->connect_state = NULL;
|
||||
connect_socket->connect_next = NULL;
|
||||
us_internal_socket_context_link_socket(ssl, context, connect_socket);
|
||||
@@ -780,10 +801,10 @@ struct us_socket_context_t *us_create_child_socket_context(int ssl, struct us_so
|
||||
}
|
||||
|
||||
/* Note: This will set timeout to 0 */
|
||||
struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_context_t *context, struct us_socket_t *s, int ext_size) {
|
||||
struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_context_t *context, struct us_socket_t *s, int old_ext_size, int ext_size) {
|
||||
#ifndef LIBUS_NO_SSL
|
||||
if (ssl) {
|
||||
return (struct us_socket_t *) us_internal_ssl_socket_context_adopt_socket((struct us_internal_ssl_socket_context_t *) context, (struct us_internal_ssl_socket_t *) s, ext_size);
|
||||
return (struct us_socket_t *) us_internal_ssl_socket_context_adopt_socket((struct us_internal_ssl_socket_context_t *) context, (struct us_internal_ssl_socket_t *) s, old_ext_size, ext_size);
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -807,7 +828,18 @@ struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_con
|
||||
struct us_socket_t *new_s = s;
|
||||
if (ext_size != -1) {
|
||||
struct us_poll_t *pool_ref = &s->p;
|
||||
new_s = (struct us_socket_t *) us_poll_resize(pool_ref, loop, sizeof(struct us_socket_t) + ext_size);
|
||||
new_s = (struct us_socket_t *) us_poll_resize(pool_ref, loop, sizeof(struct us_socket_t) + old_ext_size, sizeof(struct us_socket_t) + ext_size);
|
||||
if(new_s != s) {
|
||||
/* Mark the old socket as closed */
|
||||
s->flags.is_closed = 1;
|
||||
/* Link this socket to the close-list and let it be deleted after this iteration */
|
||||
s->next = s->context->loop->data.closed_head;
|
||||
s->context->loop->data.closed_head = s;
|
||||
/* Mark the old socket as adopted (reallocated) */
|
||||
s->flags.adopted = 1;
|
||||
/* Tell the event loop what is the new socket so we can process to send info to the right place and callbacks like more data and EOF*/
|
||||
s->prev = new_s;
|
||||
}
|
||||
if (c) {
|
||||
c->connecting_head = new_s;
|
||||
c->context = context;
|
||||
|
||||
@@ -396,7 +396,7 @@ void us_internal_update_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
}
|
||||
|
||||
int result = SSL_do_handshake(s->ssl);
|
||||
|
||||
|
||||
if (SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
|
||||
us_internal_ssl_socket_close(s, 0, NULL);
|
||||
return;
|
||||
@@ -417,7 +417,7 @@ void us_internal_update_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
}
|
||||
s->handshake_state = HANDSHAKE_PENDING;
|
||||
s->ssl_write_wants_read = 1;
|
||||
|
||||
s->s.flags.last_write_failed = 1;
|
||||
return;
|
||||
}
|
||||
// success
|
||||
@@ -434,6 +434,7 @@ ssl_on_close(struct us_internal_ssl_socket_t *s, int code, void *reason) {
|
||||
struct us_internal_ssl_socket_t * ret = context->on_close(s, code, reason);
|
||||
SSL_free(s->ssl); // free SSL after on_close
|
||||
s->ssl = NULL; // set to NULL
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
@@ -1855,15 +1856,16 @@ void us_internal_ssl_socket_shutdown(struct us_internal_ssl_socket_t *s) {
|
||||
|
||||
struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_adopt_socket(
|
||||
struct us_internal_ssl_socket_context_t *context,
|
||||
struct us_internal_ssl_socket_t *s, int ext_size) {
|
||||
struct us_internal_ssl_socket_t *s, int old_ext_size, int ext_size) {
|
||||
// todo: this is completely untested
|
||||
int new_old_ext_size = sizeof(struct us_internal_ssl_socket_t) - sizeof(struct us_socket_t) + old_ext_size;
|
||||
int new_ext_size = ext_size;
|
||||
if (ext_size != -1) {
|
||||
new_ext_size = sizeof(struct us_internal_ssl_socket_t) - sizeof(struct us_socket_t) + ext_size;
|
||||
}
|
||||
return (struct us_internal_ssl_socket_t *)us_socket_context_adopt_socket(
|
||||
0, &context->sc, &s->s,
|
||||
new_ext_size);
|
||||
new_old_ext_size, new_ext_size);
|
||||
}
|
||||
|
||||
struct us_internal_ssl_socket_t *
|
||||
@@ -1920,10 +1922,11 @@ ssl_wrapped_context_on_data(struct us_internal_ssl_socket_t *s, char *data,
|
||||
struct us_wrapped_socket_context_t *wrapped_context =
|
||||
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
|
||||
context);
|
||||
// raw data if needed
|
||||
// raw data if needed
|
||||
if (wrapped_context->old_events.on_data) {
|
||||
wrapped_context->old_events.on_data((struct us_socket_t *)s, data, length);
|
||||
}
|
||||
|
||||
// ssl wrapped data
|
||||
return ssl_on_data(s, data, length);
|
||||
}
|
||||
@@ -2028,7 +2031,7 @@ us_internal_ssl_socket_open(struct us_internal_ssl_socket_t *s, int is_client,
|
||||
// already opened
|
||||
if (s->ssl)
|
||||
return s;
|
||||
|
||||
|
||||
// start SSL open
|
||||
return ssl_on_open(s, is_client, ip, ip_length, NULL);
|
||||
}
|
||||
@@ -2040,6 +2043,7 @@ struct us_socket_t *us_socket_upgrade_to_tls(us_socket_r s, us_socket_context_r
|
||||
struct us_internal_ssl_socket_t *socket =
|
||||
(struct us_internal_ssl_socket_t *)us_socket_context_adopt_socket(
|
||||
0, new_context, s,
|
||||
sizeof(void*),
|
||||
(sizeof(struct us_internal_ssl_socket_t) - sizeof(struct us_socket_t)) + sizeof(void*));
|
||||
socket->ssl = NULL;
|
||||
socket->ssl_write_wants_read = 0;
|
||||
@@ -2058,7 +2062,7 @@ struct us_socket_t *us_socket_upgrade_to_tls(us_socket_r s, us_socket_context_r
|
||||
|
||||
struct us_internal_ssl_socket_t *us_internal_ssl_socket_wrap_with_tls(
|
||||
struct us_socket_t *s, struct us_bun_socket_context_options_t options,
|
||||
struct us_socket_events_t events, int socket_ext_size) {
|
||||
struct us_socket_events_t events, int old_socket_ext_size, int socket_ext_size) {
|
||||
/* Cannot wrap a closed socket */
|
||||
if (us_socket_is_closed(0, s)) {
|
||||
return NULL;
|
||||
@@ -2163,6 +2167,7 @@ us_socket_context_on_socket_connect_error(
|
||||
struct us_internal_ssl_socket_t *socket =
|
||||
(struct us_internal_ssl_socket_t *)us_socket_context_adopt_socket(
|
||||
0, context, s,
|
||||
old_socket_ext_size,
|
||||
sizeof(struct us_internal_ssl_socket_t) - sizeof(struct us_socket_t) +
|
||||
socket_ext_size);
|
||||
socket->ssl = NULL;
|
||||
|
||||
@@ -228,8 +228,8 @@ void us_loop_run(struct us_loop_t *loop) {
|
||||
// > Instead, the filter will aggregate the events into a single kevent struct
|
||||
// Note: EV_ERROR only sets the error in data as part of changelist. Not in this call!
|
||||
int events = 0
|
||||
| ((filter & EVFILT_READ) ? LIBUS_SOCKET_READABLE : 0)
|
||||
| ((filter & EVFILT_WRITE) ? LIBUS_SOCKET_WRITABLE : 0);
|
||||
| ((filter == EVFILT_READ) ? LIBUS_SOCKET_READABLE : 0)
|
||||
| ((filter == EVFILT_WRITE) ? LIBUS_SOCKET_WRITABLE : 0);
|
||||
const int error = (flags & (EV_ERROR)) ? ((int)fflags || 1) : 0;
|
||||
const int eof = (flags & (EV_EOF));
|
||||
#endif
|
||||
@@ -325,7 +325,7 @@ void us_internal_loop_update_pending_ready_polls(struct us_loop_t *loop, struct
|
||||
int num_entries_possibly_remaining = 1;
|
||||
#else
|
||||
/* Ready polls may contain same poll twice under kqueue, as one poll may hold two filters */
|
||||
int num_entries_possibly_remaining = 2;//((old_events & LIBUS_SOCKET_READABLE) ? 1 : 0) + ((old_events & LIBUS_SOCKET_WRITABLE) ? 1 : 0);
|
||||
int num_entries_possibly_remaining = 2;
|
||||
#endif
|
||||
|
||||
/* Todo: for kqueue if we track things in us_change_poll it is possible to have a fast path with no seeking in cases of:
|
||||
@@ -360,11 +360,11 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d
|
||||
if(!is_readable && !is_writable) {
|
||||
if(!(old_events & LIBUS_SOCKET_WRITABLE)) {
|
||||
// if we are not reading or writing, we need to add writable to receive FIN
|
||||
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, EV_ADD, 0, 0, (uint64_t)(void*)user_data, 0, 0);
|
||||
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, EV_ADD | EV_ONESHOT, 0, 0, (uint64_t)(void*)user_data, 0, 0);
|
||||
}
|
||||
} else if ((new_events & LIBUS_SOCKET_WRITABLE) != (old_events & LIBUS_SOCKET_WRITABLE)) {
|
||||
/* Do they differ in writable? */
|
||||
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, (new_events & LIBUS_SOCKET_WRITABLE) ? EV_ADD : EV_DELETE, 0, 0, (uint64_t)(void*)user_data, 0, 0);
|
||||
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, (new_events & LIBUS_SOCKET_WRITABLE) ? EV_ADD | EV_ONESHOT : EV_DELETE, 0, 0, (uint64_t)(void*)user_data, 0, 0);
|
||||
}
|
||||
int ret;
|
||||
do {
|
||||
@@ -377,22 +377,30 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d
|
||||
}
|
||||
#endif
|
||||
|
||||
struct us_poll_t *us_poll_resize(struct us_poll_t *p, struct us_loop_t *loop, unsigned int ext_size) {
|
||||
int events = us_poll_events(p);
|
||||
|
||||
struct us_poll_t *us_poll_resize(struct us_poll_t *p, struct us_loop_t *loop, unsigned int old_ext_size, unsigned int ext_size) {
|
||||
|
||||
struct us_poll_t *new_p = us_realloc(p, sizeof(struct us_poll_t) + ext_size);
|
||||
if (p != new_p) {
|
||||
unsigned int old_size = sizeof(struct us_poll_t) + old_ext_size;
|
||||
unsigned int new_size = sizeof(struct us_poll_t) + ext_size;
|
||||
if(new_size <= old_size) return p;
|
||||
|
||||
struct us_poll_t *new_p = us_calloc(1, new_size);
|
||||
memcpy(new_p, p, old_size);
|
||||
|
||||
/* Increment poll count for the new poll - the old poll will be freed separately
|
||||
* which decrements the count, keeping the total correct */
|
||||
loop->num_polls++;
|
||||
|
||||
int events = us_poll_events(p);
|
||||
#ifdef LIBUS_USE_EPOLL
|
||||
/* Hack: forcefully update poll by stripping away already set events */
|
||||
new_p->state.poll_type = us_internal_poll_type(new_p);
|
||||
us_poll_change(new_p, loop, events);
|
||||
/* Hack: forcefully update poll by stripping away already set events */
|
||||
new_p->state.poll_type = us_internal_poll_type(new_p);
|
||||
us_poll_change(new_p, loop, events);
|
||||
#else
|
||||
/* Forcefully update poll by resetting them with new_p as user data */
|
||||
kqueue_change(loop->fd, new_p->state.fd, 0, LIBUS_SOCKET_WRITABLE | LIBUS_SOCKET_READABLE, new_p);
|
||||
#endif /* This is needed for epoll also (us_change_poll doesn't update the old poll) */
|
||||
us_internal_loop_update_pending_ready_polls(loop, p, new_p, events, events);
|
||||
}
|
||||
/* Forcefully update poll by resetting them with new_p as user data */
|
||||
kqueue_change(loop->fd, new_p->state.fd, 0, LIBUS_SOCKET_WRITABLE | LIBUS_SOCKET_READABLE, new_p);
|
||||
#endif
|
||||
/* This is needed for epoll also (us_change_poll doesn't update the old poll) */
|
||||
us_internal_loop_update_pending_ready_polls(loop, p, new_p, events, events);
|
||||
|
||||
return new_p;
|
||||
}
|
||||
@@ -444,7 +452,7 @@ void us_poll_change(struct us_poll_t *p, struct us_loop_t *loop, int events) {
|
||||
kqueue_change(loop->fd, p->state.fd, old_events, events, p);
|
||||
#endif
|
||||
/* Set all removed events to null-polls in pending ready poll list */
|
||||
// us_internal_loop_update_pending_ready_polls(loop, p, p, old_events, events);
|
||||
us_internal_loop_update_pending_ready_polls(loop, p, p, old_events, events);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -71,6 +71,11 @@ void us_poll_init(struct us_poll_t *p, LIBUS_SOCKET_DESCRIPTOR fd,
|
||||
}
|
||||
|
||||
void us_poll_free(struct us_poll_t *p, struct us_loop_t *loop) {
|
||||
// poll was resized and dont own uv_poll_t anymore
|
||||
if(!p->uv_p) {
|
||||
free(p);
|
||||
return;
|
||||
}
|
||||
/* The idea here is like so; in us_poll_stop we call uv_close after setting
|
||||
* data of uv-poll to 0. This means that in close_cb_free we call free on 0
|
||||
* with does nothing, since us_poll_stop should not really free the poll.
|
||||
@@ -86,6 +91,7 @@ void us_poll_free(struct us_poll_t *p, struct us_loop_t *loop) {
|
||||
}
|
||||
|
||||
void us_poll_start(struct us_poll_t *p, struct us_loop_t *loop, int events) {
|
||||
if(!p->uv_p) return;
|
||||
p->poll_type = us_internal_poll_type(p) |
|
||||
((events & LIBUS_SOCKET_READABLE) ? POLL_TYPE_POLLING_IN : 0) |
|
||||
((events & LIBUS_SOCKET_WRITABLE) ? POLL_TYPE_POLLING_OUT : 0);
|
||||
@@ -99,6 +105,7 @@ void us_poll_start(struct us_poll_t *p, struct us_loop_t *loop, int events) {
|
||||
}
|
||||
|
||||
void us_poll_change(struct us_poll_t *p, struct us_loop_t *loop, int events) {
|
||||
if(!p->uv_p) return;
|
||||
if (us_poll_events(p) != events) {
|
||||
p->poll_type =
|
||||
us_internal_poll_type(p) |
|
||||
@@ -109,6 +116,7 @@ void us_poll_change(struct us_poll_t *p, struct us_loop_t *loop, int events) {
|
||||
}
|
||||
|
||||
void us_poll_stop(struct us_poll_t *p, struct us_loop_t *loop) {
|
||||
if(!p->uv_p) return;
|
||||
uv_poll_stop(p->uv_p);
|
||||
|
||||
/* We normally only want to close the poll here, not free it. But if we stop
|
||||
@@ -217,10 +225,20 @@ struct us_poll_t *us_create_poll(struct us_loop_t *loop, int fallthrough,
|
||||
/* If we update our block position we have to update the uv_poll data to point
|
||||
* to us */
|
||||
struct us_poll_t *us_poll_resize(struct us_poll_t *p, struct us_loop_t *loop,
|
||||
unsigned int ext_size) {
|
||||
unsigned int old_ext_size, unsigned int ext_size) {
|
||||
|
||||
// cannot resize if we dont own uv_poll_t
|
||||
if(!p->uv_p) return p;
|
||||
|
||||
unsigned int old_size = sizeof(struct us_poll_t) + old_ext_size;
|
||||
unsigned int new_size = sizeof(struct us_poll_t) + ext_size;
|
||||
if(new_size <= old_size) return p;
|
||||
|
||||
struct us_poll_t *new_p = calloc(1, new_size);
|
||||
memcpy(new_p, p, old_size);
|
||||
|
||||
struct us_poll_t *new_p = realloc(p, sizeof(struct us_poll_t) + ext_size);
|
||||
new_p->uv_p->data = new_p;
|
||||
p->uv_p = NULL;
|
||||
|
||||
return new_p;
|
||||
}
|
||||
|
||||
@@ -170,6 +170,14 @@ struct us_socket_flags {
|
||||
unsigned char low_prio_state: 2;
|
||||
/* If true, the socket should be read using readmsg to support receiving file descriptors */
|
||||
bool is_ipc: 1;
|
||||
/* If true, the socket has been closed */
|
||||
bool is_closed: 1;
|
||||
/* If true, the socket was reallocated during adoption */
|
||||
bool adopted: 1;
|
||||
/* If true, the socket is a TLS socket */
|
||||
bool is_tls: 1;
|
||||
/* If true, the last write to this socket failed (would block) */
|
||||
bool last_write_failed: 1;
|
||||
|
||||
} __attribute__((packed));
|
||||
|
||||
@@ -435,11 +443,11 @@ void us_internal_ssl_socket_shutdown(us_internal_ssl_socket_r s);
|
||||
|
||||
struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_adopt_socket(
|
||||
us_internal_ssl_socket_context_r context,
|
||||
us_internal_ssl_socket_r s, int ext_size);
|
||||
us_internal_ssl_socket_r s, int old_ext_size, int ext_size);
|
||||
|
||||
struct us_internal_ssl_socket_t *us_internal_ssl_socket_wrap_with_tls(
|
||||
us_socket_r s, struct us_bun_socket_context_options_t options,
|
||||
struct us_socket_events_t events, int socket_ext_size);
|
||||
struct us_socket_events_t events, int old_socket_ext_size, int socket_ext_size);
|
||||
struct us_internal_ssl_socket_context_t *
|
||||
us_internal_create_child_ssl_socket_context(
|
||||
us_internal_ssl_socket_context_r context, int context_ext_size);
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user