Compare commits


6 Commits

Author      SHA1        Message                                                                        Date
Don Isaac   c4750e2e43  Merge branch 'main' of github.com:oven-sh/bun into don/refactor/debug-module  2025-03-03 12:56:04 -08:00
Don Isaac   fe793b4f9f  fix assertComptime call                                                        2025-03-03 12:55:59 -08:00
Don Isaac   7895f9dad0  refactor: add debug module                                                     2025-03-02 13:50:01 -08:00
Don Isaac   e11cefba2e  fix assertWithLocation on windows                                              2025-03-02 13:08:29 -08:00
DonIsaac    4be76c491c  bun run zig-format                                                             2025-03-02 20:32:32 +00:00
Don Isaac   a898b1dbea  chore: add assertf and assertRelease                                           2025-03-02 12:29:18 -08:00
2648 changed files with 126767 additions and 251855 deletions

View File

@@ -1,12 +1,12 @@
ARG LLVM_VERSION="19"
ARG REPORTED_LLVM_VERSION="19.1.7"
ARG LLVM_VERSION="18"
ARG REPORTED_LLVM_VERSION="18.1.8"
ARG OLD_BUN_VERSION="1.1.38"
ARG DEFAULT_CFLAGS="-mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -ffunction-sections -fdata-sections -faddrsig -fno-unwind-tables -fno-asynchronous-unwind-tables"
ARG DEFAULT_CXXFLAGS="-flto=full -fwhole-program-vtables -fforce-emit-vtables"
ARG BUILDKITE_AGENT_TAGS="queue=linux,os=linux,arch=${TARGETARCH}"
FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-arm64
-FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-amd64
+FROM --platform=$BUILDPLATFORM ubuntu:18.04 as base-amd64
FROM base-$TARGETARCH as base
ARG LLVM_VERSION

View File

@@ -107,9 +107,9 @@ const buildPlatforms = [
{ os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21" },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" },
{ os: "windows", arch: "x64", release: "2019" },
{ os: "windows", arch: "x64", baseline: true, release: "2019" },
];
@@ -134,9 +134,9 @@ const testPlatforms = [
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" },
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "22.04", tier: "previous" },
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "20.04", tier: "oldest" },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21", tier: "latest" },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20", tier: "latest" },
{ os: "windows", arch: "x64", release: "2019", tier: "oldest" },
{ os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" },
];
@@ -323,7 +323,6 @@ function getCppAgent(platform, options) {
*/
function getZigAgent(platform, options) {
-const { arch } = platform;
return {
queue: "build-zig",
};
@@ -384,21 +383,15 @@ function getBuildEnv(target, options) {
const { canary } = options;
const revision = typeof canary === "number" ? canary : 1;
-const isMusl = abi === "musl";
-let CMAKE_BUILD_TYPE = release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo";
-if (isMusl && release) {
-CMAKE_BUILD_TYPE = "MinSizeRel";
-}
return {
-CMAKE_BUILD_TYPE,
+CMAKE_BUILD_TYPE: release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo",
ENABLE_BASELINE: baseline ? "ON" : "OFF",
ENABLE_CANARY: revision > 0 ? "ON" : "OFF",
CANARY_REVISION: revision,
ENABLE_ASSERTIONS: release ? "OFF" : "ON",
ENABLE_LOGS: release ? "OFF" : "ON",
-ABI: isMusl ? "musl" : undefined,
+ABI: abi === "musl" ? "musl" : undefined,
CMAKE_TLS_VERIFY: "0",
};
}
@@ -471,7 +464,7 @@ function getBuildZigStep(platform, options) {
cancel_on_build_failing: isMergeQueue(),
env: getBuildEnv(platform, options),
command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`,
-timeout_in_minutes: 35,
+timeout_in_minutes: 25,
};
}

View File

@@ -201,8 +201,6 @@ function create_release() {
local artifacts=(
bun-darwin-aarch64.zip
bun-darwin-aarch64-profile.zip
bun-darwin-x64.zip
bun-darwin-x64-profile.zip
bun-linux-aarch64.zip
bun-linux-aarch64-profile.zip
bun-linux-x64.zip

View File

@@ -1,14 +1,13 @@
---
description: JavaScript class implemented in C++
globs: *.cpp
alwaysApply: false
---
# Implementing JavaScript classes in C++
If there is a publicly accessible Constructor and Prototype, then there are 3 classes (a skeleton sketch follows this list):
-- IF there are C++ class members we need a destructor, so `class Foo : public JSC::DestructibleObject`, if no C++ class fields (only JS properties) then we don't need a class at all usually. We can instead use JSC::constructEmptyObject(vm, structure) and `putDirectOffset` like in [NodeFSStatBinding.cpp](mdc:src/bun.js/bindings/NodeFSStatBinding.cpp).
+- IF there are C++ class members we need a destructor, so `class Foo : public JSC::DestructibleObject`, if no C++ class fields (only JS properties) then we don't need a class at all usually. We can instead use JSC::constructEmptyObject(vm, structure) and `putDirectOffset` like in [NodeFSBinding.cpp](mdc:src/bun.js/bindings/NodeFSBinding.cpp).
- class FooPrototype : public JSC::JSNonFinalObject
- class FooConstructor : public JSC::InternalFunction
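A minimal skeleton of how those three declarations typically relate (names illustrative, bodies elided; the full examples below show the real details):
```c++
// Sketch only: the instance class holds native fields, the prototype holds
// methods and accessors, and the constructor implements `new Foo()`.
class Foo : public JSC::DestructibleObject {
    // C++ data members live here; requires an iso subspace (see below).
};
class FooPrototype : public JSC::JSNonFinalObject {
    // Methods and getters/setters are defined here via a HashTableValue table.
};
class FooConstructor : public JSC::InternalFunction {
    // construct/call callbacks that create Foo instances.
};
```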
@@ -19,7 +18,6 @@ If there are C++ fields on the Foo class, the Foo class will need an iso subspac
Usually you'll need to #include "root.h" at the top of C++ files or you'll get lint errors.
Generally, defining the subspace looks like this:
```c++
class Foo : public JSC::DestructibleObject {
@@ -47,7 +45,6 @@ It's better to put it in the .cpp file instead of the .h file, when possible.
## Defining properties
Define properties on the prototype. Use a const HashTableValues like this:
```C++
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckEmail);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckHost);
@@ -161,7 +158,6 @@ void JSX509CertificatePrototype::finishCreation(VM& vm)
```
### Getter definition:
```C++
JSC_DEFINE_CUSTOM_GETTER(jsX509CertificateGetter_ca, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName))
@@ -216,6 +212,7 @@ JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON, (JSGlobalObject * glo
}
```
### Constructor definition
```C++
@@ -262,6 +259,7 @@ private:
};
```
### Structure caching
If there's a class, prototype, and constructor:
@@ -281,7 +279,6 @@ void GlobalObject::finishCreation(VM& vm) {
```
Then, implement the function that creates the structure:
```c++
void setupX509CertificateClassStructure(LazyClassStructure::Initializer& init)
{
@@ -304,12 +301,11 @@ If there's only a class, use `JSC::LazyProperty<JSGlobalObject, Structure>` inst
1. Add the `JSC::LazyProperty<JSGlobalObject, Structure>` to @ZigGlobalObject.h
2. Initialize the class structure in @ZigGlobalObject.cpp in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the lazy property in visitChildren in @ZigGlobalObject.cpp in `void GlobalObject::visitChildrenImpl`
-void GlobalObject::finishCreation(VM& vm) {
-  // ...
+void GlobalObject::finishCreation(VM& vm) {
+  // ...
this.m_myLazyProperty.initLater([](const JSC::LazyProperty<JSC::JSGlobalObject, JSC::Structure>::Initializer& init) {
-  init.set(Bun::initMyStructure(init.vm, reinterpret_cast<Zig::GlobalObject\*>(init.owner)));
-});
+  init.set(Bun::initMyStructure(init.vm, reinterpret_cast<Zig::GlobalObject*>(init.owner)));
+});
```
Then, implement the function that creates the structure:
@@ -320,7 +316,7 @@ Structure* setupX509CertificateStructure(JSC::VM &vm, Zig::GlobalObject* globalO
auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);
// If there is no prototype or it only has
auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
init.setPrototype(prototype);
@@ -329,6 +325,7 @@ Structure* setupX509CertificateStructure(JSC::VM &vm, Zig::GlobalObject* globalO
}
```
Then, use the structure by calling `globalObject.m_myStructureName.get(globalObject)`
```C++
@@ -381,14 +378,12 @@ extern "C" JSC::EncodedJSValue Bun__JSBigIntStatsObjectConstructor(Zig::GlobalOb
```
Zig:
```zig
extern "c" fn Bun__JSBigIntStatsObjectConstructor(*JSC.JSGlobalObject) JSC.JSValue;
pub const getBigIntStatsConstructor = Bun__JSBigIntStatsObjectConstructor;
```
-To create an object (instance) of a JS class defined in C++ from Zig, follow the \_\_toJS convention like this:
+To create an object (instance) of a JS class defined in C++ from Zig, follow the __toJS convention like this:
```c++
// X509* is whatever we need to create the object
extern "C" EncodedJSValue Bun__X509__toJS(Zig::GlobalObject* globalObject, X509* cert)
@@ -400,13 +395,12 @@ extern "C" EncodedJSValue Bun__X509__toJS(Zig::GlobalObject* globalObject, X509*
```
And from Zig:
```zig
const X509 = opaque {
// ... class
extern fn Bun__X509__toJS(*JSC.JSGlobalObject, *X509) JSC.JSValue;
pub fn toJS(this: *X509, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
return Bun__X509__toJS(globalObject, this);
}

View File

@@ -1,498 +0,0 @@
---
description: How Zig works with JavaScriptCore bindings generator
globs:
alwaysApply: false
---
# Bun's JavaScriptCore Class Bindings Generator
This document explains how Bun's class bindings generator works to bridge Zig and JavaScript code through JavaScriptCore (JSC).
## Architecture Overview
Bun's binding system creates a seamless bridge between JavaScript and Zig, allowing Zig implementations to be exposed as JavaScript classes. The system has several key components:
1. **Zig Implementation** (.zig files)
2. **JavaScript Interface Definition** (.classes.ts files)
3. **Generated Code** (C++/Zig files that connect everything)
## Class Definition Files
### JavaScript Interface (.classes.ts)
The `.classes.ts` files define the JavaScript API using a declarative approach:
```typescript
// Example: encoding.classes.ts
define({
name: "TextDecoder",
constructor: true,
JSType: "object",
finalize: true,
proto: {
decode: {
// Function definition
args: 1,
},
encoding: {
// Getter with caching
getter: true,
cache: true,
},
fatal: {
// Read-only property
getter: true,
},
ignoreBOM: {
// Read-only property
getter: true,
}
}
});
```
Each class definition specifies:
- The class name
- Whether it has a constructor
- JavaScript type (object, function, etc.)
- Properties and methods in the `proto` field
- Caching strategy for properties
- Finalization requirements
### Zig Implementation (.zig)
The Zig files implement the native functionality:
```zig
// Example: TextDecoder.zig
pub const TextDecoder = struct {
// Expose generated bindings as `js` namespace with trait conversion methods
pub const js = JSC.Codegen.JSTextDecoder;
pub const toJS = js.toJS;
pub const fromJS = js.fromJS;
pub const fromJSDirect = js.fromJSDirect;
// Internal state
encoding: []const u8,
fatal: bool,
ignoreBOM: bool,
// Constructor implementation - note use of globalObject
pub fn constructor(
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame,
) bun.JSError!*TextDecoder {
// Implementation
return bun.new(TextDecoder, .{
// Fields
});
}
// Prototype methods - note return type includes JSError
pub fn decode(
this: *TextDecoder,
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
// Implementation
}
// Getters
pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
}
pub fn getFatal(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
return JSC.JSValue.jsBoolean(this.fatal);
}
// Cleanup - note standard pattern of using deinit/deref
fn deinit(this: *TextDecoder) void {
// Release any retained resources
// Free the pointer at the end.
bun.destroy(this);
}
// Finalize - called by JS garbage collector. This should call deinit, or deref if reference counted.
pub fn finalize(this: *TextDecoder) void {
this.deinit();
}
};
```
Key components in the Zig file:
- The struct containing native state
- `pub const js = JSC.Codegen.JS<ClassName>` to include generated code
- Constructor and methods using `bun.JSError!JSValue` return type for proper error handling
- Consistent use of `globalObject` parameter name instead of `ctx`
- Methods matching the JavaScript interface
- Getters/setters for properties
- Proper resource cleanup pattern with `deinit()` and `finalize()`
## Code Generation System
The binding generator produces C++ code that connects JavaScript and Zig:
1. **JSC Class Structure**: Creates C++ classes for the JS object, prototype, and constructor
2. **Memory Management**: Handles GC integration through JSC's WriteBarrier
3. **Method Binding**: Connects JS function calls to Zig implementations
4. **Type Conversion**: Converts between JS values and Zig types
5. **Property Caching**: Implements the caching system for properties
The generated C++ code includes (one such callback is sketched after this list):
- A JSC wrapper class (`JSTextDecoder`)
- A prototype class (`JSTextDecoderPrototype`)
- A constructor function (`JSTextDecoderConstructor`)
- Function bindings (`TextDecoderPrototype__decodeCallback`)
- Property getters/setters (`TextDecoderPrototype__encodingGetterWrap`)
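As a hedged sketch (assuming an extern Zig symbol named `TextDecoderPrototype__decode`, matching the declaration shown later in this document), a generated method callback looks roughly like this:
```cpp
// Sketch of a generated method callback; the real generated code adds more
// bookkeeping. Assumes this extern symbol is implemented in Zig:
extern "C" JSC::EncodedJSValue TextDecoderPrototype__decode(void* zigThis,
    JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame);

JSC_DEFINE_HOST_FUNCTION(TextDecoderPrototype__decodeCallback,
    (JSC::JSGlobalObject * lexicalGlobalObject, JSC::CallFrame* callFrame))
{
    auto& vm = JSC::getVM(lexicalGlobalObject);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // Recover the wrapper, rejecting foreign `this` values.
    JSTextDecoder* thisObject = jsDynamicCast<JSTextDecoder*>(callFrame->thisValue());
    if (UNLIKELY(!thisObject)) {
        scope.throwException(lexicalGlobalObject,
            Bun::createInvalidThisError(lexicalGlobalObject, callFrame->thisValue(), "TextDecoder"_s));
        return {};
    }

    // Forward to Zig; a thrown bun.JSError surfaces here as a pending exception.
    RELEASE_AND_RETURN(scope, TextDecoderPrototype__decode(thisObject->wrapped(), lexicalGlobalObject, callFrame));
}
```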
## CallFrame Access
The `CallFrame` object provides access to JavaScript execution context:
```zig
pub fn decode(
this: *TextDecoder,
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame
) bun.JSError!JSC.JSValue {
// Get arguments
const input = callFrame.argument(0);
const options = callFrame.argument(1);
// Get this value
const thisValue = callFrame.thisValue();
// Implementation with error handling
if (input.isUndefinedOrNull()) {
return globalObject.throw("Input cannot be null or undefined", .{});
}
// Return value or throw error
return JSC.JSValue.jsString(globalObject, "result");
}
```
CallFrame methods include:
- `argument(i)`: Get the i-th argument
- `argumentCount()`: Get the number of arguments
- `thisValue()`: Get the `this` value
- `callee()`: Get the function being called
## Property Caching and GC-Owned Values
The `cache: true` option in property definitions enables JSC's WriteBarrier to efficiently store values:
```typescript
encoding: {
getter: true,
cache: true, // Enable caching
}
```
### C++ Implementation
In the generated C++ code, caching uses JSC's WriteBarrier:
```cpp
JSC_DEFINE_CUSTOM_GETTER(TextDecoderPrototype__encodingGetterWrap, (...)) {
auto& vm = JSC::getVM(lexicalGlobalObject);
Zig::GlobalObject *globalObject = reinterpret_cast<Zig::GlobalObject*>(lexicalGlobalObject);
auto throwScope = DECLARE_THROW_SCOPE(vm);
JSTextDecoder* thisObject = jsCast<JSTextDecoder*>(JSValue::decode(encodedThisValue));
JSC::EnsureStillAliveScope thisArg = JSC::EnsureStillAliveScope(thisObject);
// Check for cached value and return if present
if (JSValue cachedValue = thisObject->m_encoding.get())
return JSValue::encode(cachedValue);
// Get value from Zig implementation
JSC::JSValue result = JSC::JSValue::decode(
TextDecoderPrototype__getEncoding(thisObject->wrapped(), globalObject)
);
RETURN_IF_EXCEPTION(throwScope, {});
// Store in cache for future access
thisObject->m_encoding.set(vm, thisObject, result);
RELEASE_AND_RETURN(throwScope, JSValue::encode(result));
}
```
### Zig Accessor Functions
For each cached property, the generator creates Zig accessor functions that allow Zig code to work with these GC-owned values:
```zig
// External function declarations
extern fn TextDecoderPrototype__encodingSetCachedValue(JSC.JSValue, *JSC.JSGlobalObject, JSC.JSValue) callconv(JSC.conv) void;
extern fn TextDecoderPrototype__encodingGetCachedValue(JSC.JSValue) callconv(JSC.conv) JSC.JSValue;
/// `TextDecoder.encoding` setter
/// This value will be visited by the garbage collector.
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void {
JSC.markBinding(@src());
TextDecoderPrototype__encodingSetCachedValue(thisValue, globalObject, value);
}
/// `TextDecoder.encoding` getter
/// This value will be visited by the garbage collector.
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue {
JSC.markBinding(@src());
const result = TextDecoderPrototype__encodingGetCachedValue(thisValue);
if (result == .zero)
return null;
return result;
}
```
### Benefits of GC-Owned Values
This system provides several key benefits:
1. **Automatic Memory Management**: The JavaScriptCore GC tracks and manages these values
2. **Proper Garbage Collection**: The WriteBarrier ensures values are properly visited during GC
3. **Consistent Access**: Zig code can easily get/set these cached JS values
4. **Performance**: Cached values avoid repeated computation or serialization
### Use Cases
GC-owned cached values are particularly useful for:
1. **Computed Properties**: Store expensive computation results
2. **Lazily Created Objects**: Create objects only when needed, then cache them
3. **References to Other Objects**: Store references to other JS objects that need GC tracking
4. **Memoization**: Cache results based on input parameters
The WriteBarrier mechanism ensures that any JS values stored in this way are properly tracked by the garbage collector.
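A minimal usage sketch, assuming the generated accessors above and a `thisValue` passed through from the binding layer (with `cache: true` the generated C++ getter wrap already performs this check, so this pattern mainly matters when Zig materializes the value itself):
```zig
// Hypothetical lazily-computed, GC-cached property backed by the generated accessors.
pub fn getEncoding(this: *TextDecoder, thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
    // Fast path: return the value the GC is already tracking.
    if (encodingGetCached(thisValue)) |cached| return cached;

    // Slow path: build the JS value once, then hand it to the WriteBarrier.
    const value = JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
    encodingSetCached(thisValue, globalObject, value);
    return value;
}
```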
## Memory Management and Finalization
The binding system handles memory management across the JavaScript/Zig boundary:
1. **Object Creation**: JavaScript `new TextDecoder()` creates both a JS wrapper and a Zig struct
2. **Reference Tracking**: JSC's GC tracks all JS references to the object
3. **Finalization**: When the JS object is collected, the finalizer releases Zig resources
Bun uses a consistent pattern for resource cleanup:
```zig
// Resource cleanup method - separate from finalization
pub fn deinit(this: *TextDecoder) void {
// Release resources like strings
this._encoding.deref(); // String deref pattern
// Free any buffers
if (this.buffer) |buffer| {
bun.default_allocator.free(buffer);
}
}
// Called by the GC when object is collected
pub fn finalize(this: *TextDecoder) void {
JSC.markBinding(@src()); // For debugging
this.deinit(); // Clean up resources
bun.default_allocator.destroy(this); // Free the object itself
}
```
Some objects that hold references to other JS objects use `.deref()` instead:
```zig
pub fn finalize(this: *SocketAddress) void {
JSC.markBinding(@src());
this._presentation.deref(); // Release references
this.destroy();
}
```
## Error Handling with JSError
Bun uses `bun.JSError!JSValue` return type for proper error handling:
```zig
pub fn decode(
this: *TextDecoder,
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame
) bun.JSError!JSC.JSValue {
// Throwing an error
if (callFrame.argumentCount() < 1) {
return globalObject.throw("Missing required argument", .{});
}
// Or returning a success value
return JSC.JSValue.jsString(globalObject, "Success!");
}
```
This pattern (see the propagation sketch after this list) allows Zig functions to:
1. Return JavaScript values on success
2. Throw JavaScript exceptions on error
3. Propagate errors automatically through the call stack
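For instance, a helper that also returns `bun.JSError!JSC.JSValue` can be invoked with `try`, and a thrown error unwinds out of `decode` with no manual exception plumbing (a minimal sketch reusing the shapes above; `decodeArgument` is a hypothetical helper):
```zig
// Hypothetical helper: any error it throws propagates to the caller.
fn decodeArgument(globalObject: *JSGlobalObject, input: JSC.JSValue) bun.JSError!JSC.JSValue {
    if (input.isUndefinedOrNull()) {
        return globalObject.throw("Input cannot be null or undefined", .{});
    }
    return JSC.JSValue.jsString(globalObject, "result");
}

pub fn decode(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
    _ = this;
    // `try` forwards the JSError; the generated C++ callback sees a pending exception.
    return try decodeArgument(globalObject, callFrame.argument(0));
}
```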
## Type Safety and Error Handling
The binding system includes robust error handling:
```cpp
// Example of type checking in generated code
JSTextDecoder* thisObject = jsDynamicCast<JSTextDecoder*>(callFrame->thisValue());
if (UNLIKELY(!thisObject)) {
scope.throwException(lexicalGlobalObject,
Bun::createInvalidThisError(lexicalGlobalObject, callFrame->thisValue(), "TextDecoder"_s));
return {};
}
```
## Prototypal Inheritance
The binding system creates proper JavaScript prototype chains:
1. **Constructor**: JSTextDecoderConstructor with standard .prototype property
2. **Prototype**: JSTextDecoderPrototype with methods and properties
3. **Instances**: Each JSTextDecoder instance with __proto__ pointing to prototype
This ensures JavaScript inheritance works as expected:
```cpp
// From generated code
void JSTextDecoderConstructor::finishCreation(VM& vm, JSC::JSGlobalObject* globalObject, JSTextDecoderPrototype* prototype)
{
Base::finishCreation(vm, 0, "TextDecoder"_s, PropertyAdditionMode::WithoutStructureTransition);
// Set up the prototype chain
putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly);
ASSERT(inherits(info()));
}
```
## Performance Considerations
The binding system is optimized for performance:
1. **Direct Pointer Access**: JavaScript objects maintain a direct pointer to Zig objects
2. **Property Caching**: WriteBarrier caching avoids repeated native calls for stable properties
3. **Memory Management**: JSC garbage collection integrated with Zig memory management
4. **Type Conversion**: Fast paths for common JavaScript/Zig type conversions
## Creating a New Class Binding
To create a new class binding in Bun:
1. **Define the class interface** in a `.classes.ts` file:
```typescript
define({
name: "MyClass",
constructor: true,
finalize: true,
proto: {
myMethod: {
args: 1,
},
myProperty: {
getter: true,
cache: true,
}
}
});
```
2. **Implement the native functionality** in a `.zig` file:
```zig
pub const MyClass = struct {
// Generated bindings
pub const js = JSC.Codegen.JSMyClass;
pub const toJS = js.toJS;
pub const fromJS = js.fromJS;
pub const fromJSDirect = js.fromJSDirect;
// State
value: []const u8,
pub const new = bun.TrivialNew(@This());
// Constructor
pub fn constructor(
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame,
) bun.JSError!*MyClass {
const arg = callFrame.argument(0);
// Implementation
}
// Method
pub fn myMethod(
this: *MyClass,
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
// Implementation
}
// Getter
pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue {
return JSC.JSValue.jsString(globalObject, this.value);
}
// Resource cleanup
pub fn deinit(this: *MyClass) void {
// Clean up resources
}
pub fn finalize(this: *MyClass) void {
this.deinit();
bun.destroy(this);
}
};
```
3. **The binding generator** creates all necessary C++ and Zig glue code to connect JavaScript and Zig, including:
- C++ class definitions
- Method and property bindings
- Memory management utilities
- GC integration code
## Generated Code Structure
The binding generator produces several components:
### 1. C++ Classes
For each Zig class, the system generates:
- **JS<Class>**: Main wrapper that holds a pointer to the Zig object (`JSTextDecoder`)
- **JS<Class>Prototype**: Contains methods and properties (`JSTextDecoderPrototype`)
- **JS<Class>Constructor**: Implementation of the JavaScript constructor (`JSTextDecoderConstructor`)
### 2. C++ Methods and Properties
- **Method Callbacks**: `TextDecoderPrototype__decodeCallback`
- **Property Getters/Setters**: `TextDecoderPrototype__encodingGetterWrap`
- **Initialization Functions**: `finishCreation` methods for setting up the class
### 3. Zig Bindings
- **External Function Declarations**:
```zig
extern fn TextDecoderPrototype__decode(*TextDecoder, *JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSC.EncodedJSValue;
```
- **Cached Value Accessors**:
```zig
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue { ... }
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { ... }
```
- **Constructor Helpers**:
```zig
pub fn create(globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { ... }
```
### 4. GC Integration
- **Memory Cost Calculation**: `estimatedSize` method
- **Child Visitor Methods**: `visitChildrenImpl` and `visitAdditionalChildren`
- **Heap Analysis**: `analyzeHeap` for debugging memory issues
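As a hedged illustration of the memory-cost hook (the exact opt-in mechanism in `.classes.ts` is assumed here), the Zig struct can report how many native bytes an instance retains so the GC can schedule collections sensibly:
```zig
// Assumed hook: called by the generated `estimatedSize` glue to report
// native memory held by this instance (the struct itself plus owned buffers).
pub fn estimatedSize(this: *TextDecoder) usize {
    return @sizeOf(TextDecoder) + this.encoding.len;
}
```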
This architecture makes it possible to implement high-performance native functionality in Zig while exposing a clean, idiomatic JavaScript API to users.

View File

@@ -1,8 +0,0 @@
# Add commits to ignore in `git blame`. This allows large stylistic refactors to
# avoid mucking up blames.
#
# To configure git to use this, run:
#
# git config blame.ignoreRevsFile .git-blame-ignore-revs
#
4ec410e0d7c5f6a712c323444edbf56b48d432d8 # make @import("bun") work in zig (#19096)

View File

@@ -28,7 +28,7 @@ This adds a new flag --bail to bun test. When set, it will stop running tests af
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
- [ ] I included a test for the new code, or an existing test covers it
-- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
+- [ ] JSValue used outside outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
- [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
-->

View File

@@ -11,8 +11,8 @@ on:
env:
BUN_VERSION: "1.2.0"
LLVM_VERSION: "19.1.7"
LLVM_VERSION_MAJOR: "19"
LLVM_VERSION: "18.1.8"
LLVM_VERSION_MAJOR: "18"
jobs:
clang-format:

View File

@@ -11,8 +11,8 @@ on:
env:
BUN_VERSION: "1.2.0"
LLVM_VERSION: "19.1.7"
LLVM_VERSION_MAJOR: "19"
LLVM_VERSION: "18.1.8"
LLVM_VERSION_MAJOR: "18"
jobs:
clang-tidy:

View File

@@ -5,7 +5,8 @@ on:
workflow_dispatch:
env:
BUN_VERSION: "1.2.10"
BUN_VERSION: "1.2.0"
OXLINT_VERSION: "0.15.0"
jobs:
lint-js:
@@ -18,4 +19,4 @@ jobs:
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Lint
-run: bun lint
+run: bunx oxlint --config oxlint.json --quiet --format github

View File

@@ -1,55 +0,0 @@
name: Packages CI
on:
push:
branches:
- main
paths:
- "packages/**"
- .prettierrc
- .prettierignore
- tsconfig.json
- oxlint.json
- "!**/*.md"
pull_request:
branches:
- main
paths:
- "packages/**"
- .prettierrc
- .prettierignore
- tsconfig.json
- oxlint.json
- "!**/*.md"
env:
BUN_VERSION: "canary"
jobs:
bun-plugin-svelte:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Install dependencies
run: |
bun install
pushd ./packages/bun-plugin-svelte && bun install
- name: Lint
run: |
bunx oxlint@0.15 --format github --deny-warnings
bunx prettier --config ../../.prettierrc --check .
working-directory: ./packages/bun-plugin-svelte
- name: Check types
run: bun check:types
working-directory: ./packages/bun-plugin-svelte
- name: Test
run: bun test
working-directory: ./packages/bun-plugin-svelte

View File

@@ -3,6 +3,7 @@ name: Lint
permissions:
contents: read
env:
+LLVM_VERSION: 16
BUN_VERSION: "1.2.0"
on:

View File

@@ -50,14 +50,9 @@ jobs:
exit 1
fi
-LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
-if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
-echo "Error: Could not fetch SHA for tag $LATEST_TAG"
-exit 1
-fi
-LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
+LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
-echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
+echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
@@ -94,6 +89,4 @@ jobs:
Updates c-ares to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/c-ares/c-ares/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)

View File

@@ -50,14 +50,9 @@ jobs:
exit 1
fi
-LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
-if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
-echo "Error: Could not fetch SHA for tag $LATEST_TAG"
-exit 1
-fi
-LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
+LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
-echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
+echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
@@ -94,6 +89,4 @@ jobs:
Updates libarchive to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/libarchive/libarchive/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libarchive.yml)

View File

@@ -50,14 +50,9 @@ jobs:
exit 1
fi
-LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
-if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
-echo "Error: Could not fetch SHA for tag $LATEST_TAG"
-exit 1
-fi
-LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
+LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
-echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
+echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
@@ -94,6 +89,4 @@ jobs:
Updates libdeflate to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/ebiggers/libdeflate/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libdeflate.yml)

View File

@@ -50,14 +50,9 @@ jobs:
exit 1
fi
-LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
-if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
-echo "Error: Could not fetch SHA for tag $LATEST_TAG"
-exit 1
-fi
-LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
+LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
-echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
+echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
@@ -94,6 +89,4 @@ jobs:
Updates lolhtml to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/cloudflare/lol-html/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lolhtml.yml)

View File

@@ -50,14 +50,9 @@ jobs:
exit 1
fi
-LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
-if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
-echo "Error: Could not fetch SHA for tag $LATEST_TAG"
-exit 1
-fi
-LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
+LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
-echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
+echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
@@ -94,6 +89,4 @@ jobs:
Updates lshpack to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/litespeedtech/ls-hpack/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lshpack.yml)

View File

@@ -1,82 +0,0 @@
name: Daily Root Certs Update Check
on:
schedule:
- cron: "0 0 * * *" # Runs at 00:00 UTC every day
workflow_dispatch: # Allows manual trigger
jobs:
check-and-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Generate root certs and capture output
id: generate-certs
run: |
cd packages/bun-usockets/
OUTPUT=$(bun generate-root-certs.mjs -v)
echo "cert_output<<EOF" >> $GITHUB_ENV
echo "$OUTPUT" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
- name: Check for changes and stage files
id: check-changes
run: |
if [[ -n "$(git status --porcelain)" ]]; then
echo "Found changes, staging modified files..."
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
# Get list of modified files and add them
git status --porcelain | while read -r status file; do
# Remove leading status and whitespace
file=$(echo "$file" | sed 's/^.* //')
echo "Adding changed file: $file"
git add "$file"
done
echo "changes=true" >> $GITHUB_OUTPUT
# Store the list of changed files
CHANGED_FILES=$(git status --porcelain)
echo "changed_files<<EOF" >> $GITHUB_ENV
echo "$CHANGED_FILES" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
else
echo "No changes detected"
echo "changes=false" >> $GITHUB_OUTPUT
fi
- name: Create Pull Request
if: steps.check-changes.outputs.changes == 'true'
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "update(root_certs): Update root certificates $(date +'%Y-%m-%d')"
title: "update(root_certs) $(date +'%Y-%m-%d')"
body: |
Automated root certificates update
${{ env.cert_output }}
## Changed Files:
```
${{ env.changed_files }}
```
branch: certs/update-root-certs-${{ github.run_number }}
base: main
delete-branch: true
labels:
- "automation"
- "root-certs"

View File

@@ -106,6 +106,4 @@ jobs:
Updates SQLite to version ${{ steps.check-version.outputs.latest }}
Compare: https://sqlite.org/src/vdiff?from=${{ steps.check-version.outputs.current }}&to=${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-sqlite3.yml)

.gitignore vendored (1 changed line)
View File

@@ -153,7 +153,6 @@ test/cli/install/registry/packages/publish-pkg-*
test/cli/install/registry/packages/@secret/publish-pkg-8
test/js/third_party/prisma/prisma/sqlite/dev.db-journal
tmp
-codegen-for-zig-team.tar.gz
# Dependencies
/vendor

View File

@@ -1,16 +1,4 @@
-# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
-# (-p), but do not stop the process (-s) or notify the user (-n).
-#
-# JSC's garbage collector sends this signal (as configured by Bun WebKit in
-# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
-# it. So stopping the process would just create noise when debugging any long-running script.
-process handle -p true -s false -n false SIGPWR
-# command script import vendor/zig/tools/lldb_pretty_printers.py
-command script import vendor/WebKit/Tools/lldb/lldb_webkit.py
-command script import misctools/lldb/lldb_pretty_printers.py
-type category enable zig.lang
-type category enable zig.std
+command script import misctools/lldb/lldb_webkit.py
+command script delete btjs
+command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}
+# type summary add --summary-string "${var} | inner=${var[0-30]}, source=${var[33-64]}, tag=${var[31-32]}" "unsigned long"

.vscode/launch.json generated vendored (51 changed lines)
View File

@@ -22,6 +22,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -37,6 +38,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -58,6 +60,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -73,6 +76,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -88,6 +92,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -103,6 +108,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -119,6 +125,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -140,6 +147,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -161,6 +169,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -179,6 +188,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -193,6 +203,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -210,6 +221,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -224,6 +236,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -240,6 +253,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -261,6 +275,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -282,6 +297,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -297,6 +313,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -312,6 +329,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -327,6 +345,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -342,6 +361,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -358,6 +378,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -379,6 +400,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -399,6 +421,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
// bun test [*]
{
@@ -414,6 +437,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -428,6 +452,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -443,6 +468,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -462,6 +488,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
{
"type": "lldb",
@@ -476,6 +503,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
// Windows: bun test [file]
{
@@ -1101,28 +1129,7 @@
],
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
-},
-{
-"type": "bun",
-"name": "[JS] bun test [file]",
-"runtime": "${workspaceFolder}/build/debug/bun-debug",
-"runtimeArgs": ["test", "${file}"],
-"cwd": "${workspaceFolder}",
-"env": {
-"BUN_DEBUG_QUIET_LOGS": "1",
-"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
-},
-},
-{
-"type": "midas-rr",
-"request": "attach",
-"name": "rr",
-"trace": "Off",
-"setupCommands": [
-"handle SIGPWR nostop noprint pass",
-"source ${workspaceFolder}/misctools/gdb/std_gdb_pretty_printers.py",
-"source ${workspaceFolder}/misctools/gdb/zig_gdb_pretty_printers.py",
-],
+"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
},
],
"inputs": [

View File

@@ -141,13 +141,9 @@
"packages/bun-uws/fuzzing": true,
},
"files.associations": {
"*.css": "tailwindcss",
"*.idl": "cpp",
"*.mdc": "markdown",
"array": "cpp",
"ios": "cpp",
"oxlint.json": "jsonc",
"bun.lock": "jsonc",
},
"C_Cpp.files.exclude": {
"**/.vscode": true,

View File

@@ -53,39 +53,39 @@ $ brew install bun
## Install LLVM
-Bun requires LLVM 19 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
+Bun requires LLVM 18 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
{% codetabs group="os" %}
```bash#macOS (Homebrew)
-$ brew install llvm@19
+$ brew install llvm@18
```
```bash#Ubuntu/Debian
$ # LLVM has an automatic installation script that is compatible with all versions of Ubuntu
-$ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 19 all
+$ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 18 all
```
```bash#Arch
-$ sudo pacman -S llvm clang lld
+$ sudo pacman -S llvm clang18 lld
```
```bash#Fedora
-$ sudo dnf install llvm clang lld-devel
+$ sudo dnf install llvm18 clang18 lld18-devel
```
```bash#openSUSE Tumbleweed
-$ sudo zypper install clang19 lld19 llvm19
+$ sudo zypper install clang18 lld18 llvm18
```
{% /codetabs %}
-If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-19.1.7).
+If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-18.1.8).
-Make sure Clang/LLVM 19 is in your path:
+Make sure Clang/LLVM 18 is in your path:
```bash
-$ which clang-19
+$ which clang-18
```
If not, run this to manually add it:
@@ -94,13 +94,13 @@ If not, run this to manually add it:
```bash#macOS (Homebrew)
# use fish_add_path if you're using fish
# use path+="$(brew --prefix llvm@19)/bin" if you are using zsh
$ export PATH="$(brew --prefix llvm@19)/bin:$PATH"
# use path+="$(brew --prefix llvm@18)/bin" if you are using zsh
$ export PATH="$(brew --prefix llvm@18)/bin:$PATH"
```
```bash#Arch
# use fish_add_path if you're using fish
$ export PATH="$PATH:/usr/lib/llvm19/bin"
$ export PATH="$PATH:/usr/lib/llvm18/bin"
```
{% /codetabs %}
@@ -134,16 +134,6 @@ We recommend adding `./build/debug` to your `$PATH` so that you can run `bun-deb
$ bun-debug
```
-## Running debug builds
-The `bd` package.json script compiles and runs a debug build of Bun, only printing the output of the build process if it fails.
-```sh
-$ bun bd <args>
-$ bun bd test foo.test.ts
-$ bun bd ./foo.ts
-```
## Code generation scripts
Several code generation scripts are used during Bun's build process. These are run automatically when changes are made to certain files.
@@ -215,30 +205,18 @@ WebKit is not cloned by default (to save time and disk space). To clone and buil
# Clone WebKit into ./vendor/WebKit
$ git clone https://github.com/oven-sh/WebKit vendor/WebKit
-# Check out the commit hash specified in `set(WEBKIT_VERSION <commit_hash>)` in cmake/tools/SetupWebKit.cmake
-$ git -C vendor/WebKit checkout <commit_hash>
# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `make jsc` for a release build
$ make jsc-debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
$ make jsc-debug
# Build bun with the local JSC build
$ bun run build:local
```
-Using `bun run build:local` will build Bun in the `./build/debug-local` directory (instead of `./build/debug`), you'll have to change a couple of places to use this new directory:
-- The first line in [`src/js/builtins.d.ts`](/src/js/builtins.d.ts)
-- The `CompilationDatabase` line in [`.clangd` config](/.clangd) should be `CompilationDatabase: build/debug-local`
-- In [`build.zig`](/build.zig), the `codegen_path` option should be `build/debug-local/codegen` (instead of `build/debug/codegen`)
-- In [`.vscode/launch.json`](/.vscode/launch.json), many configurations use `./build/debug/`, change them as you see fit
Note that the WebKit folder, including build artifacts, is 8GB+ in size.
-If you are using a JSC debug build and using VScode, make sure to run the `C/C++: Select a Configuration` command to configure intellisense to find the debug headers.
Note that if you make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to change [`SetupWebKit.cmake`](/cmake/tools/SetupWebKit.cmake) to point to the commit hash.
## Troubleshooting
### 'span' file not found on Ubuntu
@@ -260,7 +238,7 @@ The issue may manifest when initially running `bun setup` as Clang being unable
```
The C++ compiler
"/usr/bin/clang++-19"
"/usr/bin/clang++-18"
is not able to compile a simple test program.
```

LATEST (2 changed lines)
View File

@@ -1 +1 @@
-1.2.10
+1.2.4

View File

@@ -91,9 +91,9 @@ ZIG ?= $(shell which zig 2>/dev/null || echo -e "error: Missing zig. Please make
# This is easier to happen than you'd expect.
# Using realpath here causes issues because clang uses clang++ as a symlink
# so if that's resolved, it won't build for C++
-REAL_CC = $(shell which clang-19 2>/dev/null || which clang 2>/dev/null)
-REAL_CXX = $(shell which clang++-19 2>/dev/null || which clang++ 2>/dev/null)
-CLANG_FORMAT = $(shell which clang-format-19 2>/dev/null || which clang-format 2>/dev/null)
+REAL_CC = $(shell which clang-18 2>/dev/null || which clang 2>/dev/null)
+REAL_CXX = $(shell which clang++-18 2>/dev/null || which clang++ 2>/dev/null)
+CLANG_FORMAT = $(shell which clang-format-18 2>/dev/null || which clang-format 2>/dev/null)
CC = $(REAL_CC)
CXX = $(REAL_CXX)
@@ -117,14 +117,14 @@ CC_WITH_CCACHE = $(CCACHE_PATH) $(CC)
ifeq ($(OS_NAME),darwin)
# Find LLVM
ifeq ($(wildcard $(LLVM_PREFIX)),)
-LLVM_PREFIX = $(shell brew --prefix llvm@19)
+LLVM_PREFIX = $(shell brew --prefix llvm@18)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
LLVM_PREFIX = $(shell brew --prefix llvm)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
# This is kinda ugly, but I can't find a better way to error :(
-LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@19' or set LLVM_PREFIX=/path/to/llvm")
+LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@18' or set LLVM_PREFIX=/path/to/llvm")
endif
LDFLAGS += -L$(LLVM_PREFIX)/lib
@@ -164,7 +164,7 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
-DCMAKE_AR=$(AR) \
--DCMAKE_RANLIB=$(which llvm-19-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
+-DCMAKE_RANLIB=$(which llvm-18-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
-DCMAKE_CXX_STANDARD=20 \
-DCMAKE_C_STANDARD=17 \
-DCMAKE_CXX_STANDARD_REQUIRED=ON \
@@ -191,7 +191,7 @@ endif
ifeq ($(OS_NAME),linux)
LIBICONV_PATH =
-AR = $(shell which llvm-ar-19 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
+AR = $(shell which llvm-ar-18 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
endif
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
@@ -255,7 +255,7 @@ DEFAULT_LINKER_FLAGS= -pthread -ldl
endif
ifeq ($(OS_NAME),darwin)
_MIMALLOC_OBJECT_FILE = 0
-JSC_BUILD_STEPS += jsc-build-mac
+JSC_BUILD_STEPS += jsc-build-mac jsc-copy-headers
JSC_BUILD_STEPS_DEBUG += jsc-build-mac-debug
_MIMALLOC_FILE = libmimalloc.a
_MIMALLOC_INPUT_PATH = libmimalloc.a
@@ -286,7 +286,7 @@ STRIP=/usr/bin/strip
endif
ifeq ($(OS_NAME),linux)
-STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-19 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
+STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-18 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
endif
@@ -674,7 +674,7 @@ endif
.PHONY: assert-deps
assert-deps:
@echo "Checking if the required utilities are available..."
@if [ $(CLANG_VERSION) -lt "19" ]; then echo -e "ERROR: clang version >=19 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@19"; exit 1; fi
@if [ $(CLANG_VERSION) -lt "18" ]; then echo -e "ERROR: clang version >=18 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@18"; exit 1; fi
@cmake --version >/dev/null 2>&1 || (echo -e "ERROR: cmake is required."; exit 1)
@$(PYTHON) --version >/dev/null 2>&1 || (echo -e "ERROR: python is required."; exit 1)
@$(ESBUILD) --version >/dev/null 2>&1 || (echo -e "ERROR: esbuild is required."; exit 1)
@@ -924,7 +924,7 @@ bun-codesign-release-local-debug:
.PHONY: jsc
-jsc: jsc-build
+jsc: jsc-build jsc-copy-headers jsc-bindings
.PHONY: jsc-debug
jsc-debug: jsc-build-debug
.PHONY: jsc-build

View File

@@ -1,44 +0,0 @@
import { bench, run } from "../runner.mjs";
import crypto from "node:crypto";
import { Buffer } from "node:buffer";
const keylen = { "aes-128-gcm": 16, "aes-192-gcm": 24, "aes-256-gcm": 32 };
const sizes = [4 * 1024, 1024 * 1024];
const ciphers = ["aes-128-gcm", "aes-192-gcm", "aes-256-gcm"];
const messages = {};
sizes.forEach(size => {
messages[size] = Buffer.alloc(size, "b");
});
const keys = {};
ciphers.forEach(cipher => {
keys[cipher] = crypto.randomBytes(keylen[cipher]);
});
// Fixed IV and AAD
const iv = crypto.randomBytes(12);
const associate_data = Buffer.alloc(16, "z");
for (const cipher of ciphers) {
for (const size of sizes) {
const message = messages[size];
const key = keys[cipher];
bench(`${cipher} ${size / 1024}KB`, () => {
const alice = crypto.createCipheriv(cipher, key, iv);
alice.setAAD(associate_data);
const enc = alice.update(message);
alice.final();
const tag = alice.getAuthTag();
const bob = crypto.createDecipheriv(cipher, key, iv);
bob.setAuthTag(tag);
bob.setAAD(associate_data);
bob.update(enc);
bob.final();
});
}
}
await run();

View File

@@ -1,53 +0,0 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
// Pre-generate DH params to avoid including setup in benchmarks
const dhSize = 1024; // Reduced from 2048 for faster testing
const dh = crypto.createDiffieHellman(dhSize);
const dhPrime = dh.getPrime();
const dhGenerator = dh.getGenerator();
// Classical Diffie-Hellman
bench("DH - generateKeys", () => {
const alice = crypto.createDiffieHellman(dhPrime, dhGenerator);
return alice.generateKeys();
});
bench("DH - computeSecret", () => {
// Setup
const alice = crypto.createDiffieHellman(dhPrime, dhGenerator);
const aliceKey = alice.generateKeys();
const bob = crypto.createDiffieHellman(dhPrime, dhGenerator);
const bobKey = bob.generateKeys();
// Benchmark just the secret computation
return alice.computeSecret(bobKey);
});
// ECDH with prime256v1 (P-256)
bench("ECDH-P256 - generateKeys", () => {
const ecdh = crypto.createECDH("prime256v1");
return ecdh.generateKeys();
});
bench("ECDH-P256 - computeSecret", () => {
// Setup
const alice = crypto.createECDH("prime256v1");
const aliceKey = alice.generateKeys();
const bob = crypto.createECDH("prime256v1");
const bobKey = bob.generateKeys();
// Benchmark just the secret computation
return alice.computeSecret(bobKey);
});
// ECDH with secp384r1 (P-384)
bench("ECDH-P384 - computeSecret", () => {
const alice = crypto.createECDH("secp384r1");
const aliceKey = alice.generateKeys();
const bob = crypto.createECDH("secp384r1");
const bobKey = bob.generateKeys();
return alice.computeSecret(bobKey);
});
await run();

View File

@@ -1,44 +0,0 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
function generateTestKeyPairs() {
const curves = crypto.getCurves();
const keys = {};
for (const curve of curves) {
const ecdh = crypto.createECDH(curve);
ecdh.generateKeys();
keys[curve] = {
compressed: ecdh.getPublicKey("hex", "compressed"),
uncompressed: ecdh.getPublicKey("hex", "uncompressed"),
instance: ecdh,
};
}
return keys;
}
const testKeys = generateTestKeyPairs();
bench("ECDH key format - P256 compressed to uncompressed", () => {
const publicKey = testKeys["prime256v1"].compressed;
return crypto.ECDH.convertKey(publicKey, "prime256v1", "hex", "hex", "uncompressed");
});
bench("ECDH key format - P256 uncompressed to compressed", () => {
const publicKey = testKeys["prime256v1"].uncompressed;
return crypto.ECDH.convertKey(publicKey, "prime256v1", "hex", "hex", "compressed");
});
bench("ECDH key format - P384 compressed to uncompressed", () => {
const publicKey = testKeys["secp384r1"].compressed;
return crypto.ECDH.convertKey(publicKey, "secp384r1", "hex", "hex", "uncompressed");
});
bench("ECDH key format - P384 uncompressed to compressed", () => {
const publicKey = testKeys["secp384r1"].uncompressed;
return crypto.ECDH.convertKey(publicKey, "secp384r1", "hex", "hex", "compressed");
});
await run();

View File

@@ -1,50 +0,0 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
// Sample keys with different lengths
const keys = {
short: "secret",
long: "this-is-a-much-longer-secret-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
};
// Test parameters
const salts = ["", "salt"];
const infos = ["", "info"];
const hashes = ["sha256", "sha512"];
const sizes = [10, 1024];
// Benchmark sync HKDF
for (const hash of hashes) {
for (const keyName of Object.keys(keys)) {
const key = keys[keyName];
for (const size of sizes) {
bench(`hkdfSync ${hash} ${keyName}-key ${size} bytes`, () => {
return crypto.hkdfSync(hash, key, "salt", "info", size);
});
}
}
}
// Benchmark different combinations of salt and info
for (const salt of salts) {
for (const info of infos) {
bench(`hkdfSync sha256 with ${salt ? "salt" : "no-salt"} and ${info ? "info" : "no-info"}`, () => {
return crypto.hkdfSync("sha256", "secret", salt, info, 64);
});
}
}
// Benchmark async HKDF (using promises for cleaner benchmark)
// Note: async benchmarks in Mitata require returning a Promise
for (const hash of hashes) {
bench(`hkdf ${hash} async`, async () => {
return new Promise((resolve, reject) => {
crypto.hkdf(hash, "secret", "salt", "info", 64, (err, derivedKey) => {
if (err) reject(err);
else resolve(derivedKey);
});
});
});
}
await run();

View File

@@ -1,43 +0,0 @@
import { checkPrime, checkPrimeSync, generatePrime, generatePrimeSync } from "node:crypto";
import { bench, run } from "../runner.mjs";
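// Pre-generate primes once so the checkPrime benchmarks measure verification, not generation.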
const prime512 = generatePrimeSync(512);
const prime2048 = generatePrimeSync(2048);
bench("checkPrimeSync 512", () => {
return checkPrimeSync(prime512);
});
bench("checkPrimeSync 2048", () => {
return checkPrimeSync(prime2048);
});
bench("checkPrime 512", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => checkPrime(prime512, resolve)));
await Promise.all(promises);
});
bench("checkPrime 2048", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => checkPrime(prime2048, resolve)));
await Promise.all(promises);
});
bench("generatePrimeSync 512", () => {
return generatePrimeSync(512);
});
bench("generatePrimeSync 2048", () => {
return generatePrimeSync(2048);
});
bench("generatePrime 512", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => generatePrime(512, resolve)));
await Promise.all(promises);
});
bench("generatePrime 2048", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => generatePrime(2048, resolve)));
await Promise.all(promises);
});
await run();

View File

@@ -1,50 +0,0 @@
import crypto from "crypto";
import { bench, run } from "../runner.mjs";
bench("randomInt - sync", () => {
crypto.randomInt(1000);
});
bench("randomInt - async", async () => {
const { promise, resolve } = Promise.withResolvers();
crypto.randomInt(1000, () => {
resolve();
});
await promise;
});
bench("randonBytes - 32", () => {
crypto.randomBytes(32);
});
bench("randomBytes - 256", () => {
crypto.randomBytes(256);
});
const buf = Buffer.alloc(256);
bench("randomFill - 32", async () => {
const { promise, resolve } = Promise.withResolvers();
crypto.randomFill(buf, 0, 32, () => {
resolve();
});
await promise;
});
bench("randomFill - 256", async () => {
const { promise, resolve } = Promise.withResolvers();
crypto.randomFill(buf, 0, 256, () => {
resolve();
});
await promise;
});
bench("randomFillSync - 32", () => {
crypto.randomFillSync(buf, 0, 32);
});
bench("randomFillSync - 256", () => {
crypto.randomFillSync(buf, 0, 256);
});
await run();

View File

@@ -1,7 +1,7 @@
{
"compilerOptions": {
// Enable latest features
"lib": ["ESNext"],
"lib": ["ESNext", "DOM"],
"target": "ESNext",
"module": "ESNext",
"moduleDetection": "force",

View File

@@ -12,7 +12,6 @@
"eventemitter3": "^5.0.0",
"execa": "^8.0.1",
"fast-glob": "3.3.1",
"fastify": "^5.0.0",
"fdir": "^6.1.0",
"mitata": "^1.0.25",
"react": "^18.3.1",

View File

@@ -1,7 +1,7 @@
{
"compilerOptions": {
// Enable latest features
"lib": ["ESNext"],
"lib": ["ESNext", "DOM"],
"target": "ESNext",
"module": "ESNext",
"moduleDetection": "force",

View File

@@ -1,17 +0,0 @@
import { group as suite, bench, run } from "mitata";
const bigBuf = Buffer.alloc(1024 * 256);
// Fill with letter "A" encoded as UTF16
for (let i = 0; i < bigBuf.length; i += 2) {
bigBuf[i] = 65; // ASCII/UTF16 code for 'A'
bigBuf[i + 1] = 0; // High byte for UTF16
}
var asUTF16LE = bigBuf.toString("utf16le");
// await run();
console.time("Buffer.from(bigBuf, 'utf16le')");
for (let i = 0; i < 100000; i++) {
bigBuf.asciiWrite(asUTF16LE, 0, asUTF16LE.length);
}
console.timeEnd("Buffer.from(bigBuf, 'utf16le')");

View File

@@ -1,13 +0,0 @@
import express from "express";
const app = express();
const port = 3000;
var i = 0;
app.get("/", (req, res) => {
res.send("Hello World!" + i++);
});
app.listen(port, () => {
console.log(`Express app listening at http://localhost:${port}`);
});

View File

@@ -1,20 +0,0 @@
import Fastify from "fastify";
const fastify = Fastify({
logger: false,
});
fastify.get("/", async (request, reply) => {
return { hello: "world" };
});
const start = async () => {
try {
await fastify.listen({ port: 3000 });
} catch (err) {
fastify.log.error(err);
process.exit(1);
}
};
start();

View File

@@ -1,28 +0,0 @@
import ioredis from "ioredis";
const redis = process.argv.includes("--redis=native")
? Bun.redis
: new ioredis("redis://localhost:6379", {
enableAutoPipelining: true,
});
const isBun = globalThis.Bun && redis === Bun.redis;
for (let count of [100, 1000]) {
function iterate() {
const promises = new Array(count);
for (let i = 0; i < count; i++) {
promises[i] = redis.get("greeting");
}
return Promise.all(promises);
}
const label = isBun ? `Bun.redis` : `ioredis`;
console.time(`GET 'greeting' batches of ${count} - ${label} (${count} iterations)`);
for (let i = 0; i < 1000; i++) {
await iterate();
}
console.timeEnd(`GET 'greeting' batches of ${count} - ${label} (${count} iterations)`);
}
process.exit(0);

View File

@@ -1,14 +0,0 @@
import { bench, run } from "../runner.mjs";
const url = "http://localhost:3000/";
const clonable = new Request(url);
bench("request.clone().method", () => {
return clonable.clone().method;
});
bench("new Request(url).method", () => {
return new Request(url).method;
});
await run();

324
build.zig
View File

@@ -4,7 +4,7 @@ const builtin = @import("builtin");
const Build = std.Build;
const Step = Build.Step;
const Compile = Step.Compile;
const LazyPath = Build.LazyPath;
const LazyPath = Step.LazyPath;
const Target = std.Target;
const ResolvedTarget = std.Build.ResolvedTarget;
const CrossTarget = std.zig.CrossTarget;
@@ -18,21 +18,21 @@ const OperatingSystem = @import("src/env.zig").OperatingSystem;
const pathRel = fs.path.relative;
/// When updating this, make sure to adjust SetupZig.cmake
const recommended_zig_version = "0.14.0";
/// Do not rename this constant. It is scanned by some scripts to determine which zig version to install.
const recommended_zig_version = "0.14.0-dev.2987+183bb8b08";
// comptime {
// if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) {
// @compileError(
// "" ++
// "Bun requires Zig version " ++ recommended_zig_version ++ ", but you have " ++
// builtin.zig_version_string ++ ". This is automatically configured via Bun's " ++
// "CMake setup. You likely meant to run `bun run build`. If you are trying to " ++
// "upgrade the Zig compiler, edit ZIG_COMMIT in cmake/tools/SetupZig.cmake or " ++
// "comment this error out.",
// );
// }
// }
comptime {
if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) {
@compileError(
"" ++
"Bun requires Zig version " ++ recommended_zig_version ++ " (found " ++
builtin.zig_version_string ++ "). This is " ++
"automatically configured via Bun's CMake setup. You likely meant to run " ++
"`bun setup`. If you are trying to upgrade the Zig compiler, " ++
"run `./scripts/download-zig.sh master` or comment this message out.",
);
}
}
const zero_sha = "0000000000000000000000000000000000000000";
@@ -93,7 +93,6 @@ const BunBuildOptions = struct {
opts.addOption(bool, "baseline", this.isBaseline());
opts.addOption(bool, "enable_logs", this.enable_logs);
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
const mod = opts.createModule();
this.cached_options_module = mod;
@@ -154,6 +153,13 @@ pub fn build(b: *Build) !void {
std.log.info("zig compiler v{s}", .{builtin.zig_version_string});
checked_file_exists = std.AutoHashMap(u64, void).init(b.allocator);
// TODO: Upgrade path for 0.14.0
// b.graph.zig_lib_directory = brk: {
// const sub_path = "vendor/zig/lib";
// const dir = try b.build_root.handle.openDir(sub_path, .{});
// break :brk .{ .handle = dir, .path = try b.build_root.join(b.graph.arena, &.{sub_path}) };
// };
var target_query = b.standardTargetOptionsQueryOnly(.{});
const optimize = b.standardOptimizeOption(.{});
@@ -199,7 +205,10 @@ pub fn build(b: *Build) !void {
const bun_version = b.option([]const u8, "version", "Value of `Bun.version`") orelse "0.0.0";
b.reference_trace = b.reference_trace orelse 32;
b.reference_trace = ref_trace: {
const trace = b.option(u32, "reference-trace", "Set the reference trace") orelse 24;
break :ref_trace if (trace == 0) null else trace;
};
const obj_format = b.option(ObjectFormat, "obj_format", "Output file for object files") orelse .obj;
@@ -276,40 +285,6 @@ pub fn build(b: *Build) !void {
step.dependOn(addInstallObjectFile(b, bun_obj, "bun-zig", obj_format));
}
// zig build test
{
var step = b.step("test", "Build Bun's unit test suite");
var o = build_options;
var unit_tests = b.addTest(.{
.name = "bun-test",
.optimize = build_options.optimize,
.root_source_file = b.path("src/unit_test.zig"),
.test_runner = .{ .path = b.path("src/main_test.zig"), .mode = .simple },
.target = build_options.target,
.use_llvm = !build_options.no_llvm,
.use_lld = if (build_options.os == .mac) false else !build_options.no_llvm,
.omit_frame_pointer = false,
.strip = false,
});
configureObj(b, &o, unit_tests);
// Setting `linker_allow_shlib_undefined` causes the linker to ignore
// all undefined symbols. We want this because all we care about is the
// object file Zig creates; we perform our own linking later. There is
// currently no way to make a test build that only creates an object
// file w/o creating an executable.
//
// See: https://github.com/ziglang/zig/issues/23374
unit_tests.linker_allow_shlib_undefined = true;
unit_tests.link_function_sections = true;
unit_tests.link_data_sections = true;
unit_tests.bundle_ubsan_rt = false;
const bin = unit_tests.getEmittedBin();
const obj = bin.dirname().path(b, "bun-test.o");
const cpy_obj = b.addInstallFile(obj, "bun-test.o");
step.dependOn(&cpy_obj.step);
}
// zig build windows-shim
{
var step = b.step("windows-shim", "Build the Windows shim (bun_shim_impl.exe + bun_shim_debug.exe)");
@@ -344,21 +319,7 @@ pub fn build(b: *Build) !void {
.{ .os = .linux, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64, .musl = true },
.{ .os = .linux, .arch = .aarch64, .musl = true },
}, &.{ .Debug, .ReleaseFast });
}
// zig build check-all-debug
{
const step = b.step("check-all-debug", "Check for semantic analysis errors on all supported platforms in debug mode");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
.{ .os = .linux, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64, .musl = true },
.{ .os = .linux, .arch = .aarch64, .musl = true },
}, &.{.Debug});
});
}
// zig build check-windows
@@ -366,42 +327,27 @@ pub fn build(b: *Build) !void {
const step = b.step("check-windows", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
}, &.{ .Debug, .ReleaseFast });
});
}
{
const step = b.step("check-macos", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
}, &.{ .Debug, .ReleaseFast });
});
}
{
const step = b.step("check-linux", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .linux, .arch = .x86_64 },
.{ .os = .linux, .arch = .aarch64 },
}, &.{ .Debug, .ReleaseFast });
});
}
// zig build translate-c-headers
{
const step = b.step("translate-c", "Copy generated translated-c-headers.zig to zig-out");
for ([_]TargetDescription{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
.{ .os = .linux, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64, .musl = true },
.{ .os = .linux, .arch = .aarch64, .musl = true },
}) |t| {
const resolved = t.resolveTarget(b);
step.dependOn(
&b.addInstallFile(getTranslateC(b, resolved, .Debug), b.fmt("translated-c-headers/{s}.zig", .{
resolved.result.zigTriple(b.allocator) catch @panic("OOM"),
})).step,
);
}
step.dependOn(&b.addInstallFile(getTranslateC(b, b.graph.host, .Debug).getOutput(), "translated-c-headers.zig").step);
}
// zig build enum-extractor
@@ -418,32 +364,22 @@ pub fn build(b: *Build) !void {
}
}
const TargetDescription = struct {
os: OperatingSystem,
arch: Arch,
musl: bool = false,
fn resolveTarget(desc: TargetDescription, b: *Build) std.Build.ResolvedTarget {
return b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(desc.os),
.cpu_arch = desc.arch,
.cpu_model = getCpuModel(desc.os, desc.arch) orelse .determined_by_arch_os,
.os_version_min = getOSVersionMin(desc.os),
.glibc_version = if (desc.musl) null else getOSGlibCVersion(desc.os),
});
}
};
fn addMultiCheck(
pub fn addMultiCheck(
b: *Build,
parent_step: *Step,
root_build_options: BunBuildOptions,
to_check: []const TargetDescription,
optimize: []const std.builtin.OptimizeMode,
to_check: []const struct { os: OperatingSystem, arch: Arch, musl: bool = false },
) void {
for (to_check) |check| {
for (optimize) |mode| {
const check_target = check.resolveTarget(b);
for ([_]std.builtin.Mode{ .Debug, .ReleaseFast }) |mode| {
const check_target = b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(check.os),
.cpu_arch = check.arch,
.cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_arch_os,
.os_version_min = getOSVersionMin(check.os),
.glibc_version = if (check.musl) null else getOSGlibCVersion(check.os),
});
var options: BunBuildOptions = .{
.target = check_target,
.os = check.os,
@@ -467,13 +403,7 @@ fn addMultiCheck(
}
}
fn getTranslateC(b: *Build, initial_target: std.Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) LazyPath {
const target = b.resolveTargetQuery(q: {
var query = initial_target.query;
if (query.os_tag == .windows)
query.abi = .gnu;
break :q query;
});
fn getTranslateC(b: *Build, target: std.Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) *Step.TranslateC {
const translate_c = b.addTranslateC(.{
.root_source_file = b.path("src/c-headers-for-zig.h"),
.target = target,
@@ -489,72 +419,28 @@ fn getTranslateC(b: *Build, initial_target: std.Build.ResolvedTarget, optimize:
const str, const value = entry;
translate_c.defineCMacroRaw(b.fmt("{s}={d}", .{ str, @intFromBool(value) }));
}
if (target.result.os.tag == .windows) {
// translate-c is unable to translate the unsuffixed windows functions
// like `SetCurrentDirectory` since they are defined with an odd macro
// that translate-c doesn't handle.
//
// #define SetCurrentDirectory __MINGW_NAME_AW(SetCurrentDirectory)
//
// In these cases, it's better to just reference the underlying function
// directly: SetCurrentDirectoryW. To make the error better, a post
// processing step is applied to the translate-c file.
//
// Additionally, this step makes it so that decls like NTSTATUS and
// HANDLE point to the standard library structures.
const helper_exe = b.addExecutable(.{
.name = "process_windows_translate_c",
.root_module = b.createModule(.{
.root_source_file = b.path("src/codegen/process_windows_translate_c.zig"),
.target = b.graph.host,
.optimize = .Debug,
}),
});
const in = translate_c.getOutput();
const run = b.addRunArtifact(helper_exe);
run.addFileArg(in);
const out = run.addOutputFileArg("c-headers-for-zig.zig");
return out;
}
return translate_c.getOutput();
return translate_c;
}
pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
// Create `@import("bun")`, containing most of Bun's code.
const bun = b.createModule(.{
.root_source_file = b.path("src/bun.zig"),
});
bun.addImport("bun", bun); // allow circular "bun" import
addInternalImports(b, bun, opts);
const root = b.createModule(.{
.root_source_file = b.path("src/main.zig"),
// Root module gets compilation flags. Forwarded as default to dependencies.
.target = opts.target,
.optimize = opts.optimize,
});
root.addImport("bun", bun);
const obj = b.addObject(.{
.name = if (opts.optimize == .Debug) "bun-debug" else "bun",
.root_module = root,
.root_source_file = switch (opts.os) {
.wasm => b.path("root_wasm.zig"),
else => b.path("src/main.zig"),
// else => b.path("root_css.zig"),
},
.target = opts.target,
.optimize = opts.optimize,
.use_llvm = !opts.no_llvm,
.use_lld = if (opts.os == .mac) false else !opts.no_llvm,
// https://github.com/ziglang/zig/issues/17430
.pic = true,
.omit_frame_pointer = false,
.strip = false, // stripped at the end
});
configureObj(b, opts, obj);
return obj;
}
fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
// Flags on root module get used for the compilation
obj.root_module.omit_frame_pointer = false;
obj.root_module.strip = false; // stripped at the end
// https://github.com/ziglang/zig/issues/17430
obj.root_module.pic = true;
// Object options
obj.use_llvm = !opts.no_llvm;
obj.use_lld = if (opts.os == .mac) false else !opts.no_llvm;
if (opts.enable_asan) {
if (@hasField(Build.Module, "sanitize_address")) {
obj.root_module.sanitize_address = true;
@@ -564,7 +450,7 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
}
}
obj.bundle_compiler_rt = false;
obj.bundle_ubsan_rt = false;
obj.root_module.omit_frame_pointer = false;
// Link libc
if (opts.os != .wasm) {
@@ -574,7 +460,6 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
// Disable stack probing on x86 so we don't need to include compiler_rt
if (opts.arch.isX86()) {
// TODO: enable on debug please.
obj.root_module.stack_check = false;
obj.root_module.stack_protector = false;
}
@@ -589,18 +474,17 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
obj.root_module.valgrind = true;
}
}
addInternalPackages(b, obj, opts);
obj.root_module.addImport("build_options", opts.buildOptionsModule(b));
const translate_c = getTranslateC(b, opts.target, opts.optimize);
obj.root_module.addImport("translated-c-headers", translate_c.createModule());
return obj;
}
const ObjectFormat = enum {
/// Emitting LLVM bc files could allow a stronger LTO pass, however it
/// doesn't yet work. It is left accessible with `-Dobj_format=bc` or in
/// CMake with `-DZIG_OBJECT_FORMAT=bc`.
///
/// To use LLVM bitcode from Zig, more work needs to be done. Currently, an install of
/// LLVM 18.1.7 is not compatible with the bitcode that Zig 0.13 (which bundles LLVM 18.1.7) outputs.
/// Change to "bc" to experiment; "Invalid record" means the output is not valid.
bc,
/// Emit a .o / .obj file for the bun-zig object.
obj,
};
@@ -630,21 +514,16 @@ fn exists(path: []const u8) bool {
return true;
}
fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
const os = opts.os;
mod.addImport("build_options", opts.buildOptionsModule(b));
const translate_c = getTranslateC(b, opts.target, opts.optimize);
mod.addImport("translated-c-headers", b.createModule(.{ .root_source_file = translate_c }));
const zlib_internal_path = switch (os) {
.windows => "src/deps/zlib.win32.zig",
.linux, .mac => "src/deps/zlib.posix.zig",
else => null,
};
if (zlib_internal_path) |path| {
mod.addAnonymousImport("zlib-internal", .{
obj.root_module.addAnonymousImport("zlib-internal", .{
.root_source_file = b.path(path),
});
}
@@ -654,7 +533,7 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
.windows => "src/async/windows_event_loop.zig",
else => "src/async/stub_event_loop.zig",
};
mod.addAnonymousImport("async", .{
obj.root_module.addAnonymousImport("async", .{
.root_source_file = b.path(async_path),
});
@@ -702,7 +581,7 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
entry.import
else
entry.file;
mod.addAnonymousImport(import_path, .{
obj.root_module.addAnonymousImport(import_path, .{
.root_source_file = .{ .cwd_relative = path },
});
}
@@ -712,37 +591,16 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
.{ .import = "completions-zsh", .file = b.path("completions/bun.zsh") },
.{ .import = "completions-fish", .file = b.path("completions/bun.fish") },
}) |entry| {
mod.addAnonymousImport(entry.import, .{
obj.root_module.addAnonymousImport(entry.import, .{
.root_source_file = entry.file,
});
}
if (os == .windows) {
mod.addAnonymousImport("bun_shim_impl.exe", .{
obj.root_module.addAnonymousImport("bun_shim_impl.exe", .{
.root_source_file = opts.windowsShim(b).exe.getEmittedBin(),
});
}
// Finally, make it so all modules share the same import table.
propagateImports(mod) catch @panic("OOM");
}
/// Makes all imports of `source_mod` visible to all of its dependencies.
/// Does not replace existing imports.
fn propagateImports(source_mod: *Module) !void {
var seen = std.AutoHashMap(*Module, void).init(source_mod.owner.graph.arena);
defer seen.deinit();
var queue = std.ArrayList(*Module).init(source_mod.owner.graph.arena);
defer queue.deinit();
try queue.appendSlice(source_mod.import_table.values());
while (queue.pop()) |mod| {
if ((try seen.getOrPut(mod)).found_existing) continue;
try queue.appendSlice(mod.import_table.values());
for (source_mod.import_table.keys(), source_mod.import_table.values()) |k, v|
if (mod.import_table.get(k) == null)
mod.addImport(k, v);
}
}
fn validateGeneratedPath(path: []const u8) void {
@@ -771,34 +629,30 @@ const WindowsShim = struct {
const exe = b.addExecutable(.{
.name = "bun_shim_impl",
.root_module = b.createModule(.{
.root_source_file = path,
.target = target,
.optimize = .ReleaseFast,
.unwind_tables = .none,
.omit_frame_pointer = true,
.strip = true,
.sanitize_thread = false,
.single_threaded = true,
.link_libc = false,
}),
.linkage = .static,
.root_source_file = path,
.target = target,
.optimize = .ReleaseFast,
.use_llvm = true,
.use_lld = true,
.unwind_tables = .none,
.omit_frame_pointer = true,
.strip = true,
.linkage = .static,
.sanitize_thread = false,
.single_threaded = true,
.link_libc = false,
});
const dbg = b.addExecutable(.{
.name = "bun_shim_debug",
.root_module = b.createModule(.{
.root_source_file = path,
.target = target,
.optimize = .Debug,
.single_threaded = true,
.link_libc = false,
}),
.linkage = .static,
.root_source_file = path,
.target = target,
.optimize = .Debug,
.use_llvm = true,
.use_lld = true,
.linkage = .static,
.single_threaded = true,
.link_libc = false,
});
return .{ .exe = exe, .dbg = dbg };

View File

@@ -29,6 +29,7 @@
"name": "bun-types",
"dependencies": {
"@types/node": "*",
"@types/ws": "~8.5.10",
},
"devDependencies": {
"@biomejs/biome": "^1.5.3",
@@ -150,13 +151,13 @@
"@qiwi/npm-registry-client": ["@qiwi/npm-registry-client@8.9.1", "", { "dependencies": { "concat-stream": "^2.0.0", "graceful-fs": "^4.2.4", "normalize-package-data": "~1.0.1 || ^2.0.0 || ^3.0.0", "npm-package-arg": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^8.0.0", "once": "^1.4.0", "request": "^2.88.2", "retry": "^0.12.0", "safe-buffer": "^5.2.1", "semver": "2 >=2.2.1 || 3.x || 4 || 5 || 7", "slide": "^1.1.6", "ssri": "^8.0.0" }, "optionalDependencies": { "npmlog": "2 || ^3.1.0 || ^4.0.0" } }, "sha512-rZF+mG+NfijR0SHphhTLHRr4aM4gtfdwoAMY6we2VGQam8vkN1cxGG1Lg/Llrj8Dd0Mu6VjdFQRyMMRZxtZR2A=="],
"@types/bun": ["@types/bun@1.2.2", "", { "dependencies": { "bun-types": "1.2.2" } }, "sha512-tr74gdku+AEDN5ergNiBnplr7hpDp3V1h7fqI2GcR/rsUaM39jpSeKH0TFibRvU0KwniRx5POgaYnaXbk0hU+w=="],
"@types/bun": ["@types/bun@1.1.17", "", { "dependencies": { "bun-types": "1.1.44" } }, "sha512-zZt0Kao/8hAwNOXh4bmt8nKbMEd4QD8n7PeTGF+NZTVY5ouXhU/TX7jUj4He1p7mgY+WdplnU1B6MB1j17vdzg=="],
"@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="],
"@types/json5": ["@types/json5@0.0.29", "", {}, "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ=="],
"@types/node": ["@types/node@22.13.5", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-+lTU0PxZXn0Dr1NBtC7Y8cR21AJr87dLLU953CWA6pMxxv/UDc7jYAY90upcrie1nRcD6XNG5HOYEDtgW5TxAg=="],
"@types/node": ["@types/node@22.10.7", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-V09KvXxFiutGp6B7XkpaDXlNadZxrzajcY50EuoLIpQ6WWYCSvf19lVIazzfIzQvhUN2HjX12spLojTnhuKlGg=="],
"@types/prop-types": ["@types/prop-types@15.7.12", "", {}, "sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q=="],
@@ -164,6 +165,8 @@
"@types/semver": ["@types/semver@7.5.8", "", {}, "sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ=="],
"@types/ws": ["@types/ws@8.5.11", "", { "dependencies": { "@types/node": "*" } }, "sha512-4+q7P5h3SpJxaBft0Dzpbr6lmMaqh0Jr2tbhJZ/luAwvD7ohSCniYkwz/pLxuT2h0EOa6QADgJj1Ko+TzRfZ+w=="],
"@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@7.16.1", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "7.16.1", "@typescript-eslint/type-utils": "7.16.1", "@typescript-eslint/utils": "7.16.1", "@typescript-eslint/visitor-keys": "7.16.1", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^1.3.0" }, "peerDependencies": { "@typescript-eslint/parser": "^7.0.0", "eslint": "^8.56.0" } }, "sha512-SxdPak/5bO0EnGktV05+Hq8oatjAYVY3Zh2bye9pGZy6+jwyR3LG3YKkV4YatlsgqXP28BTeVm9pqwJM96vf2A=="],
"@typescript-eslint/parser": ["@typescript-eslint/parser@7.16.1", "", { "dependencies": { "@typescript-eslint/scope-manager": "7.16.1", "@typescript-eslint/types": "7.16.1", "@typescript-eslint/typescript-estree": "7.16.1", "@typescript-eslint/visitor-keys": "7.16.1", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.56.0" } }, "sha512-u+1Qx86jfGQ5i4JjK33/FnawZRpsLxRnKzGE6EABZ40KxVT/vWsiZFEBBHjFOljmmV3MBYOHEKi0Jm9hbAOClA=="],
@@ -912,6 +915,8 @@
"@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="],
"@types/ws/@types/node": ["@types/node@20.12.14", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-scnD59RpYD91xngrQQLGkE+6UrHUPzeKZWhhjBSa3HSkwjbQc38+q3RoIVEwxQGRw3M+j5hpNAM+lgV3cVormg=="],
"@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="],
"@typescript-eslint/visitor-keys/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="],
@@ -1002,6 +1007,8 @@
"@definitelytyped/utils/which/isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="],
"@types/ws/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
"@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="],
"are-we-there-yet/readable-stream/isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="],

View File

@@ -2,7 +2,3 @@
# https://github.com/oven-sh/bun/issues/16289
[test]
preload = ["./test/js/node/harness.ts", "./test/preload.ts"]
[install]
# Node.js never auto-installs modules.
auto = "disable"

View File

@@ -142,14 +142,6 @@ if(UNIX)
-fno-unwind-tables
-fno-asynchronous-unwind-tables
)
# needed for the libuv stubs because they use a
# C23 feature that lets you define a parameter
# without a name
register_compiler_flags(
DESCRIPTION "Allow C23 extensions"
-Wno-c23-extensions
)
endif()
register_compiler_flags(

View File

@@ -419,15 +419,7 @@ function(register_command)
list(APPEND CMD_EFFECTIVE_OUTPUTS ${artifact})
if(BUILDKITE)
file(RELATIVE_PATH filename ${BUILD_PATH} ${artifact})
if(filename STREQUAL "libbun-profile.a")
# libbun-profile.a is now over 5gb in size, compress it first
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${BUILD_PATH}/codegen)
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${CACHE_PATH})
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} gzip -1 libbun-profile.a)
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload libbun-profile.a.gz)
else()
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename})
endif()
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename})
endif()
endforeach()
@@ -633,7 +625,7 @@ function(register_repository)
set(GIT_PATH ${VENDOR_PATH}/${GIT_NAME})
endif()
set(GIT_EFFECTIVE_OUTPUTS ${GIT_PATH}/.ref)
set(GIT_EFFECTIVE_OUTPUTS)
foreach(output ${GIT_OUTPUTS})
list(APPEND GIT_EFFECTIVE_OUTPUTS ${GIT_PATH}/${output})
endforeach()
@@ -751,17 +743,11 @@ function(register_cmake_command)
list(APPEND MAKE_EFFECTIVE_ARGS --fresh)
endif()
set(MAKE_SOURCES)
if(TARGET clone-${MAKE_TARGET})
list(APPEND MAKE_SOURCES ${MAKE_CWD}/.ref)
endif()
register_command(
COMMENT "Configuring ${MAKE_TARGET}"
TARGET configure-${MAKE_TARGET}
COMMAND ${CMAKE_COMMAND} ${MAKE_EFFECTIVE_ARGS}
CWD ${MAKE_CWD}
SOURCES ${MAKE_SOURCES}
OUTPUTS ${MAKE_BUILD_PATH}/CMakeCache.txt
)
@@ -813,7 +799,6 @@ function(register_cmake_command)
TARGETS configure-${MAKE_TARGET}
COMMAND ${CMAKE_COMMAND} ${MAKE_BUILD_ARGS}
CWD ${MAKE_CWD}
SOURCES ${MAKE_SOURCES}
ARTIFACTS ${MAKE_ARTIFACTS}
)

View File

@@ -26,15 +26,6 @@ else()
setx(DEBUG OFF)
endif()
optionx(BUN_TEST BOOL "Build Bun's unit test suite instead of the normal build" DEFAULT OFF)
if (BUN_TEST)
setx(TEST ON)
else()
setx(TEST OFF)
endif()
if(CMAKE_BUILD_TYPE MATCHES "MinSizeRel")
setx(ENABLE_SMOL ON)
endif()
@@ -71,14 +62,7 @@ if(ARCH STREQUAL "x64")
optionx(ENABLE_BASELINE BOOL "If baseline features should be used for older CPUs (e.g. disables AVX, AVX2)" DEFAULT OFF)
endif()
# Disabling logs by default for tests yields faster builds
if (DEBUG AND NOT TEST)
set(DEFAULT_ENABLE_LOGS ON)
else()
set(DEFAULT_ENABLE_LOGS OFF)
endif()
optionx(ENABLE_LOGS BOOL "If debug logs should be enabled" DEFAULT ${DEFAULT_ENABLE_LOGS})
optionx(ENABLE_LOGS BOOL "If debug logs should be enabled" DEFAULT ${DEBUG})
optionx(ENABLE_ASSERTIONS BOOL "If debug assertions should be enabled" DEFAULT ${DEBUG})
optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ON)

View File

@@ -29,9 +29,6 @@ else()
endif()
set(ZIG_NAME bootstrap-${ZIG_ARCH}-${ZIG_OS_ABI})
if(ZIG_COMPILER_SAFE)
set(ZIG_NAME ${ZIG_NAME}-ReleaseSafe)
endif()
set(ZIG_FILENAME ${ZIG_NAME}.zip)
if(CMAKE_HOST_WIN32)

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/boringssl
COMMIT
7a5d984c69b0c34c4cbb56c6812eaa5b9bef485c
914b005ef3ece44159dca0ffad74eb42a9f6679f
)
register_cmake_command(

View File

@@ -1,8 +1,8 @@
if(DEBUG)
set(bun bun-debug)
# elseif(ENABLE_SMOL)
# set(bun bun-smol-profile)
# set(bunStrip bun-smol)
elseif(ENABLE_SMOL)
set(bun bun-smol-profile)
set(bunStrip bun-smol)
elseif(ENABLE_VALGRIND)
set(bun bun-valgrind)
elseif(ENABLE_ASSERTIONS)
@@ -12,10 +12,6 @@ else()
set(bunStrip bun)
endif()
if(TEST)
set(bun ${bun}-test)
endif()
set(bunExe ${bun}${CMAKE_EXECUTABLE_SUFFIX})
if(bunStrip)
@@ -532,6 +528,7 @@ file(GLOB_RECURSE BUN_ZIG_SOURCES ${CONFIGURE_DEPENDS}
list(APPEND BUN_ZIG_SOURCES
${CWD}/build.zig
${CWD}/src/main.zig
${BUN_BINDGEN_ZIG_OUTPUTS}
)
@@ -553,13 +550,7 @@ else()
list(APPEND BUN_ZIG_GENERATED_SOURCES ${BUN_BAKE_RUNTIME_OUTPUTS})
endif()
if (TEST)
set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-test.o)
set(ZIG_STEPS test)
else()
set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
set(ZIG_STEPS obj)
endif()
set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
if(APPLE)
@@ -588,10 +579,10 @@ register_command(
GROUP
console
COMMENT
"Building src/*.zig into ${BUN_ZIG_OUTPUT} for ${ZIG_TARGET}"
"Building src/*.zig for ${ZIG_TARGET}"
COMMAND
${ZIG_EXECUTABLE}
build ${ZIG_STEPS}
build obj
${CMAKE_ZIG_FLAGS}
--prefix ${BUILD_PATH}
-Dobj_format=${ZIG_OBJECT_FORMAT}
@@ -605,7 +596,6 @@ register_command(
-Dcodegen_path=${CODEGEN_PATH}
-Dcodegen_embed=$<IF:$<BOOL:${CODEGEN_EMBED}>,true,false>
--prominent-compile-errors
--summary all
${ZIG_FLAGS_BUN}
ARTIFACTS
${BUN_ZIG_OUTPUT}
@@ -645,8 +635,6 @@ file(GLOB BUN_C_SOURCES ${CONFIGURE_DEPENDS}
${BUN_USOCKETS_SOURCE}/src/eventing/*.c
${BUN_USOCKETS_SOURCE}/src/internal/*.c
${BUN_USOCKETS_SOURCE}/src/crypto/*.c
${CWD}/src/bun.js/bindings/uv-posix-polyfills.c
${CWD}/src/bun.js/bindings/uv-posix-stubs.c
)
if(WIN32)
@@ -750,7 +738,7 @@ endif()
# --- C/C++ Properties ---
set_target_properties(${bun} PROPERTIES
CXX_STANDARD 23
CXX_STANDARD 20
CXX_STANDARD_REQUIRED YES
CXX_EXTENSIONS YES
CXX_VISIBILITY_PRESET hidden
@@ -759,18 +747,6 @@ set_target_properties(${bun} PROPERTIES
VISIBILITY_INLINES_HIDDEN YES
)
if (NOT WIN32)
# Enable precompiled headers
# Only enable in these scenarios:
# 1. NOT in CI, OR
# 2. In CI AND BUN_CPP_ONLY is enabled
if(NOT CI OR (CI AND BUN_CPP_ONLY))
target_precompile_headers(${bun} PRIVATE
"$<$<COMPILE_LANGUAGE:CXX>:${CWD}/src/bun.js/bindings/root.h>"
)
endif()
endif()
# --- C/C++ Includes ---
if(WIN32)
@@ -797,10 +773,6 @@ target_include_directories(${bun} PRIVATE
${NODEJS_HEADERS_PATH}/include
)
if(NOT WIN32)
target_include_directories(${bun} PRIVATE ${CWD}/src/bun.js/bindings/libuv)
endif()
if(LINUX)
include(CheckIncludeFiles)
check_include_files("sys/queue.h" HAVE_SYS_QUEUE_H)
@@ -909,7 +881,6 @@ if(NOT WIN32)
-Werror=sometimes-uninitialized
-Werror=unused
-Wno-unused-function
-Wno-c++23-lambda-attributes
-Wno-nullability-completeness
-Werror
)
@@ -926,15 +897,10 @@ if(NOT WIN32)
-Werror=nonnull
-Werror=move
-Werror=sometimes-uninitialized
-Wno-c++23-lambda-attributes
-Wno-nullability-completeness
-Werror
)
endif()
else()
target_compile_options(${bun} PUBLIC
-Wno-nullability-completeness
)
endif()
# --- Linker options ---
@@ -977,17 +943,28 @@ endif()
if(LINUX)
if(NOT ABI STREQUAL "musl")
target_link_options(${bun} PUBLIC
-Wl,--wrap=exp
-Wl,--wrap=expf
-Wl,--wrap=fcntl64
-Wl,--wrap=log
-Wl,--wrap=log2
-Wl,--wrap=log2f
-Wl,--wrap=logf
-Wl,--wrap=pow
-Wl,--wrap=powf
)
# on arm64
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
target_link_options(${bun} PUBLIC
-Wl,--wrap=exp
-Wl,--wrap=expf
-Wl,--wrap=fcntl64
-Wl,--wrap=log
-Wl,--wrap=log2
-Wl,--wrap=log2f
-Wl,--wrap=logf
-Wl,--wrap=pow
-Wl,--wrap=powf
)
else()
target_link_options(${bun} PUBLIC
-Wl,--wrap=exp
-Wl,--wrap=expf
-Wl,--wrap=log2f
-Wl,--wrap=logf
-Wl,--wrap=powf
)
endif()
endif()
if(NOT ABI STREQUAL "musl")

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
ebiggers/libdeflate
COMMIT
78051988f96dc8d8916310d8b24021f01bd9e102
733848901289eca058804ca0737f8796875204c8
)
register_cmake_command(

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
cloudflare/lol-html
COMMIT
67f1d4ffd6b74db7e053fb129dcce620193c180d
4f8becea13a0021c8b71abd2dcc5899384973b66
)
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/mimalloc
COMMIT
1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a
7a4d7b8d18f8159a808aade63eb93ea6abd06924
)
set(MIMALLOC_CMAKE_ARGS
@@ -31,13 +31,7 @@ if(ENABLE_VALGRIND)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON)
endif()
if(WIN32)
if(DEBUG)
set(MIMALLOC_LIBRARY mimalloc-static-debug)
else()
set(MIMALLOC_LIBRARY mimalloc-static)
endif()
elseif(DEBUG)
if(DEBUG)
set(MIMALLOC_LIBRARY mimalloc-debug)
else()
set(MIMALLOC_LIBRARY mimalloc)

View File

@@ -120,9 +120,6 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
endif()
if(BUILDKITE)
if(BUILDKITE_ARTIFACT_PATH STREQUAL "libbun-profile.a")
set(BUILDKITE_ARTIFACT_PATH libbun-profile.a.gz)
endif()
set(BUILDKITE_DOWNLOAD_COMMAND buildkite-agent artifact download ${BUILDKITE_ARTIFACT_PATH} . --build ${BUILDKITE_BUILD_UUID} --step ${BUILDKITE_JOB_ID})
else()
set(BUILDKITE_DOWNLOAD_COMMAND curl -L -o ${BUILDKITE_ARTIFACT_PATH} ${BUILDKITE_ARTIFACTS_URL}/${BUILDKITE_ARTIFACT_ID})
@@ -138,20 +135,6 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
OUTPUT
${BUILD_PATH}/${BUILDKITE_ARTIFACT_PATH}
)
if(BUILDKITE_ARTIFACT_PATH STREQUAL "libbun-profile.a.gz")
add_custom_command(
COMMENT
"Unpacking libbun-profile.a.gz"
VERBATIM COMMAND
gunzip libbun-profile.a.gz
WORKING_DIRECTORY
${BUILD_PATH}
OUTPUT
${BUILD_PATH}/libbun-profile.a
DEPENDS
${BUILD_PATH}/libbun-profile.a.gz
)
endif()
endforeach()
list(APPEND BUILDKITE_JOBS_MATCH ${BUILDKITE_JOB_NAME})

View File

@@ -36,8 +36,7 @@ endif()
string(REPLACE "\n" ";" GIT_CHANGED_SOURCES "${GIT_DIFF}")
if(CI)
set(GIT_CHANGED_SOURCES "${GIT_CHANGED_SOURCES}")
message(STATUS "Set GIT_CHANGED_SOURCES: ${GIT_CHANGED_SOURCES}")
setx(GIT_CHANGED_SOURCES ${GIT_CHANGED_SOURCES})
endif()
list(TRANSFORM GIT_CHANGED_SOURCES PREPEND ${CWD}/)

View File

@@ -12,7 +12,7 @@ if(NOT ENABLE_LLVM)
return()
endif()
set(DEFAULT_LLVM_VERSION "19.1.7")
set(DEFAULT_LLVM_VERSION "18.1.8")
optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION})

View File

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION 06820714a7990ea77c78157f9eeaabaf56c2098a)
set(WEBKIT_VERSION f14adff4c39ba08174b84a942f9584028fc9a7ae)
endif()
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)

View File

@@ -20,7 +20,7 @@ else()
unsupported(CMAKE_SYSTEM_NAME)
endif()
set(ZIG_COMMIT "a207204ee57a061f2fb96c7bae0c491b609e73a5")
set(ZIG_COMMIT "bb9d6ab2c0bbbf20cc24dad03e88f3b3ffdb7de7")
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
if(CMAKE_BUILD_TYPE STREQUAL "Release")
@@ -50,7 +50,6 @@ optionx(ZIG_OBJECT_FORMAT "obj|bc" "Output file format for Zig object files" DEF
optionx(ZIG_LOCAL_CACHE_DIR FILEPATH "The path to local the zig cache directory" DEFAULT ${CACHE_PATH}/zig/local)
optionx(ZIG_GLOBAL_CACHE_DIR FILEPATH "The path to the global zig cache directory" DEFAULT ${CACHE_PATH}/zig/global)
optionx(ZIG_COMPILER_SAFE BOOL "Download a ReleaseSafe build of the Zig compiler. Only available on macOS aarch64." DEFAULT OFF)
setenv(ZIG_LOCAL_CACHE_DIR ${ZIG_LOCAL_CACHE_DIR})
setenv(ZIG_GLOBAL_CACHE_DIR ${ZIG_GLOBAL_CACHE_DIR})
@@ -79,7 +78,6 @@ register_command(
-DZIG_PATH=${ZIG_PATH}
-DZIG_COMMIT=${ZIG_COMMIT}
-DENABLE_ASAN=${ENABLE_ASAN}
-DZIG_COMPILER_SAFE=${ZIG_COMPILER_SAFE}
-P ${CWD}/cmake/scripts/DownloadZig.cmake
SOURCES
${CWD}/cmake/scripts/DownloadZig.cmake

View File

@@ -55,7 +55,7 @@ RUN apt-get update -qq \
&& which bun \
&& bun --version
FROM debian:bookworm-slim
FROM debian:bullseye-slim
# Disable the runtime transpiler cache by default inside Docker containers.
# On ephemeral containers, the cache is not useful

View File

@@ -56,7 +56,7 @@ RUN apt-get update -qq \
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
&& chmod +x /usr/local/bin/bun
FROM debian:bookworm
FROM debian:bullseye
COPY docker-entrypoint.sh /usr/local/bin
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun

View File

@@ -1,449 +0,0 @@
Bun provides native APIs for working with HTTP cookies through `Bun.Cookie` and `Bun.CookieMap`. These APIs offer fast, easy-to-use methods for parsing, generating, and manipulating cookies in HTTP requests and responses.
## CookieMap class
`Bun.CookieMap` provides a Map-like interface for working with collections of cookies. It implements the `Iterable` interface, allowing you to use it with `for...of` loops and other iteration methods.
```ts
// Empty cookie map
const cookies = new Bun.CookieMap();
// From a cookie string
const cookies1 = new Bun.CookieMap("name=value; foo=bar");
// From an object
const cookies2 = new Bun.CookieMap({
session: "abc123",
theme: "dark",
});
// From an array of name/value pairs
const cookies3 = new Bun.CookieMap([
["session", "abc123"],
["theme", "dark"],
]);
```
### In HTTP servers
In Bun's HTTP server, the `cookies` property on the request object (in `routes`) is an instance of `CookieMap`:
```ts
const server = Bun.serve({
routes: {
"/": req => {
// Access request cookies
const cookies = req.cookies;
// Get a specific cookie
const sessionCookie = cookies.get("session");
if (sessionCookie != null) {
console.log(sessionCookie);
}
// Check if a cookie exists
if (cookies.has("theme")) {
// ...
}
// Set a cookie, it will be automatically applied to the response
cookies.set("visited", "true");
return new Response("Hello");
},
},
});
console.log("Server listening at: " + server.url);
```
### Methods
#### `get(name: string): string | null`
Retrieves a cookie by name. Returns `null` if the cookie doesn't exist.
```ts
// Get by name
const cookie = cookies.get("session");
if (cookie != null) {
console.log(cookie);
}
```
#### `has(name: string): boolean`
Checks if a cookie with the given name exists.
```ts
// Check if cookie exists
if (cookies.has("session")) {
// Cookie exists
}
```
#### `set(name: string, value: string): void`
#### `set(options: CookieInit): void`
#### `set(cookie: Cookie): void`
Adds or updates a cookie in the map. Cookies default to `{ path: "/", sameSite: "lax" }`.
```ts
// Set by name and value
cookies.set("session", "abc123");
// Set using options object
cookies.set({
name: "theme",
value: "dark",
maxAge: 3600,
secure: true,
});
// Set using Cookie instance
const cookie = new Bun.Cookie("visited", "true");
cookies.set(cookie);
```
#### `delete(name: string): void`
#### `delete(options: CookieStoreDeleteOptions): void`
Removes a cookie from the map. When applied to a Response, this adds a cookie with an empty string value and an expiry date in the past. The browser will only delete the cookie if its domain and path are the same as they were when the cookie was created.
```ts
// Delete by name using default domain and path.
cookies.delete("session");
// Delete with domain/path options.
cookies.delete({
name: "session",
domain: "example.com",
path: "/admin",
});
```
#### `toJSON(): Record<string, string>`
Converts the cookie map to a serializable format.
```ts
const json = cookies.toJSON();
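// e.g. { session: "abc123", theme: "dark" } for a map holding those two cookies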
```
#### `toSetCookieHeaders(): string[]`
Returns an array of values for Set-Cookie headers that can be used to apply all cookie changes.
When using `Bun.serve()`, you don't need to call this method explicitly. Any changes made to the `req.cookies` map are automatically applied to the response headers. This method is primarily useful when working with other HTTP server implementations.
```js
import { createServer } from "node:http";
import { CookieMap } from "bun";
const server = createServer((req, res) => {
const cookieHeader = req.headers.cookie || "";
const cookies = new CookieMap(cookieHeader);
cookies.set("view-count", Number(cookies.get("view-count") || "0") + 1);
cookies.delete("session");
res.writeHead(200, {
"Content-Type": "text/plain",
"Set-Cookie": cookies.toSetCookieHeaders(),
});
res.end(`Found ${cookies.size} cookies`);
});
server.listen(3000, () => {
console.log("Server running at http://localhost:3000/");
});
```
### Iteration
`CookieMap` provides several methods for iteration:
```ts
// Iterate over [name, value] entries
for (const [name, value] of cookies) {
console.log(`${name}: ${value}`);
}
// Using entries()
for (const [name, value] of cookies.entries()) {
console.log(`${name}: ${value}`);
}
// Using keys()
for (const name of cookies.keys()) {
console.log(name);
}
// Using values()
for (const value of cookies.values()) {
console.log(value);
}
// Using forEach
cookies.forEach((value, name) => {
console.log(`${name}: ${value}`);
});
```
### Properties
#### `size: number`
Returns the number of cookies in the map.
```ts
console.log(cookies.size); // Number of cookies
```
## Cookie class
`Bun.Cookie` represents an HTTP cookie with its name, value, and attributes.
```ts
import { Cookie } from "bun";
// Create a basic cookie
const cookie = new Bun.Cookie("name", "value");
// Create a cookie with options
const secureSessionCookie = new Bun.Cookie("session", "abc123", {
domain: "example.com",
path: "/admin",
expires: new Date(Date.now() + 86400000), // 1 day
httpOnly: true,
secure: true,
sameSite: "strict",
});
// Parse from a cookie string
const parsedCookie = new Bun.Cookie("name=value; Path=/; HttpOnly");
// Create from an options object
const objCookie = new Bun.Cookie({
name: "theme",
value: "dark",
maxAge: 3600,
secure: true,
});
```
### Constructors
```ts
// Basic constructor with name/value
new Bun.Cookie(name: string, value: string);
// Constructor with name, value, and options
new Bun.Cookie(name: string, value: string, options: CookieInit);
// Constructor from cookie string
new Bun.Cookie(cookieString: string);
// Constructor from cookie object
new Bun.Cookie(options: CookieInit);
```
### Properties
```ts
cookie.name; // string - Cookie name
cookie.value; // string - Cookie value
cookie.domain; // string | null - Domain scope (null if not specified)
cookie.path; // string - URL path scope (defaults to "/")
cookie.expires; // number | undefined - Expiration timestamp (ms since epoch)
cookie.secure; // boolean - Require HTTPS
cookie.sameSite; // "strict" | "lax" | "none" - SameSite setting
cookie.partitioned; // boolean - Whether the cookie is partitioned (CHIPS)
cookie.maxAge; // number | undefined - Max age in seconds
cookie.httpOnly; // boolean - Accessible only via HTTP (not JavaScript)
```
### Methods
#### `isExpired(): boolean`
Checks if the cookie has expired.
```ts
// Expired cookie (Date in the past)
const expiredCookie = new Bun.Cookie("name", "value", {
expires: new Date(Date.now() - 1000),
});
console.log(expiredCookie.isExpired()); // true
// Valid cookie (Using maxAge instead of expires)
const validCookie = new Bun.Cookie("name", "value", {
maxAge: 3600, // 1 hour in seconds
});
console.log(validCookie.isExpired()); // false
// Session cookie (no expiration)
const sessionCookie = new Bun.Cookie("name", "value");
console.log(sessionCookie.isExpired()); // false
```
#### `serialize(): string`
#### `toString(): string`
Returns a string representation of the cookie suitable for a `Set-Cookie` header.
```ts
const cookie = new Bun.Cookie("session", "abc123", {
domain: "example.com",
path: "/admin",
expires: new Date(Date.now() + 86400000),
secure: true,
httpOnly: true,
sameSite: "strict",
});
console.log(cookie.serialize());
// => "session=abc123; Domain=example.com; Path=/admin; Expires=Sun, 19 Mar 2025 15:03:26 GMT; Secure; HttpOnly; SameSite=strict"
console.log(cookie.toString());
// => "session=abc123; Domain=example.com; Path=/admin; Expires=Sun, 19 Mar 2025 15:03:26 GMT; Secure; HttpOnly; SameSite=strict"
```
#### `toJSON(): CookieInit`
Converts the cookie to a plain object suitable for JSON serialization.
```ts
const cookie = new Bun.Cookie("session", "abc123", {
secure: true,
httpOnly: true,
});
const json = cookie.toJSON();
// => {
// name: "session",
// value: "abc123",
// path: "/",
// secure: true,
// httpOnly: true,
// sameSite: "lax",
// partitioned: false
// }
// Works with JSON.stringify
const jsonString = JSON.stringify(cookie);
```
### Static methods
#### `Cookie.parse(cookieString: string): Cookie`
Parses a cookie string into a `Cookie` instance.
```ts
const cookie = Bun.Cookie.parse("name=value; Path=/; Secure; SameSite=Lax");
console.log(cookie.name); // "name"
console.log(cookie.value); // "value"
console.log(cookie.path); // "/"
console.log(cookie.secure); // true
console.log(cookie.sameSite); // "lax"
```
#### `Cookie.from(name: string, value: string, options?: CookieInit): Cookie`
Factory method to create a cookie.
```ts
const cookie = Bun.Cookie.from("session", "abc123", {
httpOnly: true,
secure: true,
maxAge: 3600,
});
```
## Types
```ts
interface CookieInit {
name?: string;
value?: string;
domain?: string;
/** Defaults to '/'. To allow the browser to set the path, use an empty string. */
path?: string;
expires?: number | Date | string;
secure?: boolean;
/** Defaults to `lax`. */
sameSite?: CookieSameSite;
httpOnly?: boolean;
partitioned?: boolean;
maxAge?: number;
}
interface CookieStoreDeleteOptions {
name: string;
domain?: string | null;
path?: string;
}
interface CookieStoreGetOptions {
name?: string;
url?: string;
}
type CookieSameSite = "strict" | "lax" | "none";
class Cookie {
constructor(name: string, value: string, options?: CookieInit);
constructor(cookieString: string);
constructor(cookieObject?: CookieInit);
readonly name: string;
value: string;
domain?: string;
path: string;
expires?: Date;
secure: boolean;
sameSite: CookieSameSite;
partitioned: boolean;
maxAge?: number;
httpOnly: boolean;
isExpired(): boolean;
serialize(): string;
toString(): string;
toJSON(): CookieInit;
static parse(cookieString: string): Cookie;
static from(name: string, value: string, options?: CookieInit): Cookie;
}
class CookieMap implements Iterable<[string, string]> {
constructor(init?: string[][] | Record<string, string> | string);
get(name: string): string | null;
toSetCookieHeaders(): string[];
has(name: string): boolean;
set(name: string, value: string, options?: CookieInit): void;
set(options: CookieInit): void;
delete(name: string): void;
delete(options: CookieStoreDeleteOptions): void;
delete(name: string, options: Omit<CookieStoreDeleteOptions, "name">): void;
toJSON(): Record<string, string>;
readonly size: number;
entries(): IterableIterator<[string, string]>;
keys(): IterableIterator<string>;
values(): IterableIterator<string>;
forEach(callback: (value: string, key: string, map: CookieMap) => void): void;
[Symbol.iterator](): IterableIterator<[string, string]>;
}
```

View File

@@ -204,7 +204,7 @@ To customize the TLS validation, use the `checkServerIdentity` option in `tls`
await fetch("https://example.com", {
tls: {
checkServerIdentity: (hostname, peerCertificate) => {
// Return an Error if the certificate is invalid
// Return an error if the certificate is invalid
},
},
});

View File

@@ -61,7 +61,6 @@ Routes in `Bun.serve()` receive a `BunRequest` (which extends [`Request`](https:
// Simplified for brevity
interface BunRequest<T extends string> extends Request {
params: Record<T, string>;
readonly cookies: CookieMap;
}
```
@@ -935,83 +934,6 @@ const server = Bun.serve({
Returns `null` for closed requests or Unix domain sockets.
## Working with Cookies
Bun provides a built-in API for working with cookies in HTTP requests and responses. The `BunRequest` object includes a `cookies` property that provides a `CookieMap` for easily accessing and manipulating cookies. When using `routes`, `Bun.serve()` automatically tracks `request.cookies.set` and applies them to the response.
### Reading cookies
Read cookies from incoming requests using the `cookies` property on the `BunRequest` object:
```ts
Bun.serve({
routes: {
"/profile": req => {
// Access cookies from the request
const userId = req.cookies.get("user_id");
const theme = req.cookies.get("theme") || "light";
return Response.json({
userId,
theme,
message: "Profile page",
});
},
},
});
```
### Setting cookies
To set cookies, use the `set` method on the `CookieMap` from the `BunRequest` object.
```ts
Bun.serve({
routes: {
"/login": req => {
const cookies = req.cookies;
// Set a cookie with various options
cookies.set("user_id", "12345", {
maxAge: 60 * 60 * 24 * 7, // 1 week
httpOnly: true,
secure: true,
path: "/",
});
// Add a theme preference cookie
cookies.set("theme", "dark");
// Modified cookies from the request are automatically applied to the response
return new Response("Login successful");
},
},
});
```
`Bun.serve()` automatically tracks modified cookies from the request and applies them to the response.
### Deleting cookies
To delete a cookie, use the `delete` method on the `request.cookies` (`CookieMap`) object:
```ts
Bun.serve({
routes: {
"/logout": req => {
// Delete the user_id cookie
req.cookies.delete("user_id", {
path: "/",
});
return new Response("Logged out successfully");
},
},
});
```
Deleted cookies become a `Set-Cookie` header on the response with the `maxAge` set to `0` and an empty `value`.
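For the `/logout` example above, the response will carry a header roughly like the following (attribute order and defaults may vary):

```
Set-Cookie: user_id=; Path=/; Max-Age=0
```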
## Server Metrics
### server.pendingRequests and server.pendingWebSockets

View File

@@ -12,3 +12,5 @@ Alternatively, use `process.dlopen`:
let mod = { exports: {} };
process.dlopen(mod, "./my-node-module.node");
```
Bun polyfills the [`detect-libc`](https://npmjs.com/package/detect-libc) package, which is used by many Node-API modules to detect which `.node` binding to `require`.
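As a rough sketch, a Node-API package that selects a prebuilt binding by libc family might do something like this (the `prebuilds/*` paths here are hypothetical):

```js
const { familySync, GLIBC } = require("detect-libc");

// Pick the binding compiled against the libc this process is running on.
const binding =
  familySync() === GLIBC
    ? require("./prebuilds/linux-x64-glibc/addon.node")
    : require("./prebuilds/linux-x64-musl/addon.node");
```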

View File

@@ -1,492 +0,0 @@
Bun provides native bindings for working with Redis databases with a modern, Promise-based API. The interface is designed to be simple and performant, with built-in connection management, fully typed responses, and TLS support. **New in Bun v1.2.9**
```ts
import { redis } from "bun";
// Set a key
await redis.set("greeting", "Hello from Bun!");
// Get a key
const greeting = await redis.get("greeting");
console.log(greeting); // "Hello from Bun!"
// Increment a counter
await redis.set("counter", 0);
await redis.incr("counter");
// Check if a key exists
const exists = await redis.exists("greeting");
// Delete a key
await redis.del("greeting");
```
## Getting Started
To use the Redis client, you first need to create a connection:
```ts
import { redis, RedisClient } from "bun";
// Using the default client (reads connection info from environment)
// process.env.REDIS_URL is used by default
await redis.set("hello", "world");
const result = await redis.get("hello");
// Creating a custom client
const client = new RedisClient("redis://username:password@localhost:6379");
await client.set("counter", "0");
await client.incr("counter");
```
By default, the client reads connection information from the following environment variables (in order of precedence):
- `REDIS_URL`
- If not set, defaults to `"redis://localhost:6379"`
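For example, with the variable exported before launching the process (the URL below is a placeholder):

```ts
// REDIS_URL=redis://user:pass@cache.internal:6380 bun run app.ts
import { redis } from "bun";

await redis.set("configured", "yes"); // connects using REDIS_URL
```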
### Connection Lifecycle
The Redis client automatically handles connections in the background:
```ts
// No connection is made until a command is executed
const client = new RedisClient();
// First command initiates the connection
await client.set("key", "value");
// Connection remains open for subsequent commands
await client.get("key");
// Explicitly close the connection when done
client.close();
```
You can also manually control the connection lifecycle:
```ts
const client = new RedisClient();
// Explicitly connect
await client.connect();
// Run commands
await client.set("key", "value");
// Disconnect when done
client.close();
```
## Basic Operations
### String Operations
```ts
// Set a key
await redis.set("user:1:name", "Alice");
// Get a key
const name = await redis.get("user:1:name");
// Delete a key
await redis.del("user:1:name");
// Check if a key exists
const exists = await redis.exists("user:1:name");
// Set expiration (in seconds)
await redis.set("session:123", "active");
await redis.expire("session:123", 3600); // expires in 1 hour
// Get time to live (in seconds)
const ttl = await redis.ttl("session:123");
```
### Numeric Operations
```ts
// Set initial value
await redis.set("counter", "0");
// Increment by 1
await redis.incr("counter");
// Decrement by 1
await redis.decr("counter");
```
### Hash Operations
```ts
// Set multiple fields in a hash
await redis.hmset("user:123", [
"name",
"Alice",
"email",
"alice@example.com",
"active",
"true",
]);
// Get multiple fields from a hash
const userFields = await redis.hmget("user:123", ["name", "email"]);
console.log(userFields); // ["Alice", "alice@example.com"]
// Increment a numeric field in a hash
await redis.hincrby("user:123", "visits", 1);
// Increment a float field in a hash
await redis.hincrbyfloat("user:123", "score", 1.5);
```
### Set Operations
```ts
// Add member to set
await redis.sadd("tags", "javascript");
// Remove member from set
await redis.srem("tags", "javascript");
// Check if member exists in set
const isMember = await redis.sismember("tags", "javascript");
// Get all members of a set
const allTags = await redis.smembers("tags");
// Get a random member
const randomTag = await redis.srandmember("tags");
// Pop (remove and return) a random member
const poppedTag = await redis.spop("tags");
```
## Advanced Usage
### Command Execution and Pipelining
The client automatically pipelines commands, improving performance by sending multiple commands in a batch and processing responses as they arrive.
```ts
// Commands are automatically pipelined by default
const [infoResult, listResult] = await Promise.all([
redis.get("user:1:name"),
redis.get("user:2:email"),
]);
```
To disable automatic pipelining, you can set the `enableAutoPipelining` option to `false`:
```ts
const client = new RedisClient("redis://localhost:6379", {
enableAutoPipelining: false,
});
```
### Raw Commands
When you need to use commands that don't have convenience methods, you can use the `send` method:
```ts
// Run any Redis command
const info = await redis.send("INFO", []);
// LPUSH to a list
await redis.send("LPUSH", ["mylist", "value1", "value2"]);
// Get list range
const list = await redis.send("LRANGE", ["mylist", "0", "-1"]);
```
The `send` method allows you to use any Redis command, even ones that don't have dedicated methods in the client. The first argument is the command name, and the second argument is an array of string arguments.
### Connection Events
You can register handlers for connection events:
```ts
const client = new RedisClient();
// Called when successfully connected to Redis server
client.onconnect = () => {
console.log("Connected to Redis server");
};
// Called when disconnected from Redis server
client.onclose = error => {
console.error("Disconnected from Redis server:", error);
};
// Manually connect/disconnect
await client.connect();
client.close();
```
### Connection Status and Monitoring
```ts
// Check if connected
console.log(client.connected); // boolean indicating connection status
// Check amount of data buffered (in bytes)
console.log(client.bufferedAmount);
```
### Type Conversion
The Redis client handles automatic type conversion for Redis responses:
- Integer responses are returned as JavaScript numbers
- Bulk strings are returned as JavaScript strings
- Simple strings are returned as JavaScript strings
- Null bulk strings are returned as `null`
- Array responses are returned as JavaScript arrays
- Error responses throw JavaScript errors with appropriate error codes
- Boolean responses (RESP3) are returned as JavaScript booleans
- Map responses (RESP3) are returned as JavaScript objects
- Set responses (RESP3) are returned as JavaScript arrays
Special handling for specific commands (see the example below):
- `EXISTS` returns a boolean instead of a number (1 becomes true, 0 becomes false)
- `SISMEMBER` returns a boolean (1 becomes true, 0 becomes false)
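For instance, the integer reply from `EXISTS` surfaces as a boolean:
```ts
await redis.set("greeting", "Hello from Bun!");
console.log(await redis.exists("greeting")); // true, not 1
console.log(await redis.exists("missing-key")); // false, not 0
```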
The following commands disable automatic pipelining:
- `AUTH`
- `INFO`
- `QUIT`
- `EXEC`
- `MULTI`
- `WATCH`
- `SCRIPT`
- `SELECT`
- `CLUSTER`
- `DISCARD`
- `UNWATCH`
- `PIPELINE`
- `SUBSCRIBE`
- `UNSUBSCRIBE`
- `PUNSUBSCRIBE`
## Connection Options
When creating a client, you can pass various options to configure the connection:
```ts
const client = new RedisClient("redis://localhost:6379", {
// Connection timeout in milliseconds (default: 10000)
connectionTimeout: 5000,
// Idle timeout in milliseconds (default: 0 = no timeout)
idleTimeout: 30000,
// Whether to automatically reconnect on disconnection (default: true)
autoReconnect: true,
// Maximum number of reconnection attempts (default: 10)
maxRetries: 10,
// Whether to queue commands when disconnected (default: true)
enableOfflineQueue: true,
// Whether to automatically pipeline commands (default: true)
enableAutoPipelining: true,
// TLS options (default: false)
tls: true,
// Alternatively, provide custom TLS config:
// tls: {
// rejectUnauthorized: true,
// ca: "path/to/ca.pem",
// cert: "path/to/cert.pem",
// key: "path/to/key.pem",
// }
});
```
### Reconnection Behavior
When a connection is lost, the client automatically attempts to reconnect with exponential backoff:
1. The client starts with a small delay (50ms) and doubles it with each attempt
2. Reconnection delay is capped at 2000ms (2 seconds)
3. The client attempts to reconnect up to `maxRetries` times (default: 10)
4. Commands executed during disconnection are:
- Queued if `enableOfflineQueue` is true (default)
- Rejected immediately if `enableOfflineQueue` is false (see the sketch below)
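As a minimal sketch, a client that retries a few times but fails fast while disconnected might be configured like this:
```ts
const client = new RedisClient("redis://localhost:6379", {
  autoReconnect: true,
  maxRetries: 3,
  enableOfflineQueue: false, // commands issued while disconnected reject immediately
});

try {
  await client.get("some-key");
} catch (error) {
  // With the offline queue disabled, this fires as soon as the connection is down
  console.error("Redis unavailable:", error);
}
```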
## Supported URL Formats
The Redis client supports various URL formats:
```ts
// Standard Redis URL
new RedisClient("redis://localhost:6379");
new RedisClient("redis://localhost:6379");
// With authentication
new RedisClient("redis://username:password@localhost:6379");
// With database number
new RedisClient("redis://localhost:6379/0");
// TLS connections
new RedisClient("rediss://localhost:6379");
new RedisClient("rediss://localhost:6379");
new RedisClient("redis+tls://localhost:6379");
new RedisClient("redis+tls://localhost:6379");
// Unix socket connections
new RedisClient("redis+unix:///path/to/socket");
new RedisClient("redis+unix:///path/to/socket");
// TLS over Unix socket
new RedisClient("redis+tls+unix:///path/to/socket");
new RedisClient("redis+tls+unix:///path/to/socket");
```
## Error Handling
The Redis client throws typed errors for different scenarios:
```ts
try {
await redis.get("non-existent-key");
} catch (error) {
if (error.code === "ERR_REDIS_CONNECTION_CLOSED") {
console.error("Connection to Redis server was closed");
} else if (error.code === "ERR_REDIS_AUTHENTICATION_FAILED") {
console.error("Authentication failed");
} else {
console.error("Unexpected error:", error);
}
}
```
Common error codes:
- `ERR_REDIS_CONNECTION_CLOSED` - Connection to the server was closed
- `ERR_REDIS_AUTHENTICATION_FAILED` - Failed to authenticate with the server
- `ERR_REDIS_INVALID_RESPONSE` - Received an invalid response from the server
## Example Use Cases
### Caching
```ts
async function getUserWithCache(userId) {
const cacheKey = `user:${userId}`;
// Try to get from cache first
const cachedUser = await redis.get(cacheKey);
if (cachedUser) {
return JSON.parse(cachedUser);
}
// Not in cache, fetch from database
const user = await database.getUser(userId);
// Store in cache for 1 hour
await redis.set(cacheKey, JSON.stringify(user));
await redis.expire(cacheKey, 3600);
return user;
}
```
### Rate Limiting
```ts
async function rateLimit(ip, limit = 100, windowSecs = 3600) {
const key = `ratelimit:${ip}`;
// Increment counter
const count = await redis.incr(key);
// Set expiry if this is the first request in window
if (count === 1) {
await redis.expire(key, windowSecs);
}
// Check if limit exceeded
return {
limited: count > limit,
remaining: Math.max(0, limit - count),
};
}
```
### Session Storage
```ts
async function createSession(userId, data) {
const sessionId = crypto.randomUUID();
const key = `session:${sessionId}`;
// Store session with expiration
await redis.hmset(key, [
"userId",
userId.toString(),
"created",
Date.now().toString(),
"data",
JSON.stringify(data),
]);
await redis.expire(key, 86400); // 24 hours
return sessionId;
}
async function getSession(sessionId) {
const key = `session:${sessionId}`;
// Get session data
const exists = await redis.exists(key);
if (!exists) return null;
const [userId, created, data] = await redis.hmget(key, [
"userId",
"created",
"data",
]);
return {
userId: Number(userId),
created: Number(created),
data: JSON.parse(data),
};
}
```
## Implementation Notes
Bun's Redis client is implemented in Zig and uses the Redis Serialization Protocol (RESP3). It manages connections efficiently and provides automatic reconnection with exponential backoff.
The client supports pipelining commands, meaning multiple commands can be sent without waiting for the replies to previous commands. This significantly improves performance when sending multiple commands in succession.
### RESP3 Protocol Support
Bun's Redis client uses the newer RESP3 protocol by default, which provides more data types and features compared to RESP2:
- Better error handling with typed errors
- Native Boolean responses
- Map/Dictionary responses (key-value objects)
- Set responses
- Double (floating point) values
- BigNumber support for large integer values
When connecting to Redis servers running older versions that don't support RESP3, the client automatically falls back to a compatible mode.
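As an illustration, a RESP3 map reply arrives as a plain object (this sketch assumes a RESP3-capable server and uses the raw command API for `HGETALL`):
```ts
await redis.hmset("user:1", ["name", "Alice", "role", "admin"]);
const user = await redis.send("HGETALL", ["user:1"]);
console.log(user); // { name: "Alice", role: "admin" }
```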
## Limitations and Future Plans
Current limitations of the Redis client we are planning to address in future versions:
- [ ] No dedicated API for pub/sub functionality (though you can use the raw command API)
- [ ] Transactions (MULTI/EXEC) must be done through raw commands for now (see the sketch below)
- [ ] Streams are supported but without dedicated methods
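In the meantime, a transaction can be issued entirely through the raw command API; a sketch (the queued replies are returned by `EXEC`):
```ts
await redis.send("MULTI", []);
await redis.send("INCR", ["counter"]); // resolves to "QUEUED"
await redis.send("INCR", ["counter"]);
const results = await redis.send("EXEC", []);
console.log(results); // e.g. [1, 2]
```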
Unsupported features:
- Redis Sentinel
- Redis Cluster

View File

@@ -619,48 +619,6 @@ When the S3 Object Storage service returns an error (that is, not Bun), it will
The `S3Client` class provides several static methods for interacting with S3.
### `S3Client.write` (static)
To write data directly to a path in the bucket, you can use the `S3Client.write` static method.
```ts
import { S3Client } from "bun";
const credentials = {
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
// endpoint: "https://s3.us-east-1.amazonaws.com",
// endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
};
// Write string
await S3Client.write("my-file.txt", "Hello World");
// Write JSON with type
await S3Client.write(
"data.json",
JSON.stringify({hello: "world"}),
{
...credentials,
type: "application/json",
}
);
// Write from fetch
const res = await fetch("https://example.com/data");
await S3Client.write("data.bin", res, credentials);
// Write with ACL
await S3Client.write("public.html", html, {
...credentials,
acl: "public-read",
type: "text/html"
});
```
This is equivalent to calling `new S3Client(credentials).write("my-file.txt", "Hello World")`.
### `S3Client.presign` (static)
To generate a presigned URL for an S3 file, you can use the `S3Client.presign` static method.
@@ -684,45 +642,6 @@ const url = S3Client.presign("my-file.txt", {
This is equivalent to calling `new S3Client(credentials).presign("my-file.txt", { expiresIn: 3600 })`.
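Filling in the credentials shape from the earlier examples, a full call might look like this (key values are illustrative):
```ts
import { S3Client } from "bun";

const url = S3Client.presign("my-file.txt", {
  accessKeyId: "your-access-key",
  secretAccessKey: "your-secret-key",
  bucket: "my-bucket",
  expiresIn: 3600, // seconds until the URL expires
});
```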
### `S3Client.list` (static)
To list some or all (up to 1,000) objects in a bucket, you can use the `S3Client.list` static method.
```ts
import { S3Client } from "bun";
const credentials = {
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
// endpoint: "https://s3.us-east-1.amazonaws.com",
// endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
};
// List (up to) 1000 objects in the bucket
const allObjects = await S3Client.list(null, credentials);
// List (up to) 500 objects under `uploads/` prefix, with owner field for each object
const uploads = await S3Client.list({
prefix: 'uploads/',
maxKeys: 500,
fetchOwner: true,
}, credentials);
// Check if more results are available
if (uploads.isTruncated) {
// List next batch of objects under `uploads/` prefix
const moreUploads = await S3Client.list({
prefix: 'uploads/',
maxKeys: 500,
startAfter: uploads.contents!.at(-1).key,
fetchOwner: true,
}, credentials);
}
```
This is equivalent to calling `new S3Client(credentials).list()`.
### `S3Client.exists` (static)
To check if an S3 file exists, you can use the `S3Client.exists` static method.
@@ -735,7 +654,6 @@ const credentials = {
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
// endpoint: "https://s3.us-east-1.amazonaws.com",
// endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
};
const exists = await S3Client.exists("my-file.txt", credentials);
@@ -752,26 +670,6 @@ const s3file = s3.file("my-file.txt", {
const exists = await s3file.exists();
```
### `S3Client.size` (static)
To quickly check the size of an S3 file without downloading it, you can use the `S3Client.size` static method.
```ts
import { S3Client } from "bun";
const credentials = {
accessKeyId: "your-access-key",
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
// endpoint: "https://s3.us-east-1.amazonaws.com",
// endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
};
const bytes = await S3Client.size("my-file.txt", credentials);
```
This is equivalent to calling `new S3Client(credentials).size("my-file.txt")`.
### `S3Client.stat` (static)
To get the size, etag, and other metadata of an S3 file, you can use the `S3Client.stat` static method.
@@ -784,7 +682,6 @@ const credentials = {
secretAccessKey: "your-secret-key",
bucket: "my-bucket",
// endpoint: "https://s3.us-east-1.amazonaws.com",
// endpoint: "https://<account-id>.r2.cloudflarestorage.com", // Cloudflare R2
};
const stat = await S3Client.stat("my-file.txt", credentials);
@@ -818,7 +715,7 @@ await S3Client.delete("my-file.txt", credentials);
await S3Client.unlink("my-file.txt", credentials);
```
## `s3://` protocol
## s3:// protocol
To make it easier to use the same code for local files and S3 files, the `s3://` protocol is supported in `fetch` and `Bun.file()`.
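A sketch (assuming credentials are supplied via environment variables, as elsewhere in Bun's S3 support):
```ts
// Both of these refer to "my-file.txt" inside the "my-bucket" bucket
const res = await fetch("s3://my-bucket/my-file.txt");
const file = Bun.file("s3://my-bucket/my-file.txt");
console.log(await file.text());
```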

View File

@@ -77,16 +77,6 @@ console.log(text); // "const input = "hello world".repeat(400); ..."
---
- `ReadableStream`
- Use a readable stream as input.
---
- `Blob`
- Use a blob as input.
---
- `number`
- Read from the file with a given file descriptor.
@@ -139,13 +129,13 @@ Configure the output stream by passing one of the following values to `stdout/st
---
- `"ignore"`
- Discard the output.
- `Bun.file()`
- Write to the specified file.
---
- `Bun.file()`
- Write to the specified file.
- `null`
- Write to `/dev/null`.
---
@@ -184,8 +174,7 @@ const proc = Bun.spawn(["bun", "--version"]);
proc.kill();
proc.killed; // true
proc.kill(15); // specify a signal code
proc.kill("SIGTERM"); // specify a signal name
proc.kill(); // specify an exit code
```
The parent `bun` process will not terminate until all child processes have exited. Use `proc.unref()` to detach the child process from the parent.
@@ -195,77 +184,6 @@ const proc = Bun.spawn(["bun", "--version"]);
proc.unref();
```
## Resource usage
You can get information about the process's resource usage after it has exited:
```ts
const proc = Bun.spawn(["bun", "--version"]);
await proc.exited;
const usage = proc.resourceUsage();
console.log(`Max memory used: ${usage.maxRSS} bytes`);
console.log(`CPU time (user): ${usage.cpuTime.user} µs`);
console.log(`CPU time (system): ${usage.cpuTime.system} µs`);
```
## Using AbortSignal
You can abort a subprocess using an `AbortSignal`:
```ts
const controller = new AbortController();
const { signal } = controller;
const proc = Bun.spawn({
cmd: ["sleep", "100"],
signal,
});
// Later, to abort the process:
controller.abort();
```
## Using timeout and killSignal
You can set a timeout for a subprocess to automatically terminate after a specific duration:
```ts
// Kill the process after 5 seconds
const proc = Bun.spawn({
cmd: ["sleep", "10"],
timeout: 5000, // 5 seconds in milliseconds
});
await proc.exited; // Will resolve after 5 seconds
```
By default, timed-out processes are killed with the `SIGTERM` signal. You can specify a different signal with the `killSignal` option:
```ts
// Kill the process with SIGKILL after 5 seconds
const proc = Bun.spawn({
cmd: ["sleep", "10"],
timeout: 5000,
killSignal: "SIGKILL", // Can be string name or signal number
});
```
The `killSignal` option also controls which signal is sent when an AbortSignal is aborted.
## Using maxBuffer
For spawnSync, you can limit the maximum number of bytes of output before the process is killed:
```ts
// Kill 'yes' after it emits over 100 bytes of output
const result = Bun.spawnSync({
cmd: ["yes"], // or ["bun", "exec", "yes"] on Windows
maxBuffer: 100,
});
// process exits
```
## Inter-process communication (IPC)
Bun supports a direct inter-process communication channel between two `bun` processes. To receive messages from a spawned Bun subprocess, specify an `ipc` handler.
@@ -315,17 +233,11 @@ process.send("Hello from child as string");
process.send({ message: "Hello from child as object" });
```
The `serialization` option controls the underlying communication format between the two processes:
The `ipcMode` option controls the underlying communication format between the two processes:
- `advanced`: (default) Messages are serialized using the JSC `serialize` API, which supports cloning [everything `structuredClone` supports](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm). This does not support transferring ownership of objects.
- `json`: Messages are serialized using `JSON.stringify` and `JSON.parse`, which does not support as many object types as `advanced` does.
To disconnect the IPC channel from the parent process, call:
```ts
childProc.disconnect();
```
### IPC between Bun & Node.js
To use IPC between a `bun` process and a Node.js process, set `serialization: "json"` in `Bun.spawn`. This is because Node.js and Bun use different JavaScript engines with different object serialization formats.
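A minimal sketch (the `child.js` script is hypothetical and would call `process.send` on the Node.js side):
```ts
const child = Bun.spawn(["node", "child.js"], {
  serialization: "json", // required for Node.js interop
  ipc(message) {
    console.log("Received from Node.js:", message);
  },
});

child.send({ hello: "from Bun" });
```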
@@ -398,7 +310,7 @@ spawnSync echo hi 1.47 ms/iter (1.14 ms … 2.64 ms) 1.57 ms 2.37 ms
## Reference
A reference of the Spawn API and types are shown below. The real types have complex generics to strongly type the `Subprocess` streams with the options passed to `Bun.spawn` and `Bun.spawnSync`. For full details, find these types as defined [bun.d.ts](https://github.com/oven-sh/bun/blob/main/packages/bun-types/bun.d.ts).
A simple reference of the Spawn API and types are shown below. The real types have complex generics to strongly type the `Subprocess` streams with the options passed to `Bun.spawn` and `Bun.spawnSync`. For full details, find these types as defined [bun.d.ts](https://github.com/oven-sh/bun/blob/main/packages/bun-types/bun.d.ts).
```ts
interface Bun {
@@ -417,26 +329,16 @@ interface Bun {
namespace SpawnOptions {
interface OptionsObject {
cwd?: string;
env?: Record<string, string | undefined>;
stdio?: [Writable, Readable, Readable];
stdin?: Writable;
stdout?: Readable;
stderr?: Readable;
onExit?(
subprocess: Subprocess,
env?: Record<string, string>;
stdin?: SpawnOptions.Readable;
stdout?: SpawnOptions.Writable;
stderr?: SpawnOptions.Writable;
onExit?: (
proc: Subprocess,
exitCode: number | null,
signalCode: number | null,
error?: ErrorLike,
): void | Promise<void>;
ipc?(message: any, subprocess: Subprocess): void;
serialization?: "json" | "advanced";
windowsHide?: boolean;
windowsVerbatimArguments?: boolean;
argv0?: string;
signal?: AbortSignal;
timeout?: number;
killSignal?: string | number;
maxBuffer?: number;
signalCode: string | null,
error: Error | null,
) => void;
}
type Readable =
@@ -464,62 +366,39 @@ namespace SpawnOptions {
| Request;
}
interface Subprocess extends AsyncDisposable {
readonly stdin: FileSink | number | undefined;
readonly stdout: ReadableStream<Uint8Array> | number | undefined;
readonly stderr: ReadableStream<Uint8Array> | number | undefined;
readonly readable: ReadableStream<Uint8Array> | number | undefined;
interface Subprocess<Stdin, Stdout, Stderr> {
readonly pid: number;
// the exact stream types here are derived from the generic parameters
readonly stdin: number | ReadableStream | FileSink | undefined;
readonly stdout: number | ReadableStream | undefined;
readonly stderr: number | ReadableStream | undefined;
readonly exited: Promise<number>;
readonly exitCode: number | null;
readonly signalCode: NodeJS.Signals | null;
readonly exitCode: number | undefined;
readonly signalCode: Signal | null;
readonly killed: boolean;
kill(exitCode?: number | NodeJS.Signals): void;
ref(): void;
unref(): void;
send(message: any): void;
disconnect(): void;
resourceUsage(): ResourceUsage | undefined;
kill(code?: number): void;
}
interface SyncSubprocess {
stdout: Buffer | undefined;
stderr: Buffer | undefined;
exitCode: number;
success: boolean;
resourceUsage: ResourceUsage;
signalCode?: string;
exitedDueToTimeout?: true;
pid: number;
interface SyncSubprocess<Stdout, Stderr> {
readonly pid: number;
readonly success: boolean;
// the exact buffer types here are derived from the generic parameters
readonly stdout: Buffer | undefined;
readonly stderr: Buffer | undefined;
}
interface ResourceUsage {
contextSwitches: {
voluntary: number;
involuntary: number;
};
type ReadableSubprocess = Subprocess<any, "pipe", "pipe">;
type WritableSubprocess = Subprocess<"pipe", any, any>;
type PipedSubprocess = Subprocess<"pipe", "pipe", "pipe">;
type NullSubprocess = Subprocess<null, null, null>;
cpuTime: {
user: number;
system: number;
total: number;
};
maxRSS: number;
messages: {
sent: number;
received: number;
};
ops: {
in: number;
out: number;
};
shmSize: number;
signalCount: number;
swapCount: number;
}
type ReadableSyncSubprocess = SyncSubprocess<"pipe", "pipe">;
type NullSyncSubprocess = SyncSubprocess<null, null>;
type Signal =
| "SIGABRT"

View File

@@ -240,7 +240,7 @@ const result = await sql.unsafe(
### Execute and Cancelling Queries
Bun's SQL is lazy, which means it will only start executing when awaited or executed with `.execute()`.
Bun's SQL is lazy that means its will only start executing when awaited or executed with `.execute()`.
You can cancel a query that is currently executing by calling the `cancel()` method on the query object.
```ts

View File

@@ -11,7 +11,7 @@ Bun.listen({
socket: {
data(socket, data) {}, // message received from client
open(socket) {}, // socket opened
close(socket, error) {}, // socket closed
close(socket) {}, // socket closed
drain(socket) {}, // socket ready for more data
error(socket, error) {}, // error handler
},
@@ -30,7 +30,7 @@ Bun.listen({
open(socket) {},
data(socket, data) {},
drain(socket) {},
close(socket, error) {},
close(socket) {},
error(socket, error) {},
},
});
@@ -122,7 +122,7 @@ const socket = await Bun.connect({
socket: {
data(socket, data) {},
open(socket) {},
close(socket, error) {},
close(socket) {},
drain(socket) {},
error(socket, error) {},

File diff suppressed because it is too large

View File

@@ -1,145 +0,0 @@
# CSS Modules
Bun's bundler also supports bundling [CSS modules](https://css-tricks.com/css-modules-part-1-need/) in addition to [regular CSS](/docs/bundler/css) with support for the following features:
- Automatically detecting CSS module files (`.module.css`) with zero configuration
- Composition (`composes` property)
- Importing CSS modules into JSX/TSX
- Warnings/errors for invalid usages of CSS modules
A CSS module is a CSS file (with the `.module.css` extension) where all class names and animations are scoped to the file. This helps you avoid class name collisions, as CSS declarations are globally scoped by default.
Under the hood, Bun's bundler transforms locally scoped class names into unique identifiers.
## Getting started
Create a CSS file with the `.module.css` extension:
```css
/* styles.module.css */
.button {
color: red;
}
/* other-styles.module.css */
.button {
color: blue;
}
```
You can then import this file, for example into a TSX file:
```tsx
import styles from "./styles.module.css";
import otherStyles from "./other-styles.module.css";
export default function App() {
return (
<>
<button className={styles.button}>Red button!</button>
<button className={otherStyles.button}>Blue button!</button>
</>
);
}
```
The `styles` object from importing the CSS module file will be an object with all class names as keys and
their unique identifiers as values:
```tsx
import styles from "./styles.module.css";
import otherStyles from "./other-styles.module.css";
console.log(styles);
console.log(otherStyles);
```
This will output:
```ts
{
button: "button_123";
}
{
button: "button_456";
}
```
As you can see, the class names are unique to each file, avoiding any collisions!
### Composition
CSS modules allow you to _compose_ class selectors together. This lets you reuse style rules across multiple classes.
For example:
```css
/* styles.module.css */
.button {
composes: background;
color: red;
}
.background {
background-color: blue;
}
```
Would be the same as writing:
```css
.button {
background-color: blue;
color: red;
}
.background {
background-color: blue;
}
```
{% callout %}
There are a couple of rules to keep in mind when using `composes`:
- A `composes` property must come before any regular CSS properties or declarations
- You can only use `composes` on a **simple selector with a single class name**:
```css
#button {
/* Invalid! `#button` is not a class selector */
composes: background;
}
.button,
.button-secondary {
/* Invalid! `.button, .button-secondary` is not a simple selector */
composes: background;
}
```
{% /callout %}
### Composing from a separate CSS module file
You can also compose from a separate CSS module file:
```css
/* background.module.css */
.background {
background-color: blue;
}
/* styles.module.css */
.button {
composes: background from "./background.module.css";
color: red;
}
```
{% callout %}
When composing classes from separate files, be sure that they do not contain the same properties.
The CSS module spec says that composing classes from separate files with conflicting properties is
undefined behavior, meaning that the output may differ and be unreliable.
{% /callout %}

View File

@@ -309,4 +309,5 @@ This works similarly to how [`Bun.build` processes HTML files](/docs/bundler/htm
## This is a work in progress
- ~Client-side hot reloading isn't wired up yet. It will be in the future.~ New in Bun v1.2.3
- This doesn't support `bun build` yet. It also will in the future.

View File

@@ -1,234 +0,0 @@
Hot Module Replacement (HMR) allows you to update modules in a running
application without needing a full page reload. This preserves the application
state and improves the development experience.
HMR is enabled by default when using Bun's full-stack development server.
## `import.meta.hot` API Reference
Bun implements a client-side HMR API modeled after [Vite's `import.meta.hot` API](https://vitejs.dev/guide/api-hmr.html). It can be checked for with `if (import.meta.hot)`, which lets bundlers tree-shake it out of production builds:
```ts
if (import.meta.hot) {
// HMR APIs are available.
}
```
However, **this check is often not needed** as Bun will dead-code-eliminate
calls to all of the HMR APIs in production builds.
```ts
// This entire function call will be removed in production!
import.meta.hot.dispose(() => {
console.log("dispose");
});
```
For this to work, Bun forces these APIs to be called without indirection. That means the following do not work:
```ts#invalid-hmr-usage.ts
// INVALID: Assigning `hot` to a variable
const hot = import.meta.hot;
hot.accept();
// INVALID: Assigning `import.meta` to a variable
const meta = import.meta;
meta.hot.accept();
console.log(meta.hot.data);
// INVALID: Passing to a function
doSomething(import.meta.hot.dispose);
// OK: The full phrase "import.meta.hot.<API>" must be called directly:
import.meta.hot.accept();
// OK: `data` can be passed to functions:
doSomething(import.meta.hot.data);
```
{% callout %}
**Note** — The HMR API is still a work in progress. Some features are missing. HMR can be disabled in `Bun.serve` by setting the `development` option to `{ hmr: false }`.
{% /callout %}
| | Method | Notes |
| --- | ------------------ | --------------------------------------------------------------------- |
| ✅ | `hot.accept()` | Indicate that a hot update can be replaced gracefully. |
| ✅ | `hot.data` | Persist data between module evaluations. |
| ✅ | `hot.dispose()` | Add a callback function to run when a module is about to be replaced. |
| ❌ | `hot.invalidate()` | |
| ✅ | `hot.on()` | Attach an event listener |
| ✅ | `hot.off()` | Remove an event listener from `on`. |
| ❌ | `hot.send()` | |
| 🚧 | `hot.prune()` | **NOTE**: Callback is currently never called. |
| ✅ | `hot.decline()` | No-op to match Vite's `import.meta.hot` |
### `import.meta.hot.accept()`
The `accept()` method indicates that a module can be hot-replaced. When called
without arguments, it indicates that this module can be replaced simply by
re-evaluating the file. After a hot update, importers of this module will be
automatically patched.
```ts#index.ts
import { getCount } from "./foo.ts";
console.log("count is ", getCount());
import.meta.hot.accept();
export function getNegativeCount() {
return -getCount();
}
```
This creates a hot-reloading boundary for all of the files that `index.ts`
imports. That means whenever `foo.ts` or any of its dependencies are saved, the
update will bubble up to `index.ts`, which will re-evaluate. Files that import
`index.ts` will then be patched to import the new version of
`getNegativeCount()`. If only `index.ts` is updated, only the one file will be
re-evaluated, and the counter in `foo.ts` is reused.
This may be used in combination with `import.meta.hot.data` to transfer state
from the previous module to the new one.
When no modules call `import.meta.hot.accept()` (and there isn't React Fast
Refresh or a plugin calling it for you), the page will reload when the file
updates, and a console warning shows which files were invalidated. This warning
is safe to ignore if it makes more sense to rely on full page reloads.
#### With callback
When provided a callback, `import.meta.hot.accept` will function as it does
in Vite. Instead of patching the importers of this module, it will call the
callback with the new module.
```ts
export const count = 0;
import.meta.hot.accept(newModule => {
if (newModule) {
// newModule is undefined when SyntaxError happened
console.log("updated: count is now ", newModule.count);
}
});
```
Prefer using `import.meta.hot.accept()` without an argument as it usually makes your code easier to understand.
#### Accepting other modules
```ts
import { count } from "./foo";
import.meta.hot.accept("./foo", () => {
if (!newModule) return;
console.log("updated: count is now ", count);
});
```
Indicates that a dependency's module can be accepted. When the dependency is updated, the callback will be called with the new module.
#### With multiple dependencies
```ts
import.meta.hot.accept(["./foo", "./bar"], newModules => {
// newModules is an array where each item corresponds to the updated module
// or undefined if that module had a syntax error
});
```
Indicates that multiple dependencies' modules can be accepted. This variant accepts an array of dependencies, where the callback will receive the updated modules, and `undefined` for any that had errors.
### `import.meta.hot.data`
`import.meta.hot.data` maintains state between module instances during hot
replacement, enabling data transfer from previous to new versions. When
`import.meta.hot.data` is written to, Bun will also mark this module as
capable of self-accepting (the equivalent of calling `import.meta.hot.accept()`).
```ts
import { createRoot } from "react-dom/client";
import { App } from "./app";
const root = import.meta.hot.data.root ??= createRoot(elem);
root.render(<App />); // re-use an existing root
```
In production, `data` is inlined to be `{}`, meaning it cannot be used as a state holder.
The above pattern is recommended for stateful modules because Bun knows it can minify `{}.prop ??= value` into `value` in production.
### `import.meta.hot.dispose()`
Attaches an on-dispose callback. This is called:
- Just before the module is replaced with another copy (before the next is loaded)
- After the module is detached (removing all imports to this module, see `import.meta.hot.prune()`)
```ts
const sideEffect = setupSideEffect();
import.meta.hot.dispose(() => {
sideEffect.cleanup();
});
```
This callback is not called on route navigation or when the browser tab closes.
Returning a promise will delay module replacement until the module is disposed.
All dispose callbacks are called in parallel.
### `import.meta.hot.prune()`
Attaches an on-prune callback. This is called when all imports to this module
are removed, but the module was previously loaded.
This can be used to clean up resources that were created when the module was
loaded. Unlike `import.meta.hot.dispose()`, this pairs much better with `accept`
and `data` to manage stateful resources. A full example managing a `WebSocket`:
```ts
import { something } from "./something";
// Initialize or re-use a WebSocket connection
export const ws = (import.meta.hot.data.ws ??= new WebSocket(location.origin));
// If the module's import is removed, clean up the WebSocket connection.
import.meta.hot.prune(() => {
ws.close();
});
```
If `dispose` was used instead, the WebSocket would close and re-open on every
hot update. Both versions of the code will prevent page reloads when imported
files are updated.
### `import.meta.hot.on()` and `off()`
`on()` and `off()` are used to listen for events from the HMR runtime. Event names carry a prefix (such as `bun:`) so that plugins do not conflict with each other.
```ts
import.meta.hot.on("bun:beforeUpdate", () => {
console.log("before a hot update");
});
```
When a file is replaced, all of its event listeners are automatically removed.
A list of all built-in events:
| Event | Emitted when |
| ---------------------- | ----------------------------------------------------------------------------------------------- |
| `bun:beforeUpdate` | before a hot update is applied. |
| `bun:afterUpdate` | after a hot update is applied. |
| `bun:beforeFullReload` | before a full page reload happens. |
| `bun:beforePrune` | before prune callbacks are called. |
| `bun:invalidate` | when a module is invalidated with `import.meta.hot.invalidate()` |
| `bun:error` | when a build or runtime error occurs |
| `bun:ws:disconnect` | when the HMR WebSocket connection is lost. This can indicate the development server is offline. |
| `bun:ws:connect` | when the HMR WebSocket connects or re-connects. |
For compatibility with Vite, the above events are also available via `vite:*` prefix instead of `bun:*`.

View File

@@ -4,12 +4,6 @@ The Bun bundler implements a set of default loaders out of the box. As a rule of
Bun uses the file extension to determine which built-in _loader_ should be used to parse the file. Every loader has a name, such as `js`, `tsx`, or `json`. These names are used when building [plugins](https://bun.sh/docs/bundler/plugins) that extend Bun with custom loaders.
You can explicitly specify which loader to use using the 'loader' import attribute.
```ts
import my_toml from "./my_file" with { loader: "toml" };
```
## Built-in loaders
### `js`

View File

@@ -82,11 +82,6 @@ The `--dry-run` flag can be used to simulate the publish process without actuall
$ bun publish --dry-run
```
### `--gzip-level`
Specify the level of gzip compression to use when packing the package. Only applies to `bun publish` without a tarball path argument. Values range from `0` to `9` (default is `9`).
{% bunCLIUsage command="publish" /%}
### `--auth-type`
If you have 2FA enabled for your npm account, `bun publish` will prompt you for a one-time password. This can be done through a browser or the CLI. The `--auth-type` flag can be used to tell the npm registry which method you prefer. The possible values are `web` and `legacy`, with `web` being the default.
@@ -107,6 +102,7 @@ Provide a one-time password directly to the CLI. If the password is valid, this
$ bun publish --otp 123456
```
{% callout %}
**Note** - `bun publish` respects the `NPM_CONFIG_TOKEN` environment variable which can be used when publishing in github actions or automated workflows.
{% /callout %}
### `--gzip-level`
Specify the level of gzip compression to use when packing the package. Only applies to `bun publish` without a tarball path argument. Values range from `0` to `9` (default is `9`).
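For example, to trade some compression ratio for faster packing:
```sh
$ bun publish --gzip-level 6
```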
{% bunCLIUsage command="publish" /%}

View File

@@ -15,8 +15,8 @@ Below is the full set of recommended `compilerOptions` for a Bun project. With t
```jsonc
{
"compilerOptions": {
// Environment setup & latest features
"lib": ["ESNext"],
// Enable latest features
"lib": ["ESNext", "DOM"],
"target": "ESNext",
"module": "ESNext",
"moduleDetection": "force",
@@ -33,12 +33,11 @@ Below is the full set of recommended `compilerOptions` for a Bun project. With t
"strict": true,
"skipLibCheck": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedIndexedAccess": true,
// Some stricter flags (disabled by default)
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false,
// Some stricter flags
"noUnusedLocals": true,
"noUnusedParameters": true,
"noPropertyAccessFromIndexSignature": true,
},
}
```

View File

@@ -215,19 +215,12 @@ export default {
page("bundler", "`Bun.build`", {
description: "Bundle code for consumption in the browser with Bun's native bundler.",
}),
page("bundler/html", "HTML & static sites", {
page("bundler/html", "Bundle frontend & static sites", {
description: `Zero-config HTML bundler for single-page apps and multi-page apps. Automatic bundling, TailwindCSS plugins, TypeScript, JSX, React support, and incredibly fast builds`,
}),
page("bundler/css", "CSS", {
description: `Production ready CSS bundler with support for modern CSS features, CSS modules, and more.`,
}),
page("bundler/fullstack", "Fullstack Dev Server", {
description: "Serve your frontend and backend from the same app with Bun's dev server.",
}),
page("bundler/hmr", "Hot reloading", {
description: `Update modules in a running application without reloading the page using import.meta.hot`,
}),
page("bundler/loaders", "Loaders", {
description: "Bun's built-in loaders for the bundler and runtime",
}),
@@ -265,25 +258,12 @@ export default {
page("test/time", "Dates and times", {
description: "Control the date & time in your tests for more reliable and deterministic tests",
}),
page("test/coverage", "Code coverage", {
description: "Generate code coverage reports with `bun test --coverage`",
}),
page("test/reporters", "Test reporters", {
description: "Add a junit reporter to your test runs",
}),
page("test/configuration", "Test configuration", {
description: "Configure the test runner with bunfig.toml",
}),
page("test/runtime-behavior", "Runtime behavior", {
description: "Learn how the test runner affects Bun's runtime behavior",
}),
page("test/discovery", "Finding tests", {
description: "Learn how the test runner discovers tests",
}),
page("test/dom", "DOM testing", {
description: "Write headless tests for UI and React/Vue/Svelte/Lit components with happy-dom",
}),
page("test/coverage", "Code coverage", {
description: "Generate code coverage reports with `bun test --coverage`",
}),
divider("Package runner"),
page("cli/bunx", "`bunx`", {
@@ -344,9 +324,6 @@ export default {
page("api/file-io", "File I/O", {
description: `Read and write files fast with Bun's heavily optimized file system API.`,
}), // "`Bun.write`"),
page("api/redis", "Redis client", {
description: `Bun provides a fast, native Redis client with automatic command pipelining for better performance.`,
}),
page("api/import-meta", "import.meta", {
description: `Module-scoped metadata and utilities`,
}), // "`bun:sqlite`"),
@@ -371,24 +348,24 @@ export default {
page("api/spawn", "Child processes", {
description: `Spawn sync and async child processes with easily configurable input and output streams.`,
}), // "`Bun.spawn`"),
page("api/html-rewriter", "HTMLRewriter", {
description: `Parse and transform HTML with Bun's native HTMLRewriter API, inspired by Cloudflare Workers.`,
}), // "`HTMLRewriter`"),
page("api/transpiler", "Transpiler", {
description: `Bun exposes its internal transpiler as a pluggable API.`,
}), // "`Bun.Transpiler`"),
page("api/hashing", "Hashing", {
description: `Native support for a range of fast hashing algorithms.`,
}), // "`Bun.serve`"),
page("api/console", "Console", {
description: `Bun implements a Node.js-compatible \`console\` object with colorized output and deep pretty-printing.`,
}), // "`Node-API`"),
page("api/cookie", "Cookie", {
description: "Bun's native Cookie API simplifies working with HTTP cookies.",
}), // "`Node-API`"),
page("api/ffi", "FFI", {
description: `Call native code from JavaScript with Bun's foreign function interface (FFI) API.`,
}), // "`bun:ffi`"),
page("api/cc", "C Compiler", {
description: `Build & run native C from JavaScript with Bun's native C compiler API`,
}), // "`bun:ffi`"),
page("api/html-rewriter", "HTMLRewriter", {
description: `Parse and transform HTML with Bun's native HTMLRewriter API, inspired by Cloudflare Workers.`,
}), // "`HTMLRewriter`"),
page("api/test", "Testing", {
description: `Bun's built-in test runner is fast and uses Jest-compatible syntax.`,
}), // "`bun:test`"),
@@ -414,9 +391,6 @@ export default {
page("api/color", "Color", {
description: `Bun's color function leverages Bun's CSS parser for parsing, normalizing, and converting colors from user input to a variety of output formats.`,
}), // "`Color`"),
page("api/transpiler", "Transpiler", {
description: `Bun exposes its internal transpiler as a pluggable API.`,
}), // "`Bun.Transpiler`"),
// divider("Dev Server"),
// page("bun-dev", "Vanilla"),

View File

@@ -32,7 +32,7 @@ pub fn add(global: *JSC.JSGlobalObject, a: i32, b: i32) !i32 {
const gen = bun.gen.math; // "math" being this file's basename
const std = @import("std");
const bun = @import("bun");
const bun = @import("root").bun;
const JSC = bun.JSC;
```

View File

@@ -60,7 +60,7 @@ Visual Studio can be installed graphically using the wizard or through WinGet:
After Visual Studio, you need the following:
- LLVM 19.1.7
- LLVM 18.1.8
- Go
- Rust
- NASM
@@ -81,7 +81,7 @@ After Visual Studio, you need the following:
> irm https://get.scoop.sh | iex
> scoop install nodejs-lts go rust nasm ruby perl ccache
# scoop seems to be buggy if you install llvm and the rest at the same time
> scoop install llvm@19.1.7
> scoop install llvm@18.1.8
```
{% /codetabs %}

View File

@@ -104,7 +104,9 @@ This page is updated regularly to reflect compatibility status of the latest ver
### [`node:crypto`](https://nodejs.org/api/crypto.html)
🟡 Missing `secureHeapUsed` `setEngine` `setFips`
🟡 Missing `ECDH` `checkPrime` `checkPrimeSync` `generatePrime` `generatePrimeSync` `hkdf` `hkdfSync` `secureHeapUsed` `setEngine` `setFips`
Some methods are not optimized yet.
### [`node:domain`](https://nodejs.org/api/domain.html)
@@ -116,7 +118,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
### [`node:module`](https://nodejs.org/api/module.html)
🟡 Missing `syncBuiltinESMExports`, `Module#load()`. Overriding `require.cache` is supported for ESM & CJS modules. `module._extensions`, `module._pathCache`, `module._cache` are no-ops. `module.register` is not implemented and we recommend using a [`Bun.plugin`](https://bun.sh/docs/runtime/plugins) in the meantime.
🟡 Missing `runMain` `syncBuiltinESMExports`, `Module#load()`. Overriding `require.cache` is supported for ESM & CJS modules. `module._extensions`, `module._pathCache`, `module._cache` are no-ops. `module.register` is not implemented and we recommend using a [`Bun.plugin`](https://bun.sh/docs/runtime/plugins) in the meantime.
### [`node:net`](https://nodejs.org/api/net.html)
@@ -172,7 +174,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
### [`node:test`](https://nodejs.org/api/test.html)
🟡 Partly implemented. Missing mocks, snapshots, timers. Use [`bun:test`](https://bun.sh/docs/cli/test) instead.
🔴 Not implemented. Use [`bun:test`](https://bun.sh/docs/cli/test) instead.
### [`node:trace_events`](https://nodejs.org/api/tracing.html)
@@ -344,7 +346,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
### [`process`](https://nodejs.org/api/process.html)
🟡 Mostly implemented. `process.binding` (internal Node.js bindings some packages rely on) is partially implemented. `process.title` is currently a no-op on macOS & Linux. `getActiveResourcesInfo` `setActiveResourcesInfo`, `getActiveResources` and `setSourceMapsEnabled` are stubs. Newer APIs like `process.loadEnvFile` and `process.getBuiltinModule` are not implemented yet.
🟡 Mostly implemented. `process.binding` (internal Node.js bindings some packages rely on) is partially implemented. `process.title` is a currently a no-op on macOS & Linux. `getActiveResourcesInfo` `setActiveResourcesInfo`, `getActiveResources` and `setSourceMapsEnabled` are stubs. Newer APIs like `process.loadEnvFile` and `process.getBuiltinModule` are not implemented yet.
### [`queueMicrotask()`](https://developer.mozilla.org/en-US/docs/Web/API/queueMicrotask)
@@ -376,7 +378,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
### [`require()`](https://nodejs.org/api/globals.html#require)
🟢 Fully implemented, including [`require.main`](https://nodejs.org/api/modules.html#requiremain), [`require.cache`](https://nodejs.org/api/modules.html#requirecache), [`require.resolve`](https://nodejs.org/api/modules.html#requireresolverequest-options).
🟢 Fully implemented, including [`require.main`](https://nodejs.org/api/modules.html#requiremain), [`require.cache`](https://nodejs.org/api/modules.html#requirecache), [`require.resolve`](https://nodejs.org/api/modules.html#requireresolverequest-options). `require.extensions` is a stub.
### [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response)

View File

@@ -329,7 +329,7 @@ await Bun.build({
{% callout %}
**NOTE**: Plugin lifecycle callbacks (`onStart()`, `onResolve()`, etc.) do not have the ability to modify the `build.config` object in the `setup()` function. If you want to mutate `build.config`, you must do so directly in the `setup()` function:
**NOTE**: Plugin lifcycle callbacks (`onStart()`, `onResolve()`, etc.) do not have the ability to modify the `build.config` object in the `setup()` function. If you want to mutate `build.config`, you must do so directly in the `setup()` function:
```ts
await Bun.build({
@@ -400,7 +400,7 @@ type Loader = "js" | "jsx" | "ts" | "tsx" | "css" | "json" | "toml" | "object";
### Namespaces
`onLoad` and `onResolve` accept an optional `namespace` string. What is a namespace?
`onLoad` and `onResolve` accept an optional `namespace` string. What is a namespaace?
Every module has a namespace. Namespaces are used to prefix the import in transpiled code; for instance, a loader with a `filter: /\.yaml$/` and `namespace: "yaml:"` will transform an import from `./myfile.yaml` into `yaml:./myfile.yaml`.

View File

@@ -1,87 +0,0 @@
Configure `bun test` via `bunfig.toml` file and command-line options. This page documents the available configuration options for `bun test`.
## bunfig.toml options
You can configure `bun test` behavior by adding a `[test]` section to your `bunfig.toml` file:
```toml
[test]
# Options go here
```
### Test discovery
#### root
The `root` option specifies a root directory for test discovery, overriding the default behavior of scanning from the project root.
```toml
[test]
root = "src" # Only scan for tests in the src directory
```
### Reporters
#### reporter.junit
Configure the JUnit reporter output file path directly in the config file:
```toml
[test.reporter]
junit = "path/to/junit.xml" # Output path for JUnit XML report
```
This complements the `--reporter=junit` and `--reporter-outfile` CLI flags.
### Memory usage
#### smol
Enable the `--smol` memory-saving mode specifically for the test runner:
```toml
[test]
smol = true # Reduce memory usage during test runs
```
This is equivalent to using the `--smol` flag on the command line.
### Coverage options
In addition to the options documented in the [coverage documentation](./coverage.md), the following options are available:
#### coverageSkipTestFiles
Exclude files matching test patterns (e.g., `*.test.ts`) from the coverage report:
```toml
[test]
coverageSkipTestFiles = true # Exclude test files from coverage reports
```
#### coverageThreshold (Object form)
The coverage threshold can be specified either as a number (as shown in the coverage documentation) or as an object with specific thresholds:
```toml
[test]
# Set specific thresholds for different coverage metrics
coverageThreshold = { lines = 0.9, functions = 0.8, statements = 0.85 }
```
Setting any of these enables `fail_on_low_coverage`, causing the test run to fail if coverage is below the threshold.
#### coverageIgnoreSourcemaps
Internally, Bun transpiles every file. That means code coverage must also go through sourcemaps before it can be reported. We expose this as a flag so you can opt out of that behavior, though the results can be confusing: during transpilation, Bun may move code around and change variable names. This option is mostly useful for debugging coverage issues.
```toml
[test]
coverageIgnoreSourcemaps = true # Don't use sourcemaps for coverage analysis
```
When using this option, you probably want to stick a `// @bun` comment at the top of the source file to opt out of the transpilation process.
### Install settings inheritance
The `bun test` command inherits relevant network and installation configuration (registry, cafile, prefer, exact, etc.) from the `[install]` section of bunfig.toml. This is important if tests need to interact with private registries or require specific install behaviors triggered during the test run.
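For instance, a registry configured for installs is also used when `bun test` needs to fetch packages (the URL is illustrative):
```toml
[install]
registry = "https://registry.example.com/"
```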

View File

@@ -52,22 +52,9 @@ It is possible to specify a coverage threshold in `bunfig.toml`. If your test su
coverageThreshold = 0.9
# to set different thresholds for lines and functions
coverageThreshold = { lines = 0.9, functions = 0.9, statements = 0.9 }
coverageThreshold = { lines = 0.9, functions = 0.9 }
```
Setting any of these thresholds enables `fail_on_low_coverage`, causing the test run to fail if coverage is below the threshold.
### Exclude test files from coverage
By default, test files themselves are included in coverage reports. You can exclude them with:
```toml
[test]
coverageSkipTestFiles = true # default false
```
This will exclude files matching test patterns (e.g., `*.test.ts`, `*_spec.js`) from the coverage report.
### Sourcemaps
Internally, Bun transpiles all files by default, so Bun automatically generates an internal [source map](https://web.dev/source-maps/) that maps lines of your original source code onto Bun's internal representation. If for any reason you want to disable this, set `test.coverageIgnoreSourcemaps` to `true`; this will rarely be desirable outside of advanced use cases.
@@ -77,14 +64,6 @@ Internally, Bun transpiles all files by default, so Bun automatically generates
coverageIgnoreSourcemaps = true # default false
```
### Coverage defaults
By default, coverage reports:
1. Exclude `node_modules` directories
2. Exclude files loaded via non-JS/TS loaders (e.g., .css, .txt) unless a custom JS loader is specified
3. Include test files themselves (can be disabled with `coverageSkipTestFiles = true` as shown above)
### Coverage reporters
By default, coverage reports will be printed to the console.

View File

@@ -1,85 +0,0 @@
bun test's file discovery mechanism determines which files to run as tests. Understanding how it works helps you structure your test files effectively.
## Default Discovery Logic
By default, `bun test` recursively searches the project directory for files that match specific patterns:
- `*.test.{js|jsx|ts|tsx}` - Files ending with `.test.js`, `.test.jsx`, `.test.ts`, or `.test.tsx`
- `*_test.{js|jsx|ts|tsx}` - Files ending with `_test.js`, `_test.jsx`, `_test.ts`, or `_test.tsx`
- `*.spec.{js|jsx|ts|tsx}` - Files ending with `.spec.js`, `.spec.jsx`, `.spec.ts`, or `.spec.tsx`
- `*_spec.{js|jsx|ts|tsx}` - Files ending with `_spec.js`, `_spec.jsx`, `_spec.ts`, or `_spec.tsx`
## Exclusions
By default, Bun test ignores:
- `node_modules` directories
- Hidden directories (those starting with a period `.`)
- Files that don't have JavaScript-like extensions (based on available loaders)
## Customizing Test Discovery
### Position Arguments as Filters
You can filter which test files run by passing additional positional arguments to `bun test`:
```bash
$ bun test <filter> <filter> ...
```
Any test file with a path that contains one of the filters will run. These filters are simple substring matches, not glob patterns.
For example, to run all tests in a `utils` directory:
```bash
$ bun test utils
```
This would match files like `src/utils/string.test.ts` and `lib/utils/array_test.js`.
### Specifying Exact File Paths
To run a specific file in the test runner, make sure the path starts with `./` or `/` to distinguish it from a filter name:
```bash
$ bun test ./test/specific-file.test.ts
```
### Filter by Test Name
To filter tests by name rather than file path, use the `-t`/`--test-name-pattern` flag with a regex pattern:
```sh
# run all tests with "addition" in the name
$ bun test --test-name-pattern addition
```
The pattern is matched against a concatenated string of the test name prepended with the labels of all its parent describe blocks, separated by spaces. For example, a test defined as:
```js
describe("Math", () => {
describe("operations", () => {
test("should add correctly", () => {
// ...
});
});
});
```
Would be matched against the string "Math operations should add correctly".
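So a pattern matching any part of that concatenated string selects the test:
```sh
$ bun test --test-name-pattern "Math operations"
```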
### Changing the Root Directory
By default, Bun looks for test files starting from the current working directory. You can change this with the `root` option in your `bunfig.toml`:
```toml
[test]
root = "src" # Only scan for tests in the src directory
```
## Execution Order
Tests are run in the following order:
1. Test files are executed sequentially (not in parallel)
2. Within each file, tests run sequentially based on their definition order

View File

@@ -56,9 +56,9 @@ The following properties and methods are implemented on mock functions.
- [x] [mockFn.mock.instances](https://jestjs.io/docs/mock-function-api#mockfnmockinstances)
- [x] [mockFn.mock.contexts](https://jestjs.io/docs/mock-function-api#mockfnmockcontexts)
- [x] [mockFn.mock.lastCall](https://jestjs.io/docs/mock-function-api#mockfnmocklastcall)
- [x] [mockFn.mockClear()](https://jestjs.io/docs/mock-function-api#mockfnmockclear) - Clears call history
- [x] [mockFn.mockReset()](https://jestjs.io/docs/mock-function-api#mockfnmockreset) - Clears call history and removes implementation
- [x] [mockFn.mockRestore()](https://jestjs.io/docs/mock-function-api#mockfnmockrestore) - Restores original implementation
- [x] [mockFn.mockClear()](https://jestjs.io/docs/mock-function-api#mockfnmockclear)
- [x] [mockFn.mockReset()](https://jestjs.io/docs/mock-function-api#mockfnmockreset)
- [x] [mockFn.mockRestore()](https://jestjs.io/docs/mock-function-api#mockfnmockrestore)
- [x] [mockFn.mockImplementation(fn)](https://jestjs.io/docs/mock-function-api#mockfnmockimplementationfn)
- [x] [mockFn.mockImplementationOnce(fn)](https://jestjs.io/docs/mock-function-api#mockfnmockimplementationoncefn)
- [x] [mockFn.mockName(name)](https://jestjs.io/docs/mock-function-api#mockfnmocknamename)
@@ -197,59 +197,7 @@ After resolution, the mocked module is stored in the ES Module registry **and**
The callback function is called lazily, only if the module is imported or required. This means you can use `mock.module()` to mock modules that don't exist yet, and to mock modules that are imported by other modules.
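A short sketch (the `./config` module and its export are illustrative):
```ts
import { expect, mock, test } from "bun:test";

mock.module("./config", () => ({
  apiUrl: "https://api.example.test",
}));

test("reads the mocked module", async () => {
  const { apiUrl } = await import("./config");
  expect(apiUrl).toBe("https://api.example.test");
});
```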
### Module Mock Implementation Details
Understanding how `mock.module()` works helps you use it more effectively:
1. **Cache Interaction**: Module mocks interact with both the ESM and CommonJS module caches.
2. **Lazy Evaluation**: The mock factory callback is only evaluated when the module is actually imported or required.
3. **Path Resolution**: Bun automatically resolves the module specifier as though you were doing an import, supporting:
- Relative paths (`'./module'`)
- Absolute paths (`'/path/to/module'`)
- Package names (`'lodash'`)
4. **Import Timing Effects**:
- When mocking before first import: No side effects from the original module occur
- When mocking after import: The original module's side effects have already happened
- For this reason, using `--preload` is recommended for mocks that need to prevent side effects
5. **Live Bindings**: Mocked ESM modules maintain live bindings, so changing the mock will update all existing imports
## Global Mock Functions
### Clear all mocks with `mock.clearAllMocks()`
Reset all mock function state (calls, results, etc.) without restoring their original implementation:
```ts
import { expect, mock, test } from "bun:test";
const random1 = mock(() => Math.random());
const random2 = mock(() => Math.random());
test("clearing all mocks", () => {
random1();
random2();
expect(random1).toHaveBeenCalledTimes(1);
expect(random2).toHaveBeenCalledTimes(1);
mock.clearAllMocks();
expect(random1).toHaveBeenCalledTimes(0);
expect(random2).toHaveBeenCalledTimes(0);
// Note: implementations are preserved
expect(typeof random1()).toBe("number");
expect(typeof random2()).toBe("number");
});
```
This resets the `.mock.calls`, `.mock.instances`, `.mock.contexts`, and `.mock.results` properties of all mocks, but unlike `mock.restore()`, it does not restore the original implementation.
### Restore all function mocks to their original values with `mock.restore()`
Instead of restoring each mock individually with `mockFn.mockRestore()`, restore all of them at once by calling `mock.restore()`. Doing so does not reset modules overridden with `mock.module()`.
@@ -286,28 +234,3 @@ test('foo, bar, baz', () => {
expect(bazSpy).toBe('baz');
});
```
## Vitest Compatibility
For added compatibility with tests written for [Vitest](https://vitest.dev/), Bun provides the `vi` global object as an alias for parts of the Jest mocking API:
```ts
import { test, expect } from "bun:test";
// Using the 'vi' alias similar to Vitest
test("vitest compatibility", () => {
const mockFn = vi.fn(() => 42);
mockFn();
expect(mockFn).toHaveBeenCalled();
// The following functions are available on the vi object:
// vi.fn
// vi.spyOn
// vi.mock
// vi.restoreAllMocks
// vi.clearAllMocks
});
```
This makes it easier to port tests from Vitest to Bun without having to rewrite all your mocks.

View File

@@ -1,108 +0,0 @@
bun test supports different output formats through reporters. This document covers both built-in reporters and how to implement your own custom reporters.
## Built-in Reporters
### Default Console Reporter
By default, bun test outputs results to the console in a human-readable format:
```sh
test/package-json-lint.test.ts:
✓ test/package.json [0.88ms]
✓ test/js/third_party/grpc-js/package.json [0.18ms]
✓ test/js/third_party/svelte/package.json [0.21ms]
✓ test/js/third_party/express/package.json [1.05ms]
4 pass
0 fail
4 expect() calls
Ran 4 tests in 1.44ms
```
When a terminal doesn't support colors, the output avoids non-ASCII characters:
```sh
test/package-json-lint.test.ts:
(pass) test/package.json [0.48ms]
(pass) test/js/third_party/grpc-js/package.json [0.10ms]
(pass) test/js/third_party/svelte/package.json [0.04ms]
(pass) test/js/third_party/express/package.json [0.04ms]
4 pass
0 fail
4 expect() calls
Ran 4 tests across 1 files. [0.66ms]
```
### JUnit XML Reporter
For CI/CD environments, Bun supports generating JUnit XML reports. JUnit XML is a widely-adopted format for test results that can be parsed by many CI/CD systems, including GitLab, Jenkins, and others.
#### Using the JUnit Reporter
To generate a JUnit XML report, use the `--reporter=junit` flag along with `--reporter-outfile` to specify the output file:
```sh
$ bun test --reporter=junit --reporter-outfile=./junit.xml
```
This continues to output to the console as usual while also writing the JUnit XML report to the specified path at the end of the test run.
#### Configuring via bunfig.toml
You can also configure the JUnit reporter in your `bunfig.toml` file:
```toml
[test.reporter]
junit = "path/to/junit.xml" # Output path for JUnit XML report
```
#### Environment Variables in JUnit Reports
The JUnit reporter automatically includes environment information as `<properties>` in the XML output. This can be helpful for tracking test runs in CI environments.
Specifically, it includes the following environment variables when available:
| Environment Variable | Property Name | Description |
| ----------------------------------------------------------------------- | ------------- | ---------------------- |
| `GITHUB_RUN_ID`, `GITHUB_SERVER_URL`, `GITHUB_REPOSITORY`, `CI_JOB_URL` | `ci` | CI build information |
| `GITHUB_SHA`, `CI_COMMIT_SHA`, `GIT_SHA` | `commit` | Git commit identifiers |
| System hostname | `hostname` | Machine hostname |
This makes it easier to trace a particular test run back to its environment and commit.
#### Current Limitations
The JUnit reporter currently has a few limitations that will be addressed in future updates:
- `stdout` and `stderr` output from individual tests are not included in the report
- Precise timestamp fields per test case are not included
### GitHub Actions reporter
Bun test automatically detects when it's running inside GitHub Actions and emits GitHub Actions annotations to the console directly. No special configuration is needed beyond installing Bun and running `bun test`.
For a GitHub Actions workflow configuration example, see the [CI/CD integration](../cli/test.md#cicd-integration) section of the CLI documentation.
## Custom Reporters
Bun allows developers to implement custom test reporters by extending the WebKit Inspector Protocol with additional testing-specific domains.
### Inspector Protocol for Testing
To support test reporting, Bun extends the standard WebKit Inspector Protocol with two custom domains:
1. **TestReporter**: Reports test discovery, execution start, and completion events
2. **LifecycleReporter**: Reports errors and exceptions during test execution
These extensions allow you to build custom reporting tools that can receive detailed information about test execution in real-time.
### Key Events
Custom reporters can listen for these key events:
- `TestReporter.found`: Emitted when a test is discovered
- `TestReporter.start`: Emitted when a test starts running
- `TestReporter.end`: Emitted when a test completes
- `Console.messageAdded`: Emitted when console output occurs during a test
- `LifecycleReporter.error`: Emitted when an error or exception occurs
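As a rough sketch of what a consumer might look like — the socket URL, the `.enable` calls, and the exact message shapes below are assumptions, not a documented API — a reporter could connect to the inspector socket and filter incoming messages by the event names above:
```ts
// Speculative sketch: assumes the test run exposes an inspector WebSocket
// (e.g. via `bun test --inspect`) and that the custom domains follow the
// usual Inspector Protocol framing of { id, method, params }.
const socket = new WebSocket("ws://localhost:6499/"); // assumed endpoint

socket.addEventListener("open", () => {
  // Assumption: like standard Inspector domains, these must be enabled first.
  socket.send(JSON.stringify({ id: 1, method: "TestReporter.enable" }));
  socket.send(JSON.stringify({ id: 2, method: "LifecycleReporter.enable" }));
});

socket.addEventListener("message", event => {
  const message = JSON.parse(String(event.data));
  switch (message.method) {
    case "TestReporter.found":
    case "TestReporter.start":
    case "TestReporter.end":
    case "LifecycleReporter.error":
      console.log(message.method, message.params);
      break;
  }
});
```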

View File

@@ -1,93 +0,0 @@
`bun test` is deeply integrated with Bun's runtime. This is part of what makes `bun test` fast and simple to use.
#### `$NODE_ENV` environment variable
`bun test` automatically sets `$NODE_ENV` to `"test"` unless it's already set in the environment or via .env files. This is standard behavior for most test runners and helps ensure consistent test behavior.
```ts
import { test, expect } from "bun:test";
test("NODE_ENV is set to test", () => {
expect(process.env.NODE_ENV).toBe("test");
});
```
#### `$TZ` environment variable
By default, all `bun test` runs use UTC (`Etc/UTC`) as the time zone unless overridden by the `TZ` environment variable. This ensures consistent date and time behavior across different development environments.
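A quick way to observe this (a minimal check, not an exhaustive one):
```ts
import { expect, test } from "bun:test";

// Under the default Etc/UTC time zone, the local UTC offset is zero.
test("bun test defaults to UTC", () => {
  expect(new Date().getTimezoneOffset()).toBe(0);
});
```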
#### Test Timeouts
Each test has a default timeout of 5000ms (5 seconds) if not explicitly overridden. Tests that exceed this timeout will fail. This can be changed globally with the `--timeout` flag or per-test as the third parameter to the test function.
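For example (the URL is a placeholder):
```ts
import { expect, test } from "bun:test";

// The third argument overrides the default 5000ms timeout for this test only.
test("slow operation", async () => {
  const res = await fetch("https://example.com"); // placeholder endpoint
  expect(res.ok).toBe(true);
}, 10_000);
```
The same effect applies to the whole run with `bun test --timeout 10000`.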
## Error Handling
### Unhandled Errors
`bun test` tracks unhandled promise rejections and errors that occur between tests. If such errors occur, the final exit code will be non-zero (specifically, the count of such errors), even if all tests pass.
This helps catch errors in asynchronous code that might otherwise go unnoticed:
```ts
import { test } from "bun:test";
test("test 1", () => {
// This test passes
});
// This error happens outside any test
setTimeout(() => {
throw new Error("Unhandled error");
}, 0);
test("test 2", () => {
// This test also passes
});
// The test run will still fail with a non-zero exit code
// because of the unhandled error
```
Internally, this occurs with a higher precedence than `process.on("unhandledRejection")` or `process.on("uncaughtException")`, which makes it simpler to integrate with existing code.
## Using General CLI Flags with Tests
Several Bun CLI flags can be used with `bun test` to modify its behavior:
### Memory Usage
- `--smol`: Reduces memory usage for the test runner VM
### Debugging
- `--inspect`, `--inspect-brk`: Attaches the debugger to the test runner process
### Module Loading
- `--preload`: Runs scripts before test files (useful for global setup/mocks)
- `--define`: Sets compile-time constants
- `--loader`: Configures custom loaders
- `--tsconfig-override`: Uses a different tsconfig
- `--conditions`: Sets package.json conditions for module resolution
- `--env-file`: Loads environment variables for tests
### Installation-related Flags
- `--prefer-offline`, `--frozen-lockfile`, etc.: Affect any network requests or auto-installs during test execution
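Several of these can be combined in a single invocation (the file paths here are placeholders):
```sh
$ bun test --preload ./test/setup.ts --env-file .env.test --smol
```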
## Watch and Hot Reloading
When running `bun test` with the `--watch` flag, the test runner will watch for file changes and re-run affected tests.
The `--hot` flag provides similar functionality but is more aggressive about trying to preserve state between runs. For most test scenarios, `--watch` is the recommended option.
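For example:
```sh
$ bun test --watch
```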
## Global Variables
The following globals are automatically available in test files without importing (though they can be imported from `bun:test` if preferred):
- `test`, `it`: Define tests
- `describe`: Group tests
- `expect`: Make assertions
- `beforeAll`, `beforeEach`, `afterAll`, `afterEach`: Lifecycle hooks
- `jest`: Jest global object
- `vi`: Vitest compatibility alias for common jest methods

View File

@@ -1,7 +1,3 @@
Snapshot testing saves the output of a value and compares it against future test runs. This is particularly useful for UI components, complex objects, or any output that needs to remain consistent.
## Basic snapshots
Snapshot tests are written using the `.toMatchSnapshot()` matcher:
```ts
@@ -17,52 +13,3 @@ The first time this test is run, the argument to `expect` will be serialized and
```bash
$ bun test --update-snapshots
```
## Inline snapshots
For smaller values, you can use inline snapshots with `.toMatchInlineSnapshot()`. These snapshots are stored directly in your test file:
```ts
import { test, expect } from "bun:test";
test("inline snapshot", () => {
// First run: snapshot will be inserted automatically
expect({ hello: "world" }).toMatchInlineSnapshot();
// After first run, the test file will be updated to:
// expect({ hello: "world" }).toMatchInlineSnapshot(`
// {
// "hello": "world",
// }
// `);
});
```
When you run the test, Bun automatically updates the test file itself with the generated snapshot string. This makes the tests more portable and easier to understand, since the expected output is right next to the test.
### Using inline snapshots
1. Write your test with `.toMatchInlineSnapshot()`
2. Run the test once
3. Bun automatically updates your test file with the snapshot
4. On subsequent runs, the value will be compared against the inline snapshot
Inline snapshots are particularly useful for small, simple values where it's helpful to see the expected output right in the test file.
## Error snapshots
You can also snapshot error messages using `.toThrowErrorMatchingSnapshot()` and `.toThrowErrorMatchingInlineSnapshot()`:
```ts
import { test, expect } from "bun:test";
test("error snapshot", () => {
expect(() => {
throw new Error("Something went wrong");
}).toThrowErrorMatchingSnapshot();
expect(() => {
throw new Error("Another error");
}).toThrowErrorMatchingInlineSnapshot();
});
```

View File

@@ -74,29 +74,9 @@ test("it was 2020, for a moment.", () => {
});
```
## Get mocked time with `jest.now()`
When you're using mocked time (with `setSystemTime` or `useFakeTimers`), you can use `jest.now()` to get the current mocked timestamp:
```ts
import { test, expect, jest } from "bun:test";
test("get the current mocked time", () => {
jest.useFakeTimers();
jest.setSystemTime(new Date("2020-01-01T00:00:00.000Z"));
expect(Date.now()).toBe(1577836800000); // Jan 1, 2020 timestamp
expect(jest.now()).toBe(1577836800000); // Same value
jest.useRealTimers();
});
```
This is useful when you need to access the mocked time directly without creating a new Date object.
## Set the time zone
By default, the time zone for all `bun test` runs is UTC (`Etc/UTC`) unless overridden. To change the time zone, pass the `$TZ` environment variable to `bun test`.
```sh
TZ=America/Los_Angeles bun test

View File

@@ -78,11 +78,9 @@ test("wat", async () => {
In `bun:test`, test timeouts throw an uncatchable exception to force the test to stop running and fail. We also kill any child processes that were spawned in the test to avoid leaving behind zombie processes lurking in the background.
The default timeout for each test is 5000ms (5 seconds) if not overridden by this timeout option or `jest.setDefaultTimeout()`.
### 🧟 Zombie process killer
When a test times out and processes spawned in the test via `Bun.spawn`, `Bun.spawnSync`, or `node:child_process` are not killed, they will be automatically killed and a message will be logged to the console. This prevents zombie processes from lingering in the background after timed-out tests.
## `test.skip`
@@ -127,7 +125,7 @@ fix the test.
## `test.only`
To run a particular test or suite of tests use `test.only()` or `describe.only()`. Once declared, running `bun test --only` will only execute tests/suites that have been marked with `.only()`. Running `bun test` without the `--only` option with `test.only()` declared will result in all tests in the given suite being executed _up to_ the test with `.only()`. `describe.only()` functions the same in both execution scenarios.
```ts
import { test, describe } from "bun:test";
@@ -199,121 +197,22 @@ test.todoIf(macOS)("runs on posix", () => {
});
```
## `test.failing`
Use `test.failing()` when you know a test is currently failing but you want to track it and be notified when it starts passing. This inverts the test result:
- A failing test marked with `.failing()` will pass
- A passing test marked with `.failing()` will fail (with a message indicating it's now passing and should be fixed)
```ts
// This will pass because the test is failing as expected
test.failing("math is broken", () => {
expect(0.1 + 0.2).toBe(0.3); // fails due to floating point precision
});
// This will fail with a message that the test is now passing
test.failing("fixed bug", () => {
expect(1 + 1).toBe(2); // passes, but we expected it to fail
});
```
This is useful for tracking known bugs that you plan to fix later, or for implementing test-driven development.
## Conditional Tests for Describe Blocks
The conditional modifiers `.if()`, `.skipIf()`, and `.todoIf()` can also be applied to `describe` blocks, affecting all tests within the suite:
```ts
const isMacOS = process.platform === "darwin";
// Only runs the entire suite on macOS
describe.if(isMacOS)("macOS-specific features", () => {
test("feature A", () => {
// only runs on macOS
});
test("feature B", () => {
// only runs on macOS
});
});
// Skips the entire suite on Windows
describe.skipIf(process.platform === "win32")("Unix features", () => {
test("feature C", () => {
// skipped on Windows
});
});
// Marks the entire suite as TODO on Linux
describe.todoIf(process.platform === "linux")("Upcoming Linux support", () => {
test("feature D", () => {
// marked as TODO on Linux
});
});
```
## `test.each` and `describe.each`
To run the same test with multiple sets of data, use `test.each`. This creates a parametrized test that runs once for each test case provided.
```ts
const cases = [
[1, 2, 3],
[3, 4, 7],
[3, 4, 5],
];
test.each(cases)("%p + %p should be %p", (a, b, expected) => {
expect(a + b).toBe(expected);
// runs once for each test case provided
});
```
You can also use `describe.each` to create a parametrized suite that runs once for each test case:
```ts
describe.each([
[1, 2, 3],
[3, 4, 7],
])("add(%i, %i)", (a, b, expected) => {
test(`returns ${expected}`, () => {
expect(a + b).toBe(expected);
});
test(`sum is greater than each value`, () => {
expect(a + b).toBeGreaterThan(a);
expect(a + b).toBeGreaterThan(b);
});
});
```
### Argument Passing
How arguments are passed to your test function depends on the structure of your test cases:
- If a table row is an array (like `[1, 2, 3]`), each element is passed as an individual argument
- If a row is not an array (like an object), it's passed as a single argument
```ts
// Array items passed as individual arguments
test.each([
[1, 2, 3],
[4, 5, 9],
])("add(%i, %i) = %i", (a, b, expected) => {
expect(a + b).toBe(expected);
});
// Object items passed as a single argument
test.each([
{ a: 1, b: 2, expected: 3 },
{ a: 4, b: 5, expected: 9 },
])("add($a, $b) = $expected", data => {
expect(data.a + data.b).toBe(data.expected);
});
```
### Format Specifiers
There are a number of options available for formatting the test title:
{% table %}
@@ -364,68 +263,6 @@ There are a number of options available for formatting the test title:
{% /table %}
#### Examples
```ts
// Basic specifiers
test.each([
["hello", 123],
["world", 456],
])("string: %s, number: %i", (str, num) => {
// "string: hello, number: 123"
// "string: world, number: 456"
});
// %p for pretty-format output
test.each([
[{ name: "Alice" }, { a: 1, b: 2 }],
[{ name: "Bob" }, { x: 5, y: 10 }],
])("user %p with data %p", (user, data) => {
// "user { name: 'Alice' } with data { a: 1, b: 2 }"
// "user { name: 'Bob' } with data { x: 5, y: 10 }"
});
// %# for index
test.each(["apple", "banana"])("fruit #%# is %s", fruit => {
// "fruit #0 is apple"
// "fruit #1 is banana"
});
```
## Assertion Counting
Bun supports verifying that a specific number of assertions were called during a test:
### expect.hasAssertions()
Use `expect.hasAssertions()` to verify that at least one assertion is called during a test:
```ts
test("async work calls assertions", async () => {
expect.hasAssertions(); // Will fail if no assertions are called
const data = await fetchData();
expect(data).toBeDefined();
});
```
This is especially useful for async tests to ensure your assertions actually run.
### expect.assertions(count)
Use `expect.assertions(count)` to verify that a specific number of assertions are called during a test:
```ts
test("exactly two assertions", () => {
expect.assertions(2); // Will fail if not exactly 2 assertions are called
expect(1 + 1).toBe(2);
expect("hello").toContain("ell");
});
```
This helps ensure all your assertions run, especially in complex async code with multiple code paths.
## Matchers
Bun implements the following matchers. Full Jest compatibility is on the roadmap; track progress [here](https://github.com/oven-sh/bun/issues/1825).

View File

@@ -17,7 +17,7 @@ Bun supports things like top-level await, JSX, and extensioned `.ts` imports, wh
```jsonc
{
"compilerOptions": {
// Environment setup & latest features
// Enable latest features
"lib": ["ESNext"],
"target": "ESNext",
"module": "ESNext",
@@ -35,13 +35,12 @@ Bun supports things like top-level await, JSX, and extensioned `.ts` imports, wh
"strict": true,
"skipLibCheck": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedIndexedAccess": true,
// Some stricter flags (disabled by default)
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false,
},
// Some stricter flags
"noUnusedLocals": true,
"noUnusedParameters": true,
"noPropertyAccessFromIndexSignature": true
}
}
```

View File

@@ -1,7 +1,7 @@
const std = @import("std");
const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("bun");
const bun = @import("root").bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

View File

@@ -1,142 +0,0 @@
# pretty printing for the standard library.
# put "source /path/to/stage2_gdb_pretty_printers.py" in ~/.gdbinit to load it automatically.
import re
import gdb.printing
# Handles both ArrayList and ArrayListUnmanaged.
class ArrayListPrinter:
    def __init__(self, val):
        self.val = val

    def to_string(self):
        type = self.val.type.name[len('std.array_list.'):]
        type = re.sub(r'^ArrayListAligned(Unmanaged)?\((.*),null\)$', r'ArrayList\1(\2)', type)
        return '%s of length %s, capacity %s' % (type, self.val['items']['len'], self.val['capacity'])

    def children(self):
        for i in range(self.val['items']['len']):
            item = self.val['items']['ptr'] + i
            yield ('[%d]' % i, item.dereference())

    def display_hint(self):
        return 'array'

class MultiArrayListPrinter:
    def __init__(self, val):
        self.val = val

    def child_type(self):
        (helper_fn, _) = gdb.lookup_symbol('%s.dbHelper' % self.val.type.name)
        return helper_fn.type.fields()[1].type.target()

    def to_string(self):
        type = self.val.type.name[len('std.multi_array_list.'):]
        return '%s of length %s, capacity %s' % (type, self.val['len'], self.val['capacity'])

    def slice(self):
        fields = self.child_type().fields()
        base = self.val['bytes']
        cap = self.val['capacity']
        len = self.val['len']
        if len == 0:
            return
        fields = sorted(fields, key=lambda field: field.type.alignof, reverse=True)
        for field in fields:
            ptr = base.cast(field.type.pointer()).dereference().cast(field.type.array(len - 1))
            base += field.type.sizeof * cap
            yield (field.name, ptr)

    def children(self):
        for i, (name, ptr) in enumerate(self.slice()):
            yield ('[%d]' % i, name)
            yield ('[%d]' % i, ptr)

    def display_hint(self):
        return 'map'

# Handles both HashMap and HashMapUnmanaged.
class HashMapPrinter:
    def __init__(self, val):
        self.type = val.type
        is_managed = re.search(r'^std\.hash_map\.HashMap\(', self.type.name)
        self.val = val['unmanaged'] if is_managed else val

    def header_ptr_type(self):
        (helper_fn, _) = gdb.lookup_symbol('%s.dbHelper' % self.val.type.name)
        return helper_fn.type.fields()[1].type

    def header(self):
        if self.val['metadata'] == 0:
            return None
        return (self.val['metadata'].cast(self.header_ptr_type()) - 1).dereference()

    def to_string(self):
        type = self.type.name[len('std.hash_map.'):]
        type = re.sub(r'^HashMap(Unmanaged)?\((.*),std.hash_map.AutoContext\(.*$', r'AutoHashMap\1(\2)', type)
        hdr = self.header()
        if hdr is not None:
            cap = hdr['capacity']
        else:
            cap = 0
        return '%s of length %s, capacity %s' % (type, self.val['size'], cap)

    def children(self):
        hdr = self.header()
        if hdr is None:
            return
        is_map = self.display_hint() == 'map'
        for i in range(hdr['capacity']):
            metadata = self.val['metadata'] + i
            if metadata.dereference()['used'] == 1:
                yield ('[%d]' % i, (hdr['keys'] + i).dereference())
                if is_map:
                    yield ('[%d]' % i, (hdr['values'] + i).dereference())

    def display_hint(self):
        for field in self.header_ptr_type().target().fields():
            if field.name == 'values':
                return 'map'
        return 'array'

# Handles both ArrayHashMap and ArrayHashMapUnmanaged.
class ArrayHashMapPrinter:
    def __init__(self, val):
        self.type = val.type
        is_managed = re.search(r'^std\.array_hash_map\.ArrayHashMap\(', self.type.name)
        self.val = val['unmanaged'] if is_managed else val

    def to_string(self):
        type = self.type.name[len('std.array_hash_map.'):]
        type = re.sub(r'^ArrayHashMap(Unmanaged)?\((.*),std.array_hash_map.AutoContext\(.*$', r'AutoArrayHashMap\1(\2)', type)
        return '%s of length %s' % (type, self.val['entries']['len'])

    def children(self):
        entries = MultiArrayListPrinter(self.val['entries'])
        len = self.val['entries']['len']
        fields = {}
        for name, ptr in entries.slice():
            fields[str(name)] = ptr
        for i in range(len):
            if 'key' in fields:
                yield ('[%d]' % i, fields['key'][i])
            else:
                yield ('[%d]' % i, '{}')
            if 'value' in fields:
                yield ('[%d]' % i, fields['value'][i])

    def display_hint(self):
        for name, ptr in MultiArrayListPrinter(self.val['entries']).slice():
            if name == 'value':
                return 'map'
        return 'array'

pp = gdb.printing.RegexpCollectionPrettyPrinter('Zig standard library')
pp.add_printer('ArrayList', r'^std\.array_list\.ArrayListAligned(Unmanaged)?\(.*\)$', ArrayListPrinter)
pp.add_printer('MultiArrayList', r'^std\.multi_array_list\.MultiArrayList\(.*\)$', MultiArrayListPrinter)
pp.add_printer('HashMap', r'^std\.hash_map\.HashMap(Unmanaged)?\(.*\)$', HashMapPrinter)
pp.add_printer('ArrayHashMap', r'^std\.array_hash_map\.ArrayHashMap(Unmanaged)?\(.*\)$', ArrayHashMapPrinter)
gdb.printing.register_pretty_printer(gdb.current_objfile(), pp)

View File

@@ -1,63 +0,0 @@
# pretty printing for the language.
# put "source /path/to/zig_gdb_pretty_printers.py" in ~/.gdbinit to load it automatically.
import gdb.printing
class ZigPrettyPrinter(gdb.printing.PrettyPrinter):
    def __init__(self):
        super().__init__('Zig')

    def __call__(self, val):
        tag = val.type.tag
        if tag is None:
            return None
        if tag == '[]u8':
            return StringPrinter(val)
        if tag.startswith('[]'):
            return SlicePrinter(val)
        if tag.startswith('?'):
            return OptionalPrinter(val)
        return None

class SlicePrinter:
    def __init__(self, val):
        self.val = val

    def to_string(self):
        return f"{self.val['len']} items at {self.val['ptr']}"

    def children(self):
        def it(val):
            for i in range(int(val['len'])):
                item = val['ptr'] + i
                yield (f'[{i}]', item.dereference())
        return it(self.val)

    def display_hint(self):
        return 'array'

class StringPrinter:
    def __init__(self, val):
        self.val = val

    def to_string(self):
        return self.val['ptr'].string(length=int(self.val['len']))

    def display_hint(self):
        return 'string'

class OptionalPrinter:
    def __init__(self, val):
        self.val = val

    def to_string(self):
        if self.val['some']:
            return self.val['data']
        else:
            return 'null'

gdb.printing.register_pretty_printer(gdb.current_objfile(), ZigPrettyPrinter())

View File

@@ -98,7 +98,7 @@ chunks.push(`// Auto-generated file. Do not edit.
// This used to be a comptime block, but it made the build too slow.
// Compressing the completions list saves about 100 KB of binary size.
const std = @import("std");
const bun = @import("bun");
const bun = @import("root").bun;
const zstd = bun.zstd;
const Environment = bun.Environment;

View File

@@ -1,5 +1,5 @@
const std = @import("std");
const bun = @import("bun");
const bun = @import("root").bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

View File

@@ -0,0 +1,13 @@
# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
# (-p), but do not stop the process (-s) or notify the user (-n).
#
# JSC's garbage collector sends this signal (as configured by Bun WebKit in
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
# it. So stopping the process would just create noise when debugging any long-running script.
process handle -p true -s false -n false SIGPWR
command script import misctools/lldb/lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std
command script import misctools/lldb/lldb_webkit.py

View File

@@ -61,6 +61,7 @@ zig_keywords = {
    'try',
    'union',
    'unreachable',
    'usingnamespace',
    'var',
    'volatile',
    'while',

View File

@@ -329,7 +329,7 @@ def btjs(debugger, command, result, internal_dict):
    addressFormat = '#0{width}x'.format(width=target.GetAddressByteSize() * 2 + 2)
    process = target.GetProcess()
    thread = process.GetSelectedThread()
    jscModule = target.module["JavaScriptCore"] or target.module["bun"] or target.module["bun-debug"]
    jscModule = target.module["JavaScriptCore"]
    if jscModule.FindSymbol("JSC::CallFrame::describeFrame").GetSize() or jscModule.FindSymbol("_ZN3JSC9CallFrame13describeFrameEv").GetSize():
        annotateJSFrames = True

View File

@@ -1,6 +1,6 @@
// most of this file is copy pasted from other files in misctools
const std = @import("std");
const bun = @import("bun");
const bun = @import("root").bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

View File

@@ -1,7 +1,7 @@
const std = @import("std");
const path_handler = @import("../src/resolver/resolve_path.zig");
const bun = @import("bun");
const bun = @import("root").bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;

Some files were not shown because too many files have changed in this diff Show More