diff --git a/.aikido b/.aikido new file mode 100644 index 0000000000..db335af624 --- /dev/null +++ b/.aikido @@ -0,0 +1,19 @@ +exclude: + paths: + - test + - scripts + - bench + - packages/bun-lambda + - packages/bun-release + - packages/bun-wasm + - packages/bun-vscode + - packages/bun-plugin-yaml + - packages/bun-plugin-svelte + - packages/bun-native-plugin-rs + - packages/bun-native-bundler-plugin-api + - packages/bun-inspector-protocol + - packages/bun-inspector-frontend + - packages/bun-error + - packages/bun-debug-adapter-protocol + - packages/bun-build-mdx-rs + - packages/@types/bun diff --git a/.buildkite/Dockerfile b/.buildkite/Dockerfile index 033aec633d..2b5f944834 100644 --- a/.buildkite/Dockerfile +++ b/.buildkite/Dockerfile @@ -26,7 +26,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ wget curl git python3 python3-pip ninja-build \ software-properties-common apt-transport-https \ ca-certificates gnupg lsb-release unzip \ - libxml2-dev ruby ruby-dev bison gawk perl make golang \ + libxml2-dev ruby ruby-dev bison gawk perl make golang ccache \ && add-apt-repository ppa:ubuntu-toolchain-r/test \ && apt-get update \ && apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \ @@ -35,7 +35,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ && wget https://apt.llvm.org/llvm.sh \ && chmod +x llvm.sh \ && ./llvm.sh ${LLVM_VERSION} all \ - && rm llvm.sh + && rm llvm.sh \ + && rm -rf /var/lib/apt/lists/* RUN --mount=type=tmpfs,target=/tmp \ @@ -48,14 +49,6 @@ RUN --mount=type=tmpfs,target=/tmp \ wget -O /tmp/cmake.sh "$cmake_url" && \ sh /tmp/cmake.sh --skip-license --prefix=/usr -RUN --mount=type=tmpfs,target=/tmp \ - sccache_version="0.12.0" && \ - arch=$(uname -m) && \ - sccache_url="https://github.com/mozilla/sccache/releases/download/v${sccache_version}/sccache-v${sccache_version}-${arch}-unknown-linux-musl.tar.gz" && \ - wget -O /tmp/sccache.tar.gz "$sccache_url" && \ - tar -xzf /tmp/sccache.tar.gz -C /tmp && \ - install -m755 /tmp/sccache-v${sccache_version}-${arch}-unknown-linux-musl/sccache /usr/local/bin - RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \ --slave /usr/bin/g++ g++ /usr/bin/g++-13 \ --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \ @@ -134,9 +127,7 @@ RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64"; RUN mkdir -p /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent/cache/bun # The following is necessary to configure buildkite to use a stable -# checkout directory. sccache hashes absolute paths into its cache keys, -# so if buildkite uses a different checkout path each time (which it does -# by default), sccache will be useless. +# checkout directory for ccache to be effective. 
RUN mkdir -p -m 755 /var/lib/buildkite-agent/hooks && \ cat <<'EOF' > /var/lib/buildkite-agent/hooks/environment #!/bin/sh diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index bf346e6c69..007b35c306 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -124,16 +124,13 @@ const testPlatforms = [ { os: "darwin", arch: "aarch64", release: "13", tier: "previous" }, { os: "darwin", arch: "x64", release: "14", tier: "latest" }, { os: "darwin", arch: "x64", release: "13", tier: "previous" }, - { os: "linux", arch: "aarch64", distro: "debian", release: "12", tier: "latest" }, - { os: "linux", arch: "x64", distro: "debian", release: "12", tier: "latest" }, - { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12", tier: "latest" }, - { os: "linux", arch: "x64", profile: "asan", distro: "debian", release: "12", tier: "latest" }, + { os: "linux", arch: "aarch64", distro: "debian", release: "13", tier: "latest" }, + { os: "linux", arch: "x64", distro: "debian", release: "13", tier: "latest" }, + { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "13", tier: "latest" }, + { os: "linux", arch: "x64", profile: "asan", distro: "debian", release: "13", tier: "latest" }, { os: "linux", arch: "aarch64", distro: "ubuntu", release: "25.04", tier: "latest" }, - { os: "linux", arch: "aarch64", distro: "ubuntu", release: "24.04", tier: "latest" }, { os: "linux", arch: "x64", distro: "ubuntu", release: "25.04", tier: "latest" }, - { os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" }, { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "25.04", tier: "latest" }, - { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.22", tier: "latest" }, { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.22", tier: "latest" }, { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22", tier: "latest" }, @@ -574,6 +571,7 @@ function getTestBunStep(platform, options, testOptions = {}) { if (buildId) { args.push(`--build-id=${buildId}`); } + if (testFiles) { args.push(...testFiles.map(testFile => `--include=${testFile}`)); } @@ -1072,7 +1070,7 @@ async function getPipeline(options = {}) { const imagePlatforms = new Map( buildImages || publishImages ? [...buildPlatforms, ...testPlatforms] - .filter(({ os }) => os === "linux" || os === "windows") + .filter(({ os }) => os !== "darwin") .map(platform => [getImageKey(platform), platform]) : [], ); @@ -1106,7 +1104,7 @@ async function getPipeline(options = {}) { const includeASAN = !isMainBranch(); if (!buildId) { - const relevantBuildPlatforms = includeASAN + let relevantBuildPlatforms = includeASAN ? buildPlatforms : buildPlatforms.filter(({ profile }) => profile !== "asan"); diff --git a/.claude/skills/implementing-jsc-classes-cpp/SKILL.md b/.claude/skills/implementing-jsc-classes-cpp/SKILL.md new file mode 100644 index 0000000000..27a2e5fd4d --- /dev/null +++ b/.claude/skills/implementing-jsc-classes-cpp/SKILL.md @@ -0,0 +1,184 @@ +--- +name: implementing-jsc-classes-cpp +description: Implements JavaScript classes in C++ using JavaScriptCore. Use when creating new JS classes with C++ bindings, prototypes, or constructors. +--- + +# Implementing JavaScript Classes in C++ + +## Class Structure + +For publicly accessible Constructor and Prototype, create 3 classes: + +1. 
**`class Foo : public JSC::DestructibleObject`** - if C++ fields exist; otherwise use `JSC::constructEmptyObject` with `putDirectOffset` +2. **`class FooPrototype : public JSC::JSNonFinalObject`** +3. **`class FooConstructor : public JSC::InternalFunction`** + +No public constructor? Only Prototype and class needed. + +## Iso Subspaces + +Classes with C++ fields need subspaces in: + +- `src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h` +- `src/bun.js/bindings/webcore/DOMIsoSubspaces.h` + +```cpp +template +static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) { + if constexpr (mode == JSC::SubspaceAccess::Concurrently) + return nullptr; + return WebCore::subspaceForImpl( + vm, + [](auto& spaces) { return spaces.m_clientSubspaceForMyClassT.get(); }, + [](auto& spaces, auto&& space) { spaces.m_clientSubspaceForMyClassT = std::forward(space); }, + [](auto& spaces) { return spaces.m_subspaceForMyClassT.get(); }, + [](auto& spaces, auto&& space) { spaces.m_subspaceForMyClassT = std::forward(space); }); +} +``` + +## Property Definitions + +```cpp +static JSC_DECLARE_HOST_FUNCTION(jsFooProtoFuncMethod); +static JSC_DECLARE_CUSTOM_GETTER(jsFooGetter_property); + +static const HashTableValue JSFooPrototypeTableValues[] = { + { "property"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsFooGetter_property, 0 } }, + { "method"_s, static_cast(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsFooProtoFuncMethod, 1 } }, +}; +``` + +## Prototype Class + +```cpp +class JSFooPrototype final : public JSC::JSNonFinalObject { +public: + using Base = JSC::JSNonFinalObject; + static constexpr unsigned StructureFlags = Base::StructureFlags; + + static JSFooPrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure) { + JSFooPrototype* prototype = new (NotNull, allocateCell(vm)) JSFooPrototype(vm, structure); + prototype->finishCreation(vm); + return prototype; + } + + template + static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) { return &vm.plainObjectSpace(); } + + DECLARE_INFO; + + static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) { + auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info()); + structure->setMayBePrototype(true); + return structure; + } + +private: + JSFooPrototype(JSC::VM& vm, JSC::Structure* structure) : Base(vm, structure) {} + void finishCreation(JSC::VM& vm); +}; + +void JSFooPrototype::finishCreation(VM& vm) { + Base::finishCreation(vm); + reifyStaticProperties(vm, JSFoo::info(), JSFooPrototypeTableValues, *this); + JSC_TO_STRING_TAG_WITHOUT_TRANSITION(); +} +``` + +## Getter/Setter/Function Definitions + +```cpp +// Getter +JSC_DEFINE_CUSTOM_GETTER(jsFooGetter_prop, (JSGlobalObject* globalObject, EncodedJSValue thisValue, PropertyName)) { + VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + JSFoo* thisObject = jsDynamicCast(JSValue::decode(thisValue)); + if (UNLIKELY(!thisObject)) { + Bun::throwThisTypeError(*globalObject, scope, "JSFoo"_s, "prop"_s); + return {}; + } + return JSValue::encode(jsBoolean(thisObject->value())); +} + +// Function +JSC_DEFINE_HOST_FUNCTION(jsFooProtoFuncMethod, (JSGlobalObject* globalObject, CallFrame* callFrame)) { + VM& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + auto* thisObject = jsDynamicCast(callFrame->thisValue()); + 
if (UNLIKELY(!thisObject)) { + Bun::throwThisTypeError(*globalObject, scope, "Foo"_s, "method"_s); + return {}; + } + return JSValue::encode(thisObject->doSomething(vm, globalObject)); +} +``` + +## Constructor Class + +```cpp +class JSFooConstructor final : public JSC::InternalFunction { +public: + using Base = JSC::InternalFunction; + static constexpr unsigned StructureFlags = Base::StructureFlags; + + static JSFooConstructor* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSObject* prototype) { + JSFooConstructor* constructor = new (NotNull, JSC::allocateCell(vm)) JSFooConstructor(vm, structure); + constructor->finishCreation(vm, prototype); + return constructor; + } + + DECLARE_INFO; + + template + static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) { return &vm.internalFunctionSpace(); } + + static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) { + return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info()); + } + +private: + JSFooConstructor(JSC::VM& vm, JSC::Structure* structure) : Base(vm, structure, callFoo, constructFoo) {} + + void finishCreation(JSC::VM& vm, JSC::JSObject* prototype) { + Base::finishCreation(vm, 0, "Foo"_s); + putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly); + } +}; +``` + +## Structure Caching + +Add to `ZigGlobalObject.h`: + +```cpp +JSC::LazyClassStructure m_JSFooClassStructure; +``` + +Initialize in `ZigGlobalObject.cpp`: + +```cpp +m_JSFooClassStructure.initLater([](LazyClassStructure::Initializer& init) { + Bun::initJSFooClassStructure(init); +}); +``` + +Visit in `visitChildrenImpl`: + +```cpp +m_JSFooClassStructure.visit(visitor); +``` + +## Expose to Zig + +```cpp +extern "C" JSC::EncodedJSValue Bun__JSFooConstructor(Zig::GlobalObject* globalObject) { + return JSValue::encode(globalObject->m_JSFooClassStructure.constructor(globalObject)); +} + +extern "C" EncodedJSValue Bun__Foo__toJS(Zig::GlobalObject* globalObject, Foo* foo) { + auto* structure = globalObject->m_JSFooClassStructure.get(globalObject); + return JSValue::encode(JSFoo::create(globalObject->vm(), structure, globalObject, WTFMove(foo))); +} +``` + +Include `#include "root.h"` at the top of C++ files. diff --git a/.claude/skills/implementing-jsc-classes-zig/SKILL.md b/.claude/skills/implementing-jsc-classes-zig/SKILL.md new file mode 100644 index 0000000000..bf2e5a5368 --- /dev/null +++ b/.claude/skills/implementing-jsc-classes-zig/SKILL.md @@ -0,0 +1,206 @@ +--- +name: implementing-jsc-classes-zig +description: Creates JavaScript classes using Bun's Zig bindings generator (.classes.ts). Use when implementing new JS APIs in Zig with JSC integration. +--- + +# Bun's JavaScriptCore Class Bindings Generator + +Bridge JavaScript and Zig through `.classes.ts` definitions and Zig implementations. + +## Architecture + +1. **Zig Implementation** (.zig files) +2. **JavaScript Interface Definition** (.classes.ts files) +3. 
**Generated Code** (C++/Zig files connecting them) + +## Class Definition (.classes.ts) + +```typescript +define({ + name: "TextDecoder", + constructor: true, + JSType: "object", + finalize: true, + proto: { + decode: { args: 1 }, + encoding: { getter: true, cache: true }, + fatal: { getter: true }, + }, +}); +``` + +Options: + +- `name`: Class name +- `constructor`: Has public constructor +- `JSType`: "object", "function", etc. +- `finalize`: Needs cleanup +- `proto`: Properties/methods +- `cache`: Cache property values via WriteBarrier + +## Zig Implementation + +```zig +pub const TextDecoder = struct { + pub const js = JSC.Codegen.JSTextDecoder; + pub const toJS = js.toJS; + pub const fromJS = js.fromJS; + pub const fromJSDirect = js.fromJSDirect; + + encoding: []const u8, + fatal: bool, + + pub fn constructor( + globalObject: *JSGlobalObject, + callFrame: *JSC.CallFrame, + ) bun.JSError!*TextDecoder { + return bun.new(TextDecoder, .{ .encoding = "utf-8", .fatal = false }); + } + + pub fn decode( + this: *TextDecoder, + globalObject: *JSGlobalObject, + callFrame: *JSC.CallFrame, + ) bun.JSError!JSC.JSValue { + const args = callFrame.arguments(); + if (args.len < 1 or args.ptr[0].isUndefinedOrNull()) { + return globalObject.throw("Input cannot be null", .{}); + } + return JSC.JSValue.jsString(globalObject, "result"); + } + + pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue { + return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding); + } + + fn deinit(this: *TextDecoder) void { + // Release resources + } + + pub fn finalize(this: *TextDecoder) void { + this.deinit(); + bun.destroy(this); + } +}; +``` + +**Key patterns:** + +- Use `bun.JSError!JSValue` return type for error handling +- Use `globalObject` not `ctx` +- `deinit()` for cleanup, `finalize()` called by GC +- Update `src/bun.js/bindings/generated_classes_list.zig` + +## CallFrame Access + +```zig +const args = callFrame.arguments(); +const first_arg = args.ptr[0]; // Access as slice +const argCount = args.len; +const thisValue = callFrame.thisValue(); +``` + +## Property Caching + +For `cache: true` properties, generated accessors: + +```zig +// Get cached value +pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue { + const result = TextDecoderPrototype__encodingGetCachedValue(thisValue); + if (result == .zero) return null; + return result; +} + +// Set cached value +pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { + TextDecoderPrototype__encodingSetCachedValue(thisValue, globalObject, value); +} +``` + +## Error Handling + +```zig +pub fn method(this: *MyClass, globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { + const args = callFrame.arguments(); + if (args.len < 1) { + return globalObject.throw("Missing required argument", .{}); + } + return JSC.JSValue.jsString(globalObject, "Success!"); +} +``` + +## Memory Management + +```zig +pub fn deinit(this: *TextDecoder) void { + this._encoding.deref(); + if (this.buffer) |buffer| { + bun.default_allocator.free(buffer); + } +} + +pub fn finalize(this: *TextDecoder) void { + JSC.markBinding(@src()); + this.deinit(); + bun.default_allocator.destroy(this); +} +``` + +## Creating a New Binding + +1. Define interface in `.classes.ts`: + +```typescript +define({ + name: "MyClass", + constructor: true, + finalize: true, + proto: { + myMethod: { args: 1 }, + myProperty: { getter: true, cache: true }, + }, +}); +``` + +2. 
Implement in `.zig`: + +```zig +pub const MyClass = struct { + pub const js = JSC.Codegen.JSMyClass; + pub const toJS = js.toJS; + pub const fromJS = js.fromJS; + + value: []const u8, + + pub const new = bun.TrivialNew(@This()); + + pub fn constructor(globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!*MyClass { + return MyClass.new(.{ .value = "" }); + } + + pub fn myMethod(this: *MyClass, globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { + return JSC.JSValue.jsUndefined(); + } + + pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue { + return JSC.JSValue.jsString(globalObject, this.value); + } + + pub fn deinit(this: *MyClass) void {} + + pub fn finalize(this: *MyClass) void { + this.deinit(); + bun.destroy(this); + } +}; +``` + +3. Add to `src/bun.js/bindings/generated_classes_list.zig` + +## Generated Components + +- **C++ Classes**: `JSMyClass`, `JSMyClassPrototype`, `JSMyClassConstructor` +- **Method Bindings**: `MyClassPrototype__myMethodCallback` +- **Property Accessors**: `MyClassPrototype__myPropertyGetterWrap` +- **Zig Bindings**: External function declarations, cached value accessors diff --git a/.claude/skills/writing-bundler-tests/SKILL.md b/.claude/skills/writing-bundler-tests/SKILL.md new file mode 100644 index 0000000000..b8fbe56b78 --- /dev/null +++ b/.claude/skills/writing-bundler-tests/SKILL.md @@ -0,0 +1,222 @@ +--- +name: writing-bundler-tests +description: Guides writing bundler tests using itBundled/expectBundled in test/bundler/. Use when creating or modifying bundler, transpiler, or code transformation tests. +--- + +# Writing Bundler Tests + +Bundler tests use `itBundled()` from `test/bundler/expectBundled.ts` to test Bun's bundler. + +## Basic Usage + +```typescript +import { describe } from "bun:test"; +import { itBundled, dedent } from "./expectBundled"; + +describe("bundler", () => { + itBundled("category/TestName", { + files: { + "index.js": `console.log("hello");`, + }, + run: { + stdout: "hello", + }, + }); +}); +``` + +Test ID format: `category/TestName` (e.g., `banner/CommentBanner`, `minify/Empty`) + +## File Setup + +```typescript +{ + files: { + "index.js": `console.log("test");`, + "lib.ts": `export const foo = 123;`, + "nested/file.js": `export default {};`, + }, + entryPoints: ["index.js"], // defaults to first file + runtimeFiles: { // written AFTER bundling + "extra.js": `console.log("added later");`, + }, +} +``` + +## Bundler Options + +```typescript +{ + outfile: "/out.js", + outdir: "/out", + format: "esm" | "cjs" | "iife", + target: "bun" | "browser" | "node", + + // Minification + minifyWhitespace: true, + minifyIdentifiers: true, + minifySyntax: true, + + // Code manipulation + banner: "// copyright", + footer: "// end", + define: { "PROD": "true" }, + external: ["lodash"], + + // Advanced + sourceMap: "inline" | "external", + splitting: true, + treeShaking: true, + drop: ["console"], +} +``` + +## Runtime Verification + +```typescript +{ + run: { + stdout: "expected output", // exact match + stdout: /regex/, // pattern match + partialStdout: "contains this", // substring + stderr: "error output", + exitCode: 1, + env: { NODE_ENV: "production" }, + runtime: "bun" | "node", + + // Runtime errors + error: "ReferenceError: x is not defined", + }, +} +``` + +## Bundle Errors/Warnings + +```typescript +{ + bundleErrors: { + "/file.js": ["error message 1", "error message 2"], + }, + bundleWarnings: { + "/file.js": ["warning message"], + }, +} +``` + +## Dead Code 
Elimination (DCE) + +Add markers in source code: + +```javascript +// KEEP - this should survive +const used = 1; + +// REMOVE - this should be eliminated +const unused = 2; +``` + +```typescript +{ + dce: true, + dceKeepMarkerCount: 5, // expected KEEP markers +} +``` + +## Capture Pattern + +Verify exact transpilation with `capture()`: + +```typescript +itBundled("string/Folding", { + files: { + "index.ts": `capture(\`\${1 + 1}\`);`, + }, + capture: ['"2"'], // expected captured value + minifySyntax: true, +}); +``` + +## Post-Bundle Assertions + +```typescript +{ + onAfterBundle(api) { + api.expectFile("out.js").toContain("console.log"); + api.assertFileExists("out.js"); + + const content = api.readFile("out.js"); + expect(content).toMatchSnapshot(); + + const values = api.captureFile("out.js"); + expect(values).toEqual(["2"]); + }, +} +``` + +## Common Patterns + +**Simple output verification:** + +```typescript +itBundled("banner/Comment", { + banner: "// copyright", + files: { "a.js": `console.log("Hello")` }, + onAfterBundle(api) { + api.expectFile("out.js").toContain("// copyright"); + }, +}); +``` + +**Multi-file CJS/ESM interop:** + +```typescript +itBundled("cjs/ImportSyntax", { + files: { + "entry.js": `import lib from './lib.cjs'; console.log(lib);`, + "lib.cjs": `exports.foo = 'bar';`, + }, + run: { stdout: '{"foo":"bar"}' }, +}); +``` + +**Error handling:** + +```typescript +itBundled("edgecase/InvalidLoader", { + files: { "index.js": `...` }, + bundleErrors: { + "index.js": ["Unsupported loader type"], + }, +}); +``` + +## Test Organization + +```text +test/bundler/ +├── bundler_banner.test.ts +├── bundler_string.test.ts +├── bundler_minify.test.ts +├── bundler_cjs.test.ts +├── bundler_edgecase.test.ts +├── bundler_splitting.test.ts +├── css/ +├── transpiler/ +└── expectBundled.ts +``` + +## Running Tests + +```bash +bun bd test test/bundler/bundler_banner.test.ts +BUN_BUNDLER_TEST_FILTER="banner/Comment" bun bd test bundler_banner.test.ts +BUN_BUNDLER_TEST_DEBUG=1 bun bd test bundler_minify.test.ts +``` + +## Key Points + +- Use `dedent` for readable multi-line code +- File paths are relative (e.g., `/index.js`) +- Use `capture()` to verify exact transpilation results +- Use `.toMatchSnapshot()` for complex outputs +- Pass array to `run` for multiple test scenarios diff --git a/.claude/skills/writing-dev-server-tests/SKILL.md b/.claude/skills/writing-dev-server-tests/SKILL.md new file mode 100644 index 0000000000..eabbcfeb0a --- /dev/null +++ b/.claude/skills/writing-dev-server-tests/SKILL.md @@ -0,0 +1,94 @@ +--- +name: writing-dev-server-tests +description: Guides writing HMR/Dev Server tests in test/bake/. Use when creating or modifying dev server, hot reloading, or bundling tests. +--- + +# Writing HMR/Dev Server Tests + +Dev server tests validate hot-reloading robustness and reliability. 
+ +## File Structure + +- `test/bake/bake-harness.ts` - shared utilities: `devTest`, `prodTest`, `devAndProductionTest`, `Dev` class, `Client` class +- `test/bake/client-fixture.mjs` - subprocess for `Client` (page loading, IPC queries) +- `test/bake/dev/*.test.ts` - dev server and hot reload tests +- `test/bake/dev-and-prod.ts` - tests running on both dev and production mode + +## Test Categories + +- `bundle.test.ts` - DevServer-specific bundling bugs +- `css.test.ts` - CSS bundling issues +- `plugins.test.ts` - development mode plugins +- `ecosystem.test.ts` - library compatibility (prefer concrete bugs over full package tests) +- `esm.test.ts` - ESM features in development +- `html.test.ts` - HTML file handling +- `react-spa.test.ts` - React, react-refresh transform, server components +- `sourcemap.test.ts` - source map correctness + +## devTest Basics + +```ts +import { devTest, emptyHtmlFile } from "../bake-harness"; + +devTest("html file is watched", { + files: { + "index.html": emptyHtmlFile({ + scripts: ["/script.ts"], + body: "
<h1>Hello</h1>
", + }), + "script.ts": `console.log("hello");`, + }, + async test(dev) { + await dev.fetch("/").expect.toInclude("
<h1>Hello</h1>
"); + await dev.patch("index.html", { find: "Hello", replace: "World" }); + await dev.fetch("/").expect.toInclude("
<h1>World</h1>
"); + + await using c = await dev.client("/"); + await c.expectMessage("hello"); + + await c.expectReload(async () => { + await dev.patch("index.html", { find: "World", replace: "Bar" }); + }); + await c.expectMessage("hello"); + }, +}); +``` + +## Key APIs + +- **`files`**: Initial filesystem state +- **`dev.fetch()`**: HTTP requests +- **`dev.client()`**: Opens browser instance +- **`dev.write/patch/delete`**: Filesystem mutations (wait for hot-reload automatically) +- **`c.expectMessage()`**: Assert console.log output +- **`c.expectReload()`**: Wrap code that causes hard reload + +**Important**: Use `dev.write/patch/delete` instead of `node:fs` - they wait for hot-reload. + +## Testing Errors + +```ts +devTest("import then create", { + files: { + "index.html": ``, + "script.ts": `import data from "./data"; console.log(data);`, + }, + async test(dev) { + const c = await dev.client("/", { + errors: ['script.ts:1:18: error: Could not resolve: "./data"'], + }); + await c.expectReload(async () => { + await dev.write("data.ts", "export default 'data';"); + }); + await c.expectMessage("data"); + }, +}); +``` + +Specify expected errors with the `errors` option: + +```ts +await dev.delete("other.ts", { + errors: ['index.ts:1:16: error: Could not resolve: "./other"'], +}); +``` diff --git a/.claude/skills/zig-system-calls/SKILL.md b/.claude/skills/zig-system-calls/SKILL.md new file mode 100644 index 0000000000..48f179463a --- /dev/null +++ b/.claude/skills/zig-system-calls/SKILL.md @@ -0,0 +1,268 @@ +--- +name: zig-system-calls +description: Guides using bun.sys for system calls and file I/O in Zig. Use when implementing file operations instead of std.fs or std.posix. +--- + +# System Calls & File I/O in Zig + +Use `bun.sys` instead of `std.fs` or `std.posix` for cross-platform syscalls with proper error handling. + +## bun.sys.File (Preferred) + +For most file operations, use the `bun.sys.File` wrapper: + +```zig +const File = bun.sys.File; + +const file = switch (File.open(path, bun.O.RDWR, 0o644)) { + .result => |f| f, + .err => |err| return .{ .err = err }, +}; +defer file.close(); + +// Read/write +_ = try file.read(buffer).unwrap(); +_ = try file.writeAll(data).unwrap(); + +// Get file info +const stat = try file.stat().unwrap(); +const size = try file.getEndPos().unwrap(); + +// std.io compatible +const reader = file.reader(); +const writer = file.writer(); +``` + +### Complete Example + +```zig +const File = bun.sys.File; + +pub fn writeFile(path: [:0]const u8, data: []const u8) File.WriteError!void { + const file = switch (File.open(path, bun.O.WRONLY | bun.O.CREAT | bun.O.TRUNC, 0o664)) { + .result => |f| f, + .err => |err| return err.toError(), + }; + defer file.close(); + + _ = switch (file.writeAll(data)) { + .result => {}, + .err => |err| return err.toError(), + }; +} +``` + +## Why bun.sys? 
+ +| Aspect | bun.sys | std.fs/std.posix | +| ----------- | -------------------------------- | ------------------- | +| Return Type | `Maybe(T)` with detailed Error | Generic error union | +| Windows | Full support with libuv fallback | Limited/POSIX-only | +| Error Info | errno, syscall tag, path, fd | errno only | +| EINTR | Automatic retry | Manual handling | + +## Error Handling with Maybe(T) + +`bun.sys` functions return `Maybe(T)` - a tagged union: + +```zig +const sys = bun.sys; + +// Pattern 1: Switch on result/error +switch (sys.read(fd, buffer)) { + .result => |bytes_read| { + // use bytes_read + }, + .err => |err| { + // err.errno, err.syscall, err.fd, err.path + if (err.getErrno() == .AGAIN) { + // handle EAGAIN + } + }, +} + +// Pattern 2: Unwrap with try (converts to Zig error) +const bytes = try sys.read(fd, buffer).unwrap(); + +// Pattern 3: Unwrap with default +const value = sys.stat(path).unwrapOr(default_stat); +``` + +## Low-Level File Operations + +Only use these when `bun.sys.File` doesn't meet your needs. + +### Opening Files + +```zig +const sys = bun.sys; + +// Use bun.O flags (cross-platform normalized) +const fd = switch (sys.open(path, bun.O.RDONLY, 0)) { + .result => |fd| fd, + .err => |err| return .{ .err = err }, +}; +defer fd.close(); + +// Common flags +bun.O.RDONLY, bun.O.WRONLY, bun.O.RDWR +bun.O.CREAT, bun.O.TRUNC, bun.O.APPEND +bun.O.NONBLOCK, bun.O.DIRECTORY +``` + +### Reading & Writing + +```zig +// Single read (may return less than buffer size) +switch (sys.read(fd, buffer)) { + .result => |n| { /* n bytes read */ }, + .err => |err| { /* handle error */ }, +} + +// Read until EOF or buffer full +const total = try sys.readAll(fd, buffer).unwrap(); + +// Position-based read/write +sys.pread(fd, buffer, offset) +sys.pwrite(fd, data, offset) + +// Vector I/O +sys.readv(fd, iovecs) +sys.writev(fd, iovecs) +``` + +### File Info + +```zig +sys.stat(path) // Follow symlinks +sys.lstat(path) // Don't follow symlinks +sys.fstat(fd) // From file descriptor +sys.fstatat(fd, path) + +// Linux-only: faster selective stat +sys.statx(path, &.{ .size, .mtime }) +``` + +### Path Operations + +```zig +sys.unlink(path) +sys.unlinkat(dir_fd, path) +sys.rename(from, to) +sys.renameat(from_dir, from, to_dir, to) +sys.readlink(path, buf) +sys.readlinkat(fd, path, buf) +sys.link(T, src, dest) +sys.linkat(src_fd, src, dest_fd, dest) +sys.symlink(target, dest) +sys.symlinkat(target, dirfd, dest) +sys.mkdir(path, mode) +sys.mkdirat(dir_fd, path, mode) +sys.rmdir(path) +``` + +### Permissions + +```zig +sys.chmod(path, mode) +sys.fchmod(fd, mode) +sys.fchmodat(fd, path, mode, flags) +sys.chown(path, uid, gid) +sys.fchown(fd, uid, gid) +``` + +### Closing File Descriptors + +Close is on `bun.FD`: + +```zig +fd.close(); // Asserts on error (use in defer) + +// Or if you need error info: +if (fd.closeAllowingBadFileDescriptor(null)) |err| { + // handle error +} +``` + +## Directory Operations + +```zig +var buf: bun.PathBuffer = undefined; +const cwd = try sys.getcwd(&buf).unwrap(); +const cwdZ = try sys.getcwdZ(&buf).unwrap(); // Zero-terminated +sys.chdir(path, destination) +``` + +### Directory Iteration + +Use `bun.DirIterator` instead of `std.fs.Dir.Iterator`: + +```zig +var iter = bun.iterateDir(dir_fd); +while (true) { + switch (iter.next()) { + .result => |entry| { + if (entry) |e| { + const name = e.name.slice(); + const kind = e.kind; // .file, .directory, .sym_link, etc. 
+ } else { + break; // End of directory + } + }, + .err => |err| return .{ .err = err }, + } +} +``` + +## Socket Operations + +**Important**: `bun.sys` has limited socket support. For network I/O: + +- **Non-blocking sockets**: Use `uws.Socket` (libuwebsockets) exclusively +- **Pipes/blocking I/O**: Use `PipeReader.zig` and `PipeWriter.zig` + +Available in bun.sys: + +```zig +sys.setsockopt(fd, level, optname, value) +sys.socketpair(domain, socktype, protocol, nonblocking_status) +``` + +Do NOT use `bun.sys` for socket read/write - use `uws.Socket` instead. + +## Other Operations + +```zig +sys.ftruncate(fd, size) +sys.lseek(fd, offset, whence) +sys.dup(fd) +sys.dupWithFlags(fd, flags) +sys.fcntl(fd, cmd, arg) +sys.pipe() +sys.mmap(...) +sys.munmap(memory) +sys.access(path, mode) +sys.futimens(fd, atime, mtime) +sys.utimens(path, atime, mtime) +``` + +## Error Type + +```zig +const err: bun.sys.Error = ...; +err.errno // Raw errno value +err.getErrno() // As std.posix.E enum +err.syscall // Which syscall failed (Tag enum) +err.fd // Optional: file descriptor +err.path // Optional: path string +``` + +## Key Points + +- Prefer `bun.sys.File` wrapper for most file operations +- Use low-level `bun.sys` functions only when needed +- Use `bun.O.*` flags instead of `std.os.O.*` +- Handle `Maybe(T)` with switch or `.unwrap()` +- Use `defer fd.close()` for cleanup +- EINTR is handled automatically in most functions +- For sockets, use `uws.Socket` not `bun.sys` diff --git a/.coderabbit.yaml b/.coderabbit.yaml index 5a0c7d0aad..3cb5add023 100644 --- a/.coderabbit.yaml +++ b/.coderabbit.yaml @@ -1,5 +1,9 @@ language: en-US +issue_enrichment: + auto_enrich: + enabled: false + reviews: profile: assertive request_changes_workflow: false diff --git a/.cursor/rules/building-bun.mdc b/.cursor/rules/building-bun.mdc deleted file mode 100644 index 2fef59b551..0000000000 --- a/.cursor/rules/building-bun.mdc +++ /dev/null @@ -1,41 +0,0 @@ ---- -description: -globs: src/**/*.cpp,src/**/*.zig -alwaysApply: false ---- - -### Build Commands - -- **Build debug version**: `bun bd` or `bun run build:debug` - - Creates a debug build at `./build/debug/bun-debug` - - Compilation takes ~2.5 minutes -- **Run tests with your debug build**: `bun bd test ` - - **CRITICAL**: Never use `bun test` directly - it won't include your changes -- **Run any command with debug build**: `bun bd ` - -### Run a file - -To run a file, use: - -```sh -bun bd <...args> -``` - -**CRITICAL**: Never use `bun ` directly. It will not have your changes. - -### Logging - -`BUN_DEBUG_$(SCOPE)=1` enables debug logs for a specific debug log scope. - -Debug logs look like this: - -```zig -const log = bun.Output.scoped(.${SCOPE}, .hidden); - -// ...later -log("MY DEBUG LOG", .{}) -``` - -### Code Generation - -Code generation happens automatically as part of the build process. There are no commands to run. diff --git a/.cursor/rules/dev-server-tests.mdc b/.cursor/rules/dev-server-tests.mdc deleted file mode 100644 index 23b1ec153b..0000000000 --- a/.cursor/rules/dev-server-tests.mdc +++ /dev/null @@ -1,139 +0,0 @@ ---- -description: Writing HMR/Dev Server tests -globs: test/bake/* ---- - -# Writing HMR/Dev Server tests - -Dev server tests validate that hot-reloading is robust, correct, and reliable. Remember to write thorough, yet concise tests. 
- -## File Structure - -- `test/bake/bake-harness.ts` - shared utilities and test harness - - primary test functions `devTest` / `prodTest` / `devAndProductionTest` - - class `Dev` (controls subprocess for dev server) - - class `Client` (controls a happy-dom subprocess for having the page open) - - more helpers -- `test/bake/client-fixture.mjs` - subprocess for what `Client` controls. it loads a page and uses IPC to query parts of the page, run javascript, and much more. -- `test/bake/dev/*.test.ts` - these call `devTest` to test dev server and hot reloading -- `test/bake/dev-and-prod.ts` - these use `devAndProductionTest` to run the same test on dev and production mode. these tests cannot really test hot reloading for obvious reasons. - -## Categories - -bundle.test.ts - Bundle tests are tests concerning bundling bugs that only occur in DevServer. -css.test.ts - CSS tests concern bundling bugs with CSS files -plugins.test.ts - Plugin tests concern plugins in development mode. -ecosystem.test.ts - These tests involve ensuring certain libraries are correct. It is preferred to test more concrete bugs than testing entire packages. -esm.test.ts - ESM tests are about various esm features in development mode. -html.test.ts - HTML tests are tests relating to HTML files themselves. -react-spa.test.ts - Tests relating to React, our react-refresh transform, and basic server component transforms. -sourcemap.test.ts - Tests verifying source-maps are correct. - -## `devTest` Basics - -A test takes in two primary inputs: `files` and `async test(dev) {` - -```ts -import { devTest, emptyHtmlFile } from "../bake-harness"; - -devTest("html file is watched", { - files: { - "index.html": emptyHtmlFile({ - scripts: ["/script.ts"], - body: "
<h1>Hello</h1>
", - }), - "script.ts": ` - console.log("hello"); - `, - }, - async test(dev) { - await dev.fetch("/").expect.toInclude("
<h1>Hello</h1>
"); - await dev.fetch("/").expect.toInclude("
<h1>Hello</h1>
"); - await dev.patch("index.html", { - find: "Hello", - replace: "World", - }); - await dev.fetch("/").expect.toInclude("
<h1>World</h1>
"); - - // Works - await using c = await dev.client("/"); - await c.expectMessage("hello"); - - // Editing HTML reloads - await c.expectReload(async () => { - await dev.patch("index.html", { - find: "World", - replace: "Hello", - }); - await dev.fetch("/").expect.toInclude("
<h1>Hello</h1>
"); - }); - await c.expectMessage("hello"); - - await c.expectReload(async () => { - await dev.patch("index.html", { - find: "Hello", - replace: "Bar", - }); - await dev.fetch("/").expect.toInclude("
<h1>Bar</h1>
"); - }); - await c.expectMessage("hello"); - - await c.expectReload(async () => { - await dev.patch("script.ts", { - find: "hello", - replace: "world", - }); - }); - await c.expectMessage("world"); - }, -}); -``` - -`files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance to the code. - -Functions `dev.write` and `dev.patch` and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server ones are hooked to wait for hot-reload, and all connected clients to receive changes. - -When a change performs a hard-reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; All other hard reloads automatically fail the test. - -Client's have `console.log` instrumented, so that any unasserted logs fail the test. This makes it more obvious when an extra reload or re-evaluation. Messages are awaited via `c.expectMessage("log")` or with multiple arguments if there are multiple logs. - -## Testing for bundling errors - -By default, a client opening a page to an error will fail the test. This makes testing errors explicit. - -```ts -devTest("import then create", { - files: { - "index.html": ` - - - - - - - - `, - "script.ts": ` - import data from "./data"; - console.log(data); - `, - }, - async test(dev) { - const c = await dev.client("/", { - errors: ['script.ts:1:18: error: Could not resolve: "./data"'], - }); - await c.expectReload(async () => { - await dev.write("data.ts", "export default 'data';"); - }); - await c.expectMessage("data"); - }, -}); -``` - -Many functions take an options value to allow specifying it will produce errors. For example, this delete is going to cause a resolution failure. - -```ts -await dev.delete("other.ts", { - errors: ['index.ts:1:16: error: Could not resolve: "./other"'], -}); -``` diff --git a/.cursor/rules/javascriptcore-class.mdc b/.cursor/rules/javascriptcore-class.mdc deleted file mode 100644 index 0e1507223f..0000000000 --- a/.cursor/rules/javascriptcore-class.mdc +++ /dev/null @@ -1,413 +0,0 @@ ---- -description: JavaScript class implemented in C++ -globs: *.cpp -alwaysApply: false ---- - -# Implementing JavaScript classes in C++ - -If there is a publicly accessible Constructor and Prototype, then there are 3 classes: - -- IF there are C++ class members we need a destructor, so `class Foo : public JSC::DestructibleObject`, if no C++ class fields (only JS properties) then we don't need a class at all usually. We can instead use JSC::constructEmptyObject(vm, structure) and `putDirectOffset` like in [NodeFSStatBinding.cpp](mdc:src/bun.js/bindings/NodeFSStatBinding.cpp). -- class FooPrototype : public JSC::JSNonFinalObject -- class FooConstructor : public JSC::InternalFunction - -If there is no publicly accessible Constructor, just the Prototype and the class is necessary. In some cases, we can avoid the prototype entirely (but that's rare). - -If there are C++ fields on the Foo class, the Foo class will need an iso subspace added to [DOMClientIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h) and [DOMIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMIsoSubspaces.h). Prototype and Constructor do not need subspaces. - -Usually you'll need to #include "root.h" at the top of C++ files or you'll get lint errors. - -Generally, defining the subspace looks like this: - -```c++ - -class Foo : public JSC::DestructibleObject { - -// ... 
- - template - static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) - { - if constexpr (mode == JSC::SubspaceAccess::Concurrently) - return nullptr; - return WebCore::subspaceForImpl( - vm, - [](auto& spaces) { return spaces.m_clientSubspaceFor${MyClassT}.get(); }, - [](auto& spaces, auto&& space) { spaces.m_clientSubspaceFor${MyClassT} = std::forward(space); }, - [](auto& spaces) { return spaces.m_subspaceFo${MyClassT}.get(); }, - [](auto& spaces, auto&& space) { spaces.m_subspaceFor${MyClassT} = std::forward(space); }); - } - - -``` - -It's better to put it in the .cpp file instead of the .h file, when possible. - -## Defining properties - -Define properties on the prototype. Use a const HashTableValues like this: - -```C++ -static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckEmail); -static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckHost); -static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIP); -static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIssued); -static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckPrivateKey); -static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON); -static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToLegacyObject); -static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToString); -static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncVerify); - -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_ca); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint256); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint512); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subject); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subjectAltName); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_infoAccess); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_keyUsage); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuer); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuerCertificate); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_publicKey); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_raw); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_serialNumber); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFrom); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validTo); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFromDate); -static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validToDate); - -static const HashTableValue JSX509CertificatePrototypeTableValues[] = { - { "ca"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_ca, 0 } }, - { "checkEmail"_s, static_cast(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckEmail, 2 } }, - { "checkHost"_s, static_cast(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckHost, 2 } }, - { "checkIP"_s, static_cast(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIP, 1 } }, - { "checkIssued"_s, static_cast(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIssued, 1 } }, - { "checkPrivateKey"_s, static_cast(PropertyAttribute::Function), 
NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckPrivateKey, 1 } }, - { "fingerprint"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint, 0 } }, - { "fingerprint256"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint256, 0 } }, - { "fingerprint512"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint512, 0 } }, - { "infoAccess"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_infoAccess, 0 } }, - { "issuer"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuer, 0 } }, - { "issuerCertificate"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuerCertificate, 0 } }, - { "keyUsage"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_keyUsage, 0 } }, - { "publicKey"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_publicKey, 0 } }, - { "raw"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_raw, 0 } }, - { "serialNumber"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_serialNumber, 0 } }, - { "subject"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subject, 0 } }, - { "subjectAltName"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subjectAltName, 0 } }, - { "toJSON"_s, static_cast(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToJSON, 0 } }, - { "toLegacyObject"_s, static_cast(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToLegacyObject, 0 } }, - { "toString"_s, static_cast(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToString, 0 } }, - { "validFrom"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFrom, 0 } }, - { "validFromDate"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFromDate, 0 } }, - { "validTo"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validTo, 0 } }, - { "validToDate"_s, static_cast(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, 
{ HashTableValue::GetterSetterType, jsX509CertificateGetter_validToDate, 0 } }, - { "verify"_s, static_cast(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncVerify, 1 } }, -}; -``` - -### Creating a prototype class - -Follow a pattern like this: - -```c++ -class JSX509CertificatePrototype final : public JSC::JSNonFinalObject { -public: - using Base = JSC::JSNonFinalObject; - static constexpr unsigned StructureFlags = Base::StructureFlags; - - static JSX509CertificatePrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure) - { - JSX509CertificatePrototype* prototype = new (NotNull, allocateCell(vm)) JSX509CertificatePrototype(vm, structure); - prototype->finishCreation(vm); - return prototype; - } - - template - static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) - { - return &vm.plainObjectSpace(); - } - - DECLARE_INFO; - - static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) - { - auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info()); - structure->setMayBePrototype(true); - return structure; - } - -private: - JSX509CertificatePrototype(JSC::VM& vm, JSC::Structure* structure) - : Base(vm, structure) - { - } - - void finishCreation(JSC::VM& vm); -}; - -const ClassInfo JSX509CertificatePrototype::s_info = { "X509Certificate"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSX509CertificatePrototype) }; - -void JSX509CertificatePrototype::finishCreation(VM& vm) -{ - Base::finishCreation(vm); - reifyStaticProperties(vm, JSX509Certificate::info(), JSX509CertificatePrototypeTableValues, *this); - JSC_TO_STRING_TAG_WITHOUT_TRANSITION(); -} - -} // namespace Bun -``` - -### Getter definition: - -```C++ - -JSC_DEFINE_CUSTOM_GETTER(jsX509CertificateGetter_ca, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName)) -{ - VM& vm = globalObject->vm(); - auto scope = DECLARE_THROW_SCOPE(vm); - - JSX509Certificate* thisObject = jsDynamicCast(JSValue::decode(thisValue)); - if (UNLIKELY(!thisObject)) { - Bun::throwThisTypeError(*globalObject, scope, "JSX509Certificate"_s, "ca"_s); - return {}; - } - - return JSValue::encode(jsBoolean(thisObject->view().isCA())); -} -``` - -### Setter definition - -```C++ -JSC_DEFINE_CUSTOM_SETTER(jsImportMetaObjectSetter_require, (JSGlobalObject * jsGlobalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue encodedValue, PropertyName propertyName)) -{ - ImportMetaObject* thisObject = jsDynamicCast(JSValue::decode(thisValue)); - if (UNLIKELY(!thisObject)) - return false; - - JSValue value = JSValue::decode(encodedValue); - if (!value.isCell()) { - // TODO: - return true; - } - - thisObject->requireProperty.set(thisObject->vm(), thisObject, value.asCell()); - return true; -} -``` - -### Function definition - -```C++ -JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON, (JSGlobalObject * globalObject, CallFrame* callFrame)) -{ - VM& vm = globalObject->vm(); - auto scope = DECLARE_THROW_SCOPE(vm); - auto *thisObject = jsDynamicCast(callFrame->thisValue()); - if (UNLIKELY(!thisObject)) { - Bun::throwThisTypeError(*globalObject, scope, "MyClass"_s, "myFunctionName"_s); - return {}; - } - - return JSValue::encode(functionThatReturnsJSValue(vm, globalObject, thisObject)); -} -``` - -### Constructor definition - -```C++ - -JSC_DECLARE_HOST_FUNCTION(callStats); -JSC_DECLARE_HOST_FUNCTION(constructStats); - -class 
JSStatsConstructor final : public JSC::InternalFunction { -public: - using Base = JSC::InternalFunction; - static constexpr unsigned StructureFlags = Base::StructureFlags; - - static JSStatsConstructor* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSObject* prototype) - { - JSStatsConstructor* constructor = new (NotNull, JSC::allocateCell(vm)) JSStatsConstructor(vm, structure); - constructor->finishCreation(vm, prototype); - return constructor; - } - - DECLARE_INFO; - - template - static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) - { - return &vm.internalFunctionSpace(); - } - - static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) - { - return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info()); - } - -private: - JSStatsConstructor(JSC::VM& vm, JSC::Structure* structure) - : Base(vm, structure, callStats, constructStats) - { - } - - void finishCreation(JSC::VM& vm, JSC::JSObject* prototype) - { - Base::finishCreation(vm, 0, "Stats"_s); - putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly); - } -}; -``` - -### Structure caching - -If there's a class, prototype, and constructor: - -1. Add the `JSC::LazyClassStructure` to [ZigGlobalObject.h](mdc:src/bun.js/bindings/ZigGlobalObject.h) -2. Initialize the class structure in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::finishCreation(VM& vm)` -3. Visit the class structure in visitChildren in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::visitChildrenImpl` - -```c++#ZigGlobalObject.cpp -void GlobalObject::finishCreation(VM& vm) { -// ... - m_JSStatsBigIntClassStructure.initLater( - [](LazyClassStructure::Initializer& init) { - // Call the function to initialize our class structure. - Bun::initJSBigIntStatsClassStructure(init); - }); -``` - -Then, implement the function that creates the structure: - -```c++ -void setupX509CertificateClassStructure(LazyClassStructure::Initializer& init) -{ - auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype()); - auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure); - - auto* constructorStructure = JSX509CertificateConstructor::createStructure(init.vm, init.global, init.global->functionPrototype()); - - auto* constructor = JSX509CertificateConstructor::create(init.vm, init.global, constructorStructure, prototype); - - auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype); - init.setPrototype(prototype); - init.setStructure(structure); - init.setConstructor(constructor); -} -``` - -If there's only a class, use `JSC::LazyProperty` instead of `JSC::LazyClassStructure`: - -1. Add the `JSC::LazyProperty` to @ZigGlobalObject.h -2. Initialize the class structure in @ZigGlobalObject.cpp in `void GlobalObject::finishCreation(VM& vm)` -3. Visit the lazy property in visitChildren in @ZigGlobalObject.cpp in `void GlobalObject::visitChildrenImpl` - void GlobalObject::finishCreation(VM& vm) { - // ... 
- this.m_myLazyProperty.initLater([](const JSC::LazyProperty::Initializer& init) { - init.set(Bun::initMyStructure(init.vm, reinterpret_cast(init.owner))); - }); - -``` - -Then, implement the function that creates the structure: -```c++ -Structure* setupX509CertificateStructure(JSC::VM &vm, Zig::GlobalObject* globalObject) -{ - // If there is a prototype: - auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype()); - auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure); - - // If there is no prototype or it only has - - auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype); - init.setPrototype(prototype); - init.setStructure(structure); - init.setConstructor(constructor); -} -``` - -Then, use the structure by calling `globalObject.m_myStructureName.get(globalObject)` - -```C++ -JSC_DEFINE_HOST_FUNCTION(x509CertificateConstructorConstruct, (JSGlobalObject * globalObject, CallFrame* callFrame)) -{ - VM& vm = globalObject->vm(); - auto scope = DECLARE_THROW_SCOPE(vm); - - if (!callFrame->argumentCount()) { - Bun::throwError(globalObject, scope, ErrorCode::ERR_MISSING_ARGS, "X509Certificate constructor requires at least one argument"_s); - return {}; - } - - JSValue arg = callFrame->uncheckedArgument(0); - if (!arg.isCell()) { - Bun::throwError(globalObject, scope, ErrorCode::ERR_INVALID_ARG_TYPE, "X509Certificate constructor argument must be a Buffer, TypedArray, or string"_s); - return {}; - } - - auto* zigGlobalObject = defaultGlobalObject(globalObject); - Structure* structure = zigGlobalObject->m_JSX509CertificateClassStructure.get(zigGlobalObject); - JSValue newTarget = callFrame->newTarget(); - if (UNLIKELY(zigGlobalObject->m_JSX509CertificateClassStructure.constructor(zigGlobalObject) != newTarget)) { - auto scope = DECLARE_THROW_SCOPE(vm); - if (!newTarget) { - throwTypeError(globalObject, scope, "Class constructor X509Certificate cannot be invoked without 'new'"_s); - return {}; - } - - auto* functionGlobalObject = defaultGlobalObject(getFunctionRealm(globalObject, newTarget.getObject())); - RETURN_IF_EXCEPTION(scope, {}); - structure = InternalFunction::createSubclassStructure(globalObject, newTarget.getObject(), functionGlobalObject->NodeVMScriptStructure()); - RETURN_IF_EXCEPTION(scope, {}); - } - - return JSValue::encode(createX509Certificate(vm, globalObject, structure, arg)); -} -``` - -### Expose to Zig - -To expose the constructor to zig: - -```c++ -extern "C" JSC::EncodedJSValue Bun__JSBigIntStatsObjectConstructor(Zig::GlobalObject* globalobject) -{ - return JSValue::encode(globalobject->m_JSStatsBigIntClassStructure.constructor(globalobject)); -} -``` - -Zig: - -```zig -extern "c" fn Bun__JSBigIntStatsObjectConstructor(*JSC.JSGlobalObject) JSC.JSValue; -pub const getBigIntStatsConstructor = Bun__JSBigIntStatsObjectConstructor; -``` - -To create an object (instance) of a JS class defined in C++ from Zig, follow the \_\_toJS convention like this: - -```c++ -// X509* is whatever we need to create the object -extern "C" EncodedJSValue Bun__X509__toJS(Zig::GlobalObject* globalObject, X509* cert) -{ - // ... implementation details - auto* structure = globalObject->m_JSX509CertificateClassStructure.get(globalObject); - return JSValue::encode(JSX509Certificate::create(globalObject->vm(), structure, globalObject, WTFMove(cert))); -} -``` - -And from Zig: - -```zig -const X509 = opaque { - // ... 
class - - extern fn Bun__X509__toJS(*JSC.JSGlobalObject, *X509) JSC.JSValue; - - pub fn toJS(this: *X509, globalObject: *JSC.JSGlobalObject) JSC.JSValue { - return Bun__X509__toJS(globalObject, this); - } -}; -``` diff --git a/.cursor/rules/registering-bun-modules.mdc b/.cursor/rules/registering-bun-modules.mdc deleted file mode 100644 index 225eaa56ed..0000000000 --- a/.cursor/rules/registering-bun-modules.mdc +++ /dev/null @@ -1,203 +0,0 @@ -# Registering Functions, Objects, and Modules in Bun - -This guide documents the process of adding new functionality to the Bun global object and runtime. - -## Overview - -Bun's architecture exposes functionality to JavaScript through a set of carefully registered functions, objects, and modules. Most core functionality is implemented in Zig, with JavaScript bindings that make these features accessible to users. - -There are several key ways to expose functionality in Bun: - -1. **Global Functions**: Direct methods on the `Bun` object (e.g., `Bun.serve()`) -2. **Getter Properties**: Lazily initialized properties on the `Bun` object (e.g., `Bun.sqlite`) -3. **Constructor Classes**: Classes available through the `Bun` object (e.g., `Bun.ValkeyClient`) -4. **Global Modules**: Modules that can be imported directly (e.g., `import {X} from "bun:*"`) - -## The Registration Process - -Adding new functionality to Bun involves several coordinated steps across multiple files: - -### 1. Implement the Core Functionality in Zig - -First, implement your feature in Zig, typically in its own directory in `src/`. Examples: - -- `src/valkey/` for Redis/Valkey client -- `src/semver/` for SemVer functionality -- `src/smtp/` for SMTP client - -### 2. Create JavaScript Bindings - -Create bindings that expose your Zig functionality to JavaScript: - -- Create a class definition file (e.g., `js_bindings.classes.ts`) to define the JavaScript interface -- Implement `JSYourFeature` struct in a file like `js_your_feature.zig` - -Example from a class definition file: - -```typescript -// Example from a .classes.ts file -import { define } from "../../codegen/class-definitions"; - -export default [ - define({ - name: "YourFeature", - construct: true, - finalize: true, - hasPendingActivity: true, - memoryCost: true, - klass: {}, - JSType: "0b11101110", - proto: { - yourMethod: { - fn: "yourZigMethod", - length: 1, - }, - property: { - getter: "getProperty", - }, - }, - values: ["cachedValues"], - }), -]; -``` - -### 3. Register with BunObject in `src/bun.js/bindings/BunObject+exports.h` - -Add an entry to the `FOR_EACH_GETTER` macro: - -```c -// In BunObject+exports.h -#define FOR_EACH_GETTER(macro) \ - macro(CSRF) \ - macro(CryptoHasher) \ - ... \ - macro(YourFeature) \ -``` - -### 4. Create a Getter Function in `src/bun.js/api/BunObject.zig` - -Implement a getter function in `BunObject.zig` that returns your feature: - -```zig -// In BunObject.zig -pub const YourFeature = toJSGetter(Bun.getYourFeatureConstructor); - -// In the exportAll() function: -@export(&BunObject.YourFeature, .{ .name = getterName("YourFeature") }); -``` - -### 5. Implement the Getter Function in a Relevant Zig File - -Implement the function that creates your object: - -```zig -// In your main module file (e.g., src/your_feature/your_feature.zig) -pub fn getYourFeatureConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue { - return JSC.API.YourFeature.getConstructor(globalThis); -} -``` - -### 6. 
Add to Build System - -Ensure your files are included in the build system by adding them to the appropriate targets. - -## Example: Adding a New Module - -Here's a comprehensive example of adding a hypothetical SMTP module: - -1. Create implementation files in `src/smtp/`: - - - `index.zig`: Main entry point that exports everything - - `SmtpClient.zig`: Core SMTP client implementation - - `js_smtp.zig`: JavaScript bindings - - `js_bindings.classes.ts`: Class definition - -2. Define your JS class in `js_bindings.classes.ts`: - -```typescript -import { define } from "../../codegen/class-definitions"; - -export default [ - define({ - name: "EmailClient", - construct: true, - finalize: true, - hasPendingActivity: true, - configurable: false, - memoryCost: true, - klass: {}, - JSType: "0b11101110", - proto: { - send: { - fn: "send", - length: 1, - }, - verify: { - fn: "verify", - length: 0, - }, - close: { - fn: "close", - length: 0, - }, - }, - values: ["connectionPromise"], - }), -]; -``` - -3. Add getter to `BunObject+exports.h`: - -```c -#define FOR_EACH_GETTER(macro) \ - macro(CSRF) \ - ... \ - macro(SMTP) \ -``` - -4. Add getter function to `BunObject.zig`: - -```zig -pub const SMTP = toJSGetter(Bun.getSmtpConstructor); - -// In exportAll: -@export(&BunObject.SMTP, .{ .name = getterName("SMTP") }); -``` - -5. Implement getter in your module: - -```zig -pub fn getSmtpConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue { - return JSC.API.JSEmailClient.getConstructor(globalThis); -} -``` - -## Best Practices - -1. **Follow Naming Conventions**: Align your naming with existing patterns -2. **Reference Existing Modules**: Study similar modules like Valkey or S3Client for guidance -3. **Memory Management**: Be careful with memory management and reference counting -4. **Error Handling**: Use `bun.JSError!JSValue` for proper error propagation -5. **Documentation**: Add JSDoc comments to your JavaScript bindings -6. **Testing**: Add tests for your new functionality - -## Common Gotchas - -- Be sure to handle reference counting properly with `ref()`/`deref()` -- Always implement proper cleanup in `deinit()` and `finalize()` -- For network operations, manage socket lifetimes correctly -- Use `JSC.Codegen` correctly to generate necessary binding code - -## Related Files - -- `src/bun.js/bindings/BunObject+exports.h`: Registration of getters and functions -- `src/bun.js/api/BunObject.zig`: Implementation of getters and object creation -- `src/bun.js/api/BunObject.classes.ts`: Class definitions -- `.cursor/rules/zig-javascriptcore-classes.mdc`: More details on class bindings - -## Additional Resources - -For more detailed information on specific topics: - -- See `zig-javascriptcore-classes.mdc` for details on creating JS class bindings -- Review existing modules like `valkey`, `sqlite`, or `s3` for real-world examples diff --git a/.cursor/rules/writing-tests.mdc b/.cursor/rules/writing-tests.mdc deleted file mode 100644 index c5ce928766..0000000000 --- a/.cursor/rules/writing-tests.mdc +++ /dev/null @@ -1,91 +0,0 @@ ---- -description: Writing tests for Bun -globs: ---- -# Writing tests for Bun - -## Where tests are found - -You'll find all of Bun's tests in the `test/` directory. 
- -* `test/` - * `cli/` - CLI command tests, like `bun install` or `bun init` - * `js/` - JavaScript & TypeScript tests - * `bun/` - `Bun` APIs tests, separated by category, for example: `glob/` for `Bun.Glob` tests - * `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests - * `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style) - * `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests - * `third_party/` - npm package tests, to validate that basic usage works in Bun - * `napi/` - N-API tests - * `v8/` - V8 C++ API tests - * `bundler/` - Bundler, transpiler, CSS, and `bun build` tests - * `regression/issue/[number]` - Regression tests, always make one when fixing a particular issue - -## How tests are written - -Bun's tests are written as JavaScript and TypeScript files with the Jest-style APIs, like `test`, `describe`, and `expect`. They are tested using Bun's own test runner, `bun test`. - -```js -import { describe, test, expect } from "bun:test"; -import assert, { AssertionError } from "assert"; - -describe("assert(expr)", () => { - test.each([true, 1, "foo"])(`assert(%p) does not throw`, expr => { - expect(() => assert(expr)).not.toThrow(); - }); - - test.each([false, 0, "", null, undefined])(`assert(%p) throws`, expr => { - expect(() => assert(expr)).toThrow(AssertionError); - }); -}); -``` - -## Testing conventions - -* See `test/harness.ts` for common test utilities and helpers -* Be rigorous and test for edge-cases and unexpected inputs -* Use data-driven tests, e.g. `test.each`, to reduce boilerplate when possible -* When you need to test Bun as a CLI, use the following pattern: - -```js -import { test, expect } from "bun:test"; -import { spawn } from "bun"; -import { bunExe, bunEnv } from "harness"; - -test("bun --version", async () => { - const { exited, stdout: stdoutStream, stderr: stderrStream } = spawn({ - cmd: [bunExe(), "--version"], - env: bunEnv, - stdout: "pipe", - stderr: "pipe", - }); - const [ exitCode, stdout, stderr ] = await Promise.all([ - exited, - new Response(stdoutStream).text(), - new Response(stderrStream).text(), - ]); - expect({ exitCode, stdout, stderr }).toMatchObject({ - exitCode: 0, - stdout: expect.stringContaining(Bun.version), - stderr: "", - }); -}); -``` - -## Before writing a test - -* If you are fixing a bug, write the test first and make sure it fails (as expected) with the canary version of Bun -* If you are fixing a Node.js compatibility bug, create a throw-away snippet of code and test that it works as you expect in Node.js, then that it fails (as expected) with the canary version of Bun -* When the expected behaviour is ambigious, defer to matching what happens in Node.js -* Always attempt to find related tests in an existing test file before creating a new test file - - - - - - - - - - - diff --git a/.cursor/rules/zig-javascriptcore-classes.mdc b/.cursor/rules/zig-javascriptcore-classes.mdc deleted file mode 100644 index 88636c9752..0000000000 --- a/.cursor/rules/zig-javascriptcore-classes.mdc +++ /dev/null @@ -1,509 +0,0 @@ ---- -description: How Zig works with JavaScriptCore bindings generator -globs: -alwaysApply: false ---- - -# Bun's JavaScriptCore Class Bindings Generator - -This document explains how Bun's class bindings generator works to bridge Zig and JavaScript code through JavaScriptCore (JSC). 
- -## Architecture Overview - -Bun's binding system creates a seamless bridge between JavaScript and Zig, allowing Zig implementations to be exposed as JavaScript classes. The system has several key components: - -1. **Zig Implementation** (.zig files) -2. **JavaScript Interface Definition** (.classes.ts files) -3. **Generated Code** (C++/Zig files that connect everything) - -## Class Definition Files - -### JavaScript Interface (.classes.ts) - -The `.classes.ts` files define the JavaScript API using a declarative approach: - -```typescript -// Example: encoding.classes.ts -define({ - name: "TextDecoder", - constructor: true, - JSType: "object", - finalize: true, - proto: { - decode: { - // Function definition - args: 1, - }, - encoding: { - // Getter with caching - getter: true, - cache: true, - }, - fatal: { - // Read-only property - getter: true, - }, - ignoreBOM: { - // Read-only property - getter: true, - }, - }, -}); -``` - -Each class definition specifies: - -- The class name -- Whether it has a constructor -- JavaScript type (object, function, etc.) -- Properties and methods in the `proto` field -- Caching strategy for properties -- Finalization requirements - -### Zig Implementation (.zig) - -The Zig files implement the native functionality: - -```zig -// Example: TextDecoder.zig -pub const TextDecoder = struct { - // Expose generated bindings as `js` namespace with trait conversion methods - pub const js = JSC.Codegen.JSTextDecoder; - pub const toJS = js.toJS; - pub const fromJS = js.fromJS; - pub const fromJSDirect = js.fromJSDirect; - - // Internal state - encoding: []const u8, - fatal: bool, - ignoreBOM: bool, - - // Constructor implementation - note use of globalObject - pub fn constructor( - globalObject: *JSGlobalObject, - callFrame: *JSC.CallFrame, - ) bun.JSError!*TextDecoder { - // Implementation - - return bun.new(TextDecoder, .{ - // Fields - }); - } - - // Prototype methods - note return type includes JSError - pub fn decode( - this: *TextDecoder, - globalObject: *JSGlobalObject, - callFrame: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - // Implementation - } - - // Getters - pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue { - return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding); - } - - pub fn getFatal(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue { - return JSC.JSValue.jsBoolean(this.fatal); - } - - // Cleanup - note standard pattern of using deinit/deref - fn deinit(this: *TextDecoder) void { - // Release any retained resources - // Free the pointer at the end. - bun.destroy(this); - } - - // Finalize - called by JS garbage collector. This should call deinit, or deref if reference counted. - pub fn finalize(this: *TextDecoder) void { - this.deinit(); - } -}; -``` - -Key components in the Zig file: - -- The struct containing native state -- `pub const js = JSC.Codegen.JS` to include generated code -- Constructor and methods using `bun.JSError!JSValue` return type for proper error handling -- Consistent use of `globalObject` parameter name instead of `ctx` -- Methods matching the JavaScript interface -- Getters/setters for properties -- Proper resource cleanup pattern with `deinit()` and `finalize()` -- Update `src/bun.js/bindings/generated_classes_list.zig` to include the new class - -## Code Generation System - -The binding generator produces C++ code that connects JavaScript and Zig: - -1. **JSC Class Structure**: Creates C++ classes for the JS object, prototype, and constructor -2. 
**Memory Management**: Handles GC integration through JSC's WriteBarrier -3. **Method Binding**: Connects JS function calls to Zig implementations -4. **Type Conversion**: Converts between JS values and Zig types -5. **Property Caching**: Implements the caching system for properties - -The generated C++ code includes: - -- A JSC wrapper class (`JSTextDecoder`) -- A prototype class (`JSTextDecoderPrototype`) -- A constructor function (`JSTextDecoderConstructor`) -- Function bindings (`TextDecoderPrototype__decodeCallback`) -- Property getters/setters (`TextDecoderPrototype__encodingGetterWrap`) - -## CallFrame Access - -The `CallFrame` object provides access to JavaScript execution context: - -```zig -pub fn decode( - this: *TextDecoder, - globalObject: *JSGlobalObject, - callFrame: *JSC.CallFrame -) bun.JSError!JSC.JSValue { - // Get arguments - const input = callFrame.argument(0); - const options = callFrame.argument(1); - - // Get this value - const thisValue = callFrame.thisValue(); - - // Implementation with error handling - if (input.isUndefinedOrNull()) { - return globalObject.throw("Input cannot be null or undefined", .{}); - } - - // Return value or throw error - return JSC.JSValue.jsString(globalObject, "result"); -} -``` - -CallFrame methods include: - -- `argument(i)`: Get the i-th argument -- `argumentCount()`: Get the number of arguments -- `thisValue()`: Get the `this` value -- `callee()`: Get the function being called - -## Property Caching and GC-Owned Values - -The `cache: true` option in property definitions enables JSC's WriteBarrier to efficiently store values: - -```typescript -encoding: { - getter: true, - cache: true, // Enable caching -} -``` - -### C++ Implementation - -In the generated C++ code, caching uses JSC's WriteBarrier: - -```cpp -JSC_DEFINE_CUSTOM_GETTER(TextDecoderPrototype__encodingGetterWrap, (...)) { - auto& vm = JSC::getVM(lexicalGlobalObject); - Zig::GlobalObject *globalObject = reinterpret_cast(lexicalGlobalObject); - auto throwScope = DECLARE_THROW_SCOPE(vm); - JSTextDecoder* thisObject = jsCast(JSValue::decode(encodedThisValue)); - JSC::EnsureStillAliveScope thisArg = JSC::EnsureStillAliveScope(thisObject); - - // Check for cached value and return if present - if (JSValue cachedValue = thisObject->m_encoding.get()) - return JSValue::encode(cachedValue); - - // Get value from Zig implementation - JSC::JSValue result = JSC::JSValue::decode( - TextDecoderPrototype__getEncoding(thisObject->wrapped(), globalObject) - ); - RETURN_IF_EXCEPTION(throwScope, {}); - - // Store in cache for future access - thisObject->m_encoding.set(vm, thisObject, result); - RELEASE_AND_RETURN(throwScope, JSValue::encode(result)); -} -``` - -### Zig Accessor Functions - -For each cached property, the generator creates Zig accessor functions that allow Zig code to work with these GC-owned values: - -```zig -// External function declarations -extern fn TextDecoderPrototype__encodingSetCachedValue(JSC.JSValue, *JSC.JSGlobalObject, JSC.JSValue) callconv(JSC.conv) void; -extern fn TextDecoderPrototype__encodingGetCachedValue(JSC.JSValue) callconv(JSC.conv) JSC.JSValue; - -/// `TextDecoder.encoding` setter -/// This value will be visited by the garbage collector. -pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { - JSC.markBinding(@src()); - TextDecoderPrototype__encodingSetCachedValue(thisValue, globalObject, value); -} - -/// `TextDecoder.encoding` getter -/// This value will be visited by the garbage collector. 
-pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue { - JSC.markBinding(@src()); - const result = TextDecoderPrototype__encodingGetCachedValue(thisValue); - if (result == .zero) - return null; - - return result; -} -``` - -### Benefits of GC-Owned Values - -This system provides several key benefits: - -1. **Automatic Memory Management**: The JavaScriptCore GC tracks and manages these values -2. **Proper Garbage Collection**: The WriteBarrier ensures values are properly visited during GC -3. **Consistent Access**: Zig code can easily get/set these cached JS values -4. **Performance**: Cached values avoid repeated computation or serialization - -### Use Cases - -GC-owned cached values are particularly useful for: - -1. **Computed Properties**: Store expensive computation results -2. **Lazily Created Objects**: Create objects only when needed, then cache them -3. **References to Other Objects**: Store references to other JS objects that need GC tracking -4. **Memoization**: Cache results based on input parameters - -The WriteBarrier mechanism ensures that any JS values stored in this way are properly tracked by the garbage collector. - -## Memory Management and Finalization - -The binding system handles memory management across the JavaScript/Zig boundary: - -1. **Object Creation**: JavaScript `new TextDecoder()` creates both a JS wrapper and a Zig struct -2. **Reference Tracking**: JSC's GC tracks all JS references to the object -3. **Finalization**: When the JS object is collected, the finalizer releases Zig resources - -Bun uses a consistent pattern for resource cleanup: - -```zig -// Resource cleanup method - separate from finalization -pub fn deinit(this: *TextDecoder) void { - // Release resources like strings - this._encoding.deref(); // String deref pattern - - // Free any buffers - if (this.buffer) |buffer| { - bun.default_allocator.free(buffer); - } -} - -// Called by the GC when object is collected -pub fn finalize(this: *TextDecoder) void { - JSC.markBinding(@src()); // For debugging - this.deinit(); // Clean up resources - bun.default_allocator.destroy(this); // Free the object itself -} -``` - -Some objects that hold references to other JS objects use `.deref()` instead: - -```zig -pub fn finalize(this: *SocketAddress) void { - JSC.markBinding(@src()); - this._presentation.deref(); // Release references - this.destroy(); -} -``` - -## Error Handling with JSError - -Bun uses `bun.JSError!JSValue` return type for proper error handling: - -```zig -pub fn decode( - this: *TextDecoder, - globalObject: *JSGlobalObject, - callFrame: *JSC.CallFrame -) bun.JSError!JSC.JSValue { - // Throwing an error - if (callFrame.argumentCount() < 1) { - return globalObject.throw("Missing required argument", .{}); - } - - // Or returning a success value - return JSC.JSValue.jsString(globalObject, "Success!"); -} -``` - -This pattern allows Zig functions to: - -1. Return JavaScript values on success -2. Throw JavaScript exceptions on error -3. 
Propagate errors automatically through the call stack - -## Type Safety and Error Handling - -The binding system includes robust error handling: - -```cpp -// Example of type checking in generated code -JSTextDecoder* thisObject = jsDynamicCast(callFrame->thisValue()); -if (UNLIKELY(!thisObject)) { - scope.throwException(lexicalGlobalObject, - Bun::createInvalidThisError(lexicalGlobalObject, callFrame->thisValue(), "TextDecoder"_s)); - return {}; -} -``` - -## Prototypal Inheritance - -The binding system creates proper JavaScript prototype chains: - -1. **Constructor**: JSTextDecoderConstructor with standard .prototype property -2. **Prototype**: JSTextDecoderPrototype with methods and properties -3. **Instances**: Each JSTextDecoder instance with **proto** pointing to prototype - -This ensures JavaScript inheritance works as expected: - -```cpp -// From generated code -void JSTextDecoderConstructor::finishCreation(VM& vm, JSC::JSGlobalObject* globalObject, JSTextDecoderPrototype* prototype) -{ - Base::finishCreation(vm, 0, "TextDecoder"_s, PropertyAdditionMode::WithoutStructureTransition); - - // Set up the prototype chain - putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly); - ASSERT(inherits(info())); -} -``` - -## Performance Considerations - -The binding system is optimized for performance: - -1. **Direct Pointer Access**: JavaScript objects maintain a direct pointer to Zig objects -2. **Property Caching**: WriteBarrier caching avoids repeated native calls for stable properties -3. **Memory Management**: JSC garbage collection integrated with Zig memory management -4. **Type Conversion**: Fast paths for common JavaScript/Zig type conversions - -## Creating a New Class Binding - -To create a new class binding in Bun: - -1. **Define the class interface** in a `.classes.ts` file: - - ```typescript - define({ - name: "MyClass", - constructor: true, - finalize: true, - proto: { - myMethod: { - args: 1, - }, - myProperty: { - getter: true, - cache: true, - }, - }, - }); - ``` - -2. **Implement the native functionality** in a `.zig` file: - - ```zig - pub const MyClass = struct { - // Generated bindings - pub const js = JSC.Codegen.JSMyClass; - pub const toJS = js.toJS; - pub const fromJS = js.fromJS; - pub const fromJSDirect = js.fromJSDirect; - - // State - value: []const u8, - - pub const new = bun.TrivialNew(@This()); - - // Constructor - pub fn constructor( - globalObject: *JSGlobalObject, - callFrame: *JSC.CallFrame, - ) bun.JSError!*MyClass { - const arg = callFrame.argument(0); - // Implementation - } - - // Method - pub fn myMethod( - this: *MyClass, - globalObject: *JSGlobalObject, - callFrame: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - // Implementation - } - - // Getter - pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue { - return JSC.JSValue.jsString(globalObject, this.value); - } - - // Resource cleanup - pub fn deinit(this: *MyClass) void { - // Clean up resources - } - - pub fn finalize(this: *MyClass) void { - this.deinit(); - bun.destroy(this); - } - }; - ``` - -3. **The binding generator** creates all necessary C++ and Zig glue code to connect JavaScript and Zig, including: - - C++ class definitions - - Method and property bindings - - Memory management utilities - - GC integration code - -## Generated Code Structure - -The binding generator produces several components: - -### 1. 
C++ Classes - -For each Zig class, the system generates: - -- **JS**: Main wrapper that holds a pointer to the Zig object (`JSTextDecoder`) -- **JSPrototype**: Contains methods and properties (`JSTextDecoderPrototype`) -- **JSConstructor**: Implementation of the JavaScript constructor (`JSTextDecoderConstructor`) - -### 2. C++ Methods and Properties - -- **Method Callbacks**: `TextDecoderPrototype__decodeCallback` -- **Property Getters/Setters**: `TextDecoderPrototype__encodingGetterWrap` -- **Initialization Functions**: `finishCreation` methods for setting up the class - -### 3. Zig Bindings - -- **External Function Declarations**: - - ```zig - extern fn TextDecoderPrototype__decode(*TextDecoder, *JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSC.EncodedJSValue; - ``` - -- **Cached Value Accessors**: - - ```zig - pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue { ... } - pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { ... } - ``` - -- **Constructor Helpers**: - ```zig - pub fn create(globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { ... } - ``` - -### 4. GC Integration - -- **Memory Cost Calculation**: `estimatedSize` method -- **Child Visitor Methods**: `visitChildrenImpl` and `visitAdditionalChildren` -- **Heap Analysis**: `analyzeHeap` for debugging memory issues - -This architecture makes it possible to implement high-performance native functionality in Zig while exposing a clean, idiomatic JavaScript API to users. diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml deleted file mode 100644 index 3ab51a4309..0000000000 --- a/.github/workflows/claude.yml +++ /dev/null @@ -1,66 +0,0 @@ -name: Claude Code - -on: - issue_comment: - types: [created] - pull_request_review_comment: - types: [created] - issues: - types: [opened, assigned] - pull_request_review: - types: [submitted] - -jobs: - claude: - if: | - github.repository == 'oven-sh/bun' && - ( - (github.event_name == 'issue_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) || - (github.event_name == 'pull_request_review_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) || - (github.event_name == 'pull_request_review' && (github.event.review.author_association == 'MEMBER' || github.event.review.author_association == 'OWNER' || github.event.review.author_association == 'COLLABORATOR')) || - (github.event_name == 'issues' && (github.event.issue.author_association == 'MEMBER' || github.event.issue.author_association == 'OWNER' || github.event.issue.author_association == 'COLLABORATOR')) - ) && - (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) || - (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) || - (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) || - (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude'))) - runs-on: claude - env: - IS_SANDBOX: 1 - container: - image: localhost:5000/claude-bun:latest - options: --privileged --user 1000:1000 - permissions: - contents: read - id-token: write - steps: - - name: Checkout repository - working-directory: 
/workspace/bun - run: | - git config --global user.email "claude-bot@bun.sh" && \ - git config --global user.name "Claude Bot" && \ - git config --global url."git@github.com:".insteadOf "https://github.com/" && \ - git config --global url."git@github.com:".insteadOf "http://github.com/" && \ - git config --global --add safe.directory /workspace/bun && \ - git config --global push.default current && \ - git config --global pull.rebase true && \ - git config --global init.defaultBranch main && \ - git config --global core.editor "vim" && \ - git config --global color.ui auto && \ - git config --global fetch.prune true && \ - git config --global diff.colorMoved zebra && \ - git config --global merge.conflictStyle diff3 && \ - git config --global rerere.enabled true && \ - git config --global core.autocrlf input - git fetch origin ${{ github.event.pull_request.head.sha }} - git checkout ${{ github.event.pull_request.head.ref }} - git reset --hard origin/${{ github.event.pull_request.head.ref }} - - name: Run Claude Code - id: claude - uses: anthropics/claude-code-action@v1 - with: - timeout_minutes: "180" - claude_args: | - --dangerously-skip-permissions - --system-prompt "You are working on the Bun codebase" - claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} diff --git a/.github/workflows/codex-test-sync.yml b/.github/workflows/codex-test-sync.yml deleted file mode 100644 index 6da63a1911..0000000000 --- a/.github/workflows/codex-test-sync.yml +++ /dev/null @@ -1,58 +0,0 @@ -name: Codex Test Sync - -on: - pull_request: - types: [labeled, opened] - -env: - BUN_VERSION: "1.2.15" - -jobs: - sync-node-tests: - runs-on: ubuntu-latest - if: | - (github.event.action == 'labeled' && github.event.label.name == 'codex') || - (github.event.action == 'opened' && contains(github.event.pull_request.labels.*.name, 'codex')) || - contains(github.head_ref, 'codex') - permissions: - contents: write - pull-requests: write - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - token: ${{ secrets.GITHUB_TOKEN }} - fetch-depth: 0 - - - name: Setup Bun - uses: ./.github/actions/setup-bun - with: - bun-version: ${{ env.BUN_VERSION }} - - - name: Get changed files - id: changed-files - uses: tj-actions/changed-files@v44 - with: - files: | - test/js/node/test/parallel/**/*.{js,mjs,ts} - test/js/node/test/sequential/**/*.{js,mjs,ts} - - - name: Sync tests - if: steps.changed-files.outputs.any_changed == 'true' - shell: bash - run: | - echo "Changed test files:" - echo "${{ steps.changed-files.outputs.all_changed_files }}" - - # Process each changed test file - for file in ${{ steps.changed-files.outputs.all_changed_files }}; do - # Extract test name from file path - test_name=$(basename "$file" | sed 's/\.[^.]*$//') - echo "Syncing test: $test_name" - bun node:test:cp "$test_name" - done - - - name: Commit changes - uses: stefanzweifel/git-auto-commit-action@v5 - with: - commit_message: "Sync Node.js tests with upstream" diff --git a/.github/workflows/comment-lint.yml b/.github/workflows/comment-lint.yml.disabled similarity index 100% rename from .github/workflows/comment-lint.yml rename to .github/workflows/comment-lint.yml.disabled diff --git a/.github/workflows/labeled.yml b/.github/workflows/labeled.yml.disabled similarity index 100% rename from .github/workflows/labeled.yml rename to .github/workflows/labeled.yml.disabled diff --git a/.gitignore b/.gitignore index 4b95245f9c..528cf0fa5f 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ .ninja_deps .ninja_log .npm +.npmrc .npm.gz 
.parcel-cache .swcrc diff --git a/.vscode/settings.json b/.vscode/settings.json index c2c967c663..826070d42b 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -27,18 +27,22 @@ "git.ignoreLimitWarning": true, // Zig - "zig.initialSetupDone": true, - "zig.buildOption": "build", + // "zig.initialSetupDone": true, + // "zig.buildOption": "build", "zig.zls.zigLibPath": "${workspaceFolder}/vendor/zig/lib", - "zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen", "--watch", "-fincremental"], - "zig.zls.buildOnSaveStep": "check", + "zig.buildOnSaveArgs": [ + "-Dgenerated-code=./build/debug/codegen", + "--watch", + "-fincremental" + ], + // "zig.zls.buildOnSaveStep": "check", // "zig.zls.enableBuildOnSave": true, // "zig.buildOnSave": true, - "zig.buildFilePath": "${workspaceFolder}/build.zig", + // "zig.buildFilePath": "${workspaceFolder}/build.zig", "zig.path": "${workspaceFolder}/vendor/zig/zig.exe", "zig.zls.path": "${workspaceFolder}/vendor/zig/zls.exe", "zig.formattingProvider": "zls", - "zig.zls.enableInlayHints": false, + // "zig.zls.enableInlayHints": false, "[zig]": { "editor.tabSize": 4, "editor.useTabStops": false, diff --git a/CLAUDE.md b/CLAUDE.md index 526996c187..767308d653 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -6,7 +6,7 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed - **Build Bun**: `bun bd` - Creates a debug build at `./build/debug/bun-debug` - - **CRITICAL**: no need for a timeout, the build is really fast! + - **CRITICAL**: do not set a timeout when running `bun bd` - **Run tests with your debug build**: `bun bd test ` - **CRITICAL**: Never use `bun test` directly - it won't include your changes - **Run any command with debug build**: `bun bd ` @@ -94,7 +94,7 @@ test("(multi-file test) my feature", async () => { - Always use `port: 0`. Do not hardcode ports. Do not use your own random port number function. - Use `normalizeBunSnapshot` to normalize snapshot output of the test. -- NEVER write tests that check for no "panic" or "uncaught exception" or similar in the test output. That is NOT a valid test. +- NEVER write tests that check for no "panic" or "uncaught exception" or similar in the test output. These tests will never fail in CI. - Use `tempDir` from `"harness"` to create a temporary directory. **Do not** use `tmpdirSync` or `fs.mkdtempSync` to create temporary directories. - When spawning processes, tests should expect(stdout).toBe(...) BEFORE expect(exitCode).toBe(0). This gives you a more useful error message on test failure. - **CRITICAL**: Do not write flaky tests. Do not use `setTimeout` in tests. Instead, `await` the condition to be met. You are not testing the TIME PASSING, you are testing the CONDITION. 
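To make the last two testing points concrete, here is a minimal sketch (not part of the diff) of a test that awaits the condition itself rather than a timeout, and checks stdout before the exit code; it assumes the `bunExe`/`bunEnv` helpers from `harness` that are used elsewhere in this repository, and the `console.log('done')` one-liner is purely illustrative:

```js
import { test, expect } from "bun:test";
import { spawn } from "bun";
import { bunExe, bunEnv } from "harness";

test("process prints and exits cleanly", async () => {
  const proc = spawn({
    cmd: [bunExe(), "-e", "console.log('done')"],
    env: bunEnv,
    stdout: "pipe",
    stderr: "pipe",
  });

  // Await the actual conditions (process exit + stream completion) instead of sleeping.
  const [exitCode, stdout] = await Promise.all([proc.exited, new Response(proc.stdout).text()]);

  // Assert stdout before the exit code for a more useful failure message.
  expect(stdout.trim()).toBe("done");
  expect(exitCode).toBe(0);
});
```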
diff --git a/CMakeLists.txt b/CMakeLists.txt index 8fe9a83f3f..f30ad577c1 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -47,15 +47,7 @@ include(SetupEsbuild) include(SetupZig) include(SetupRust) -find_program(SCCACHE_PROGRAM sccache) -if(SCCACHE_PROGRAM AND NOT DEFINED ENV{NO_SCCACHE}) - include(SetupSccache) -else() - find_program(CCACHE_PROGRAM ccache) - if(CCACHE_PROGRAM) - include(SetupCcache) - endif() -endif() +include(SetupCcache) # Generate dependency versions header include(GenerateDependencyVersions) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9d7785abb1..750eb17a62 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -23,7 +23,7 @@ Using your system's package manager, install Bun's dependencies: {% codetabs group="os" %} ```bash#macOS (Homebrew) -$ brew install automake cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby sccache +$ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby ``` ```bash#Ubuntu/Debian @@ -65,43 +65,28 @@ $ brew install bun {% /codetabs %} -### Optional: Install `sccache` +### Optional: Install `ccache` -sccache is used to cache compilation artifacts, significantly speeding up builds. It must be installed with S3 support: +ccache is used to cache compilation artifacts, significantly speeding up builds: ```bash # For macOS -$ brew install sccache +$ brew install ccache -# For Linux. Note that the version in your package manager may not have S3 support. -$ cargo install sccache --features=s3 +# For Ubuntu/Debian +$ sudo apt install ccache + +# For Arch +$ sudo pacman -S ccache + +# For Fedora +$ sudo dnf install ccache + +# For openSUSE +$ sudo zypper install ccache ``` -This will install `sccache` with S3 support. Our build scripts will automatically detect and use `sccache` with our shared S3 cache. **Note**: Not all versions of `sccache` are compiled with S3 support, hence we recommend installing it via `cargo`. - -#### Registering AWS Credentials for `sccache` (Core Developers Only) - -Core developers have write access to the shared S3 cache. To enable write access, you must log in with AWS credentials. The easiest way to do this is to use the [`aws` CLI](https://aws.amazon.com/cli/) and invoke [`aws configure` to provide your AWS security info](https://docs.aws.amazon.com/cli/latest/reference/configure/). - -The `cmake` scripts should automatically detect your AWS credentials from the environment or the `~/.aws/credentials` file. - -
-<details>
-  <summary>Logging in to the `aws` CLI</summary>
-
-  1. Install the AWS CLI by following [the official guide](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html).
-  2. Log in to your AWS account console. A team member should provide you with your credentials.
-  3. Click your name in the top right > Security credentials.
-  4. Scroll to "Access keys" and create a new access key.
-  5. Run `aws configure` in your terminal and provide the access key ID and secret access key when prompted.
-</details>
-
-<details>
-  <summary>Common Issues You May Encounter</summary>
-
-  - To confirm that the cache is being used, you can use the `sccache --show-stats` command right after a build. This will expose very useful statistics, including cache hits/misses.
-  - If you have multiple AWS profiles configured, ensure that the correct profile is set in the `AWS_PROFILE` environment variable.
-  - `sccache` follows a server-client model. If you run into weird issues where `sccache` refuses to use S3, even though you have AWS credentials configured, try killing any running `sccache` servers with `sccache --stop-server` and then re-running the build.
-</details>
+Our build scripts will automatically detect and use `ccache` if available. You can check cache statistics with `ccache --show-stats`. ## Install LLVM @@ -201,7 +186,7 @@ Bun generally takes about 2.5 minutes to compile a debug build when there are Zi - Batch up your changes - Ensure zls is running with incremental watching for LSP errors (if you use VSCode and install Zig and run `bun run build` once to download Zig, this should just work) - Prefer using the debugger ("CodeLLDB" in VSCode) to step through the code. -- Use debug logs. `BUN_DEBUG_=1` will enable debug logging for the corresponding `Output.scoped(., .hidden)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug lgos into a file, `BUN_DEBUG=.log`. Debug logs are aggressively removed in release builds. +- Use debug logs. `BUN_DEBUG_=1` will enable debug logging for the corresponding `Output.scoped(., .hidden)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, `BUN_DEBUG=.log`. Debug logs are aggressively removed in release builds. - src/js/\*\*.ts changes are pretty much instant to rebuild. C++ changes are a bit slower, but still much faster than the Zig code (Zig is one compilation unit, C++ is many). ## Code generation scripts diff --git a/LATEST b/LATEST index 1892b92676..80e78df683 100644 --- a/LATEST +++ b/LATEST @@ -1 +1 @@ -1.3.2 +1.3.5 diff --git a/README.md b/README.md index 3c845722d1..82c0626611 100644 --- a/README.md +++ b/README.md @@ -54,7 +54,7 @@ Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64). curl -fsSL https://bun.com/install | bash # on windows -powershell -c "irm bun.com/install.ps1 | iex" +powershell -c "irm bun.sh/install.ps1 | iex" # with npm npm install -g bun @@ -104,13 +104,13 @@ bun upgrade --canary - [File types (Loaders)](https://bun.com/docs/runtime/loaders) - [TypeScript](https://bun.com/docs/runtime/typescript) - [JSX](https://bun.com/docs/runtime/jsx) - - [Environment variables](https://bun.com/docs/runtime/env) + - [Environment variables](https://bun.com/docs/runtime/environment-variables) - [Bun APIs](https://bun.com/docs/runtime/bun-apis) - [Web APIs](https://bun.com/docs/runtime/web-apis) - - [Node.js compatibility](https://bun.com/docs/runtime/nodejs-apis) + - [Node.js compatibility](https://bun.com/docs/runtime/nodejs-compat) - [Single-file executable](https://bun.com/docs/bundler/executables) - [Plugins](https://bun.com/docs/runtime/plugins) - - [Watch mode / Hot Reloading](https://bun.com/docs/runtime/hot) + - [Watch mode / Hot Reloading](https://bun.com/docs/runtime/watch-mode) - [Module resolution](https://bun.com/docs/runtime/modules) - [Auto-install](https://bun.com/docs/runtime/autoimport) - [bunfig.toml](https://bun.com/docs/runtime/bunfig) diff --git a/bench/bun.lock b/bench/bun.lock index e9f41f8407..ba5ad596f1 100644 --- a/bench/bun.lock +++ b/bench/bun.lock @@ -1,5 +1,6 @@ { "lockfileVersion": 1, + "configVersion": 0, "workspaces": { "": { "name": "bench", diff --git a/bench/install/bun.lock b/bench/install/bun.lock index 2f2b0b1451..84ca23aed8 100644 --- a/bench/install/bun.lock +++ b/bench/install/bun.lock @@ -1,5 +1,6 @@ { "lockfileVersion": 1, + "configVersion": 0, "workspaces": { "": { "name": "installbench", @@ -12,7 +13,7 @@ "@trpc/server": "^11.0.0", "drizzle-orm": "^0.41.0", "esbuild": "^0.25.11", - "next": "^15.2.3", + "next": "15.5.7", "next-auth": "5.0.0-beta.25", "postgres": 
"^3.4.4", "react": "^19.0.0", @@ -175,23 +176,23 @@ "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "3.1.2", "@jridgewell/sourcemap-codec": "1.5.5" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="], - "@next/env": ["@next/env@15.5.6", "", {}, "sha512-3qBGRW+sCGzgbpc5TS1a0p7eNxnOarGVQhZxfvTdnV0gFI61lX7QNtQ4V1TSREctXzYn5NetbUsLvyqwLFJM6Q=="], + "@next/env": ["@next/env@15.5.7", "", {}, "sha512-4h6Y2NyEkIEN7Z8YxkA27pq6zTkS09bUSYC0xjd0NpwFxjnIKeZEeH591o5WECSmjpUhLn3H2QLJcDye3Uzcvg=="], - "@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@15.5.6", "", { "os": "darwin", "cpu": "arm64" }, "sha512-ES3nRz7N+L5Umz4KoGfZ4XX6gwHplwPhioVRc25+QNsDa7RtUF/z8wJcbuQ2Tffm5RZwuN2A063eapoJ1u4nPg=="], + "@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@15.5.7", "", { "os": "darwin", "cpu": "arm64" }, "sha512-IZwtxCEpI91HVU/rAUOOobWSZv4P2DeTtNaCdHqLcTJU4wdNXgAySvKa/qJCgR5m6KI8UsKDXtO2B31jcaw1Yw=="], - "@next/swc-darwin-x64": ["@next/swc-darwin-x64@15.5.6", "", { "os": "darwin", "cpu": "x64" }, "sha512-JIGcytAyk9LQp2/nuVZPAtj8uaJ/zZhsKOASTjxDug0SPU9LAM3wy6nPU735M1OqacR4U20LHVF5v5Wnl9ptTA=="], + "@next/swc-darwin-x64": ["@next/swc-darwin-x64@15.5.7", "", { "os": "darwin", "cpu": "x64" }, "sha512-UP6CaDBcqaCBuiq/gfCEJw7sPEoX1aIjZHnBWN9v9qYHQdMKvCKcAVs4OX1vIjeE+tC5EIuwDTVIoXpUes29lg=="], - "@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@15.5.6", "", { "os": "linux", "cpu": "arm64" }, "sha512-qvz4SVKQ0P3/Im9zcS2RmfFL/UCQnsJKJwQSkissbngnB/12c6bZTCB0gHTexz1s6d/mD0+egPKXAIRFVS7hQg=="], + "@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@15.5.7", "", { "os": "linux", "cpu": "arm64" }, "sha512-NCslw3GrNIw7OgmRBxHtdWFQYhexoUCq+0oS2ccjyYLtcn1SzGzeM54jpTFonIMUjNbHmpKpziXnpxhSWLcmBA=="], - "@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@15.5.6", "", { "os": "linux", "cpu": "arm64" }, "sha512-FsbGVw3SJz1hZlvnWD+T6GFgV9/NYDeLTNQB2MXoPN5u9VA9OEDy6fJEfePfsUKAhJufFbZLgp0cPxMuV6SV0w=="], + "@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@15.5.7", "", { "os": "linux", "cpu": "arm64" }, "sha512-nfymt+SE5cvtTrG9u1wdoxBr9bVB7mtKTcj0ltRn6gkP/2Nu1zM5ei8rwP9qKQP0Y//umK+TtkKgNtfboBxRrw=="], - "@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@15.5.6", "", { "os": "linux", "cpu": "x64" }, "sha512-3QnHGFWlnvAgyxFxt2Ny8PTpXtQD7kVEeaFat5oPAHHI192WKYB+VIKZijtHLGdBBvc16tiAkPTDmQNOQ0dyrA=="], + "@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@15.5.7", "", { "os": "linux", "cpu": "x64" }, "sha512-hvXcZvCaaEbCZcVzcY7E1uXN9xWZfFvkNHwbe/n4OkRhFWrs1J1QV+4U1BN06tXLdaS4DazEGXwgqnu/VMcmqw=="], - "@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@15.5.6", "", { "os": "linux", "cpu": "x64" }, "sha512-OsGX148sL+TqMK9YFaPFPoIaJKbFJJxFzkXZljIgA9hjMjdruKht6xDCEv1HLtlLNfkx3c5w2GLKhj7veBQizQ=="], + "@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@15.5.7", "", { "os": "linux", "cpu": "x64" }, "sha512-4IUO539b8FmF0odY6/SqANJdgwn1xs1GkPO5doZugwZ3ETF6JUdckk7RGmsfSf7ws8Qb2YB5It33mvNL/0acqA=="], - "@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@15.5.6", "", { "os": "win32", "cpu": "arm64" }, "sha512-ONOMrqWxdzXDJNh2n60H6gGyKed42Ieu6UTVPZteXpuKbLZTH4G4eBMsr5qWgOBA+s7F+uB4OJbZnrkEDnZ5Fg=="], + "@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@15.5.7", "", { "os": "win32", "cpu": "arm64" }, "sha512-CpJVTkYI3ZajQkC5vajM7/ApKJUOlm6uP4BknM3XKvJ7VXAvCqSjSLmM0LKdYzn6nBJVSjdclx8nYJSa3xlTgQ=="], - "@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@15.5.6", "", { "os": "win32", 
"cpu": "x64" }, "sha512-pxK4VIjFRx1MY92UycLOOw7dTdvccWsNETQ0kDHkBlcFH1GrTLUjSiHU1ohrznnux6TqRHgv5oflhfIWZwVROQ=="], + "@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@15.5.7", "", { "os": "win32", "cpu": "x64" }, "sha512-gMzgBX164I6DN+9/PGA+9dQiwmTkE4TloBNx8Kv9UiGARsr9Nba7IpcBRA1iTV9vwlYnrE3Uy6I7Aj6qLjQuqw=="], "@panva/hkdf": ["@panva/hkdf@1.2.1", "", {}, "sha512-6oclG6Y3PiDFcoyk8srjLfVKyMfVCKJ27JwNPViuXziFpmdz+MZnZN/aKY0JGXgYuO/VghU0jcOAZgWXZ1Dmrw=="], @@ -323,7 +324,7 @@ "nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="], - "next": ["next@15.5.6", "", { "dependencies": { "@next/env": "15.5.6", "@swc/helpers": "0.5.15", "caniuse-lite": "1.0.30001752", "postcss": "8.4.31", "styled-jsx": "5.1.6" }, "optionalDependencies": { "@next/swc-darwin-arm64": "15.5.6", "@next/swc-darwin-x64": "15.5.6", "@next/swc-linux-arm64-gnu": "15.5.6", "@next/swc-linux-arm64-musl": "15.5.6", "@next/swc-linux-x64-gnu": "15.5.6", "@next/swc-linux-x64-musl": "15.5.6", "@next/swc-win32-arm64-msvc": "15.5.6", "@next/swc-win32-x64-msvc": "15.5.6", "sharp": "0.34.4" }, "peerDependencies": { "react": "19.2.0", "react-dom": "19.2.0" }, "bin": { "next": "dist/bin/next" } }, "sha512-zTxsnI3LQo3c9HSdSf91O1jMNsEzIXDShXd4wVdg9y5shwLqBXi4ZtUUJyB86KGVSJLZx0PFONvO54aheGX8QQ=="], + "next": ["next@15.5.7", "", { "dependencies": { "@next/env": "15.5.7", "@swc/helpers": "0.5.15", "caniuse-lite": "^1.0.30001579", "postcss": "8.4.31", "styled-jsx": "5.1.6" }, "optionalDependencies": { "@next/swc-darwin-arm64": "15.5.7", "@next/swc-darwin-x64": "15.5.7", "@next/swc-linux-arm64-gnu": "15.5.7", "@next/swc-linux-arm64-musl": "15.5.7", "@next/swc-linux-x64-gnu": "15.5.7", "@next/swc-linux-x64-musl": "15.5.7", "@next/swc-win32-arm64-msvc": "15.5.7", "@next/swc-win32-x64-msvc": "15.5.7", "sharp": "^0.34.3" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", "@playwright/test": "^1.51.1", "babel-plugin-react-compiler": "*", "react": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "react-dom": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "sass": "^1.3.0" }, "optionalPeers": ["@opentelemetry/api", "@playwright/test", "babel-plugin-react-compiler", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-+t2/0jIJ48kUpGKkdlhgkv+zPTEOoXyr60qXe68eB/pl3CMJaLeIGjzp5D6Oqt25hCBiBTt8wEeeAzfJvUKnPQ=="], "next-auth": ["next-auth@5.0.0-beta.25", "", { "dependencies": { "@auth/core": "0.37.2" }, "peerDependencies": { "next": "15.5.6", "react": "19.2.0" } }, "sha512-2dJJw1sHQl2qxCrRk+KTQbeH+izFbGFPuJj5eGgBZFYyiYYtvlrBeUw1E/OJJxTRjuxbSYGnCTkUIRsIIW0bog=="], diff --git a/bench/install/package.json b/bench/install/package.json index 9db93cd9c4..679bb79696 100644 --- a/bench/install/package.json +++ b/bench/install/package.json @@ -26,7 +26,7 @@ "@trpc/server": "^11.0.0", "drizzle-orm": "^0.41.0", "esbuild": "^0.25.11", - "next": "^15.2.3", + "next": "15.5.7", "next-auth": "5.0.0-beta.25", "postgres": "^3.4.4", "react": "^19.0.0", diff --git a/bench/runner.mjs b/bench/runner.mjs index 9f6bcee16f..b9715232f0 100644 --- a/bench/runner.mjs +++ b/bench/runner.mjs @@ -13,7 +13,4 @@ export function run(opts = {}) { } export const bench = Mitata.bench; - -export function group(_name, fn) { - return Mitata.group(fn); -} +export const group = Mitata.group; diff --git a/bench/snippets/array-of.js b/bench/snippets/array-of.js new file mode 100644 index 0000000000..51e0a47bd5 --- /dev/null +++ b/bench/snippets/array-of.js @@ -0,0 
+1,335 @@ +import { bench, run } from "../runner.mjs"; + +let sink; + +// Integers +bench("int: Array.of(1,2,3,4,5)", () => { + sink = Array.of(1, 2, 3, 4, 5); +}); + +bench("int: Array.of(100 elements)", () => { + sink = Array.of( + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + 34, + 35, + 36, + 37, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 46, + 47, + 48, + 49, + 50, + 51, + 52, + 53, + 54, + 55, + 56, + 57, + 58, + 59, + 60, + 61, + 62, + 63, + 64, + 65, + 66, + 67, + 68, + 69, + 70, + 71, + 72, + 73, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 82, + 83, + 84, + 85, + 86, + 87, + 88, + 89, + 90, + 91, + 92, + 93, + 94, + 95, + 96, + 97, + 98, + 99, + ); +}); + +// Doubles +bench("double: Array.of(1.1,2.2,3.3,4.4,5.5)", () => { + sink = Array.of(1.1, 2.2, 3.3, 4.4, 5.5); +}); + +bench("double: Array.of(100 elements)", () => { + sink = Array.of( + 0.1, + 1.1, + 2.1, + 3.1, + 4.1, + 5.1, + 6.1, + 7.1, + 8.1, + 9.1, + 10.1, + 11.1, + 12.1, + 13.1, + 14.1, + 15.1, + 16.1, + 17.1, + 18.1, + 19.1, + 20.1, + 21.1, + 22.1, + 23.1, + 24.1, + 25.1, + 26.1, + 27.1, + 28.1, + 29.1, + 30.1, + 31.1, + 32.1, + 33.1, + 34.1, + 35.1, + 36.1, + 37.1, + 38.1, + 39.1, + 40.1, + 41.1, + 42.1, + 43.1, + 44.1, + 45.1, + 46.1, + 47.1, + 48.1, + 49.1, + 50.1, + 51.1, + 52.1, + 53.1, + 54.1, + 55.1, + 56.1, + 57.1, + 58.1, + 59.1, + 60.1, + 61.1, + 62.1, + 63.1, + 64.1, + 65.1, + 66.1, + 67.1, + 68.1, + 69.1, + 70.1, + 71.1, + 72.1, + 73.1, + 74.1, + 75.1, + 76.1, + 77.1, + 78.1, + 79.1, + 80.1, + 81.1, + 82.1, + 83.1, + 84.1, + 85.1, + 86.1, + 87.1, + 88.1, + 89.1, + 90.1, + 91.1, + 92.1, + 93.1, + 94.1, + 95.1, + 96.1, + 97.1, + 98.1, + 99.1, + ); +}); + +// Objects +bench("object: Array.of(obj x5)", () => { + sink = Array.of({ a: 1 }, { a: 2 }, { a: 3 }, { a: 4 }, { a: 5 }); +}); + +bench("object: Array.of(100 elements)", () => { + sink = Array.of( + { a: 0 }, + { a: 1 }, + { a: 2 }, + { a: 3 }, + { a: 4 }, + { a: 5 }, + { a: 6 }, + { a: 7 }, + { a: 8 }, + { a: 9 }, + { a: 10 }, + { a: 11 }, + { a: 12 }, + { a: 13 }, + { a: 14 }, + { a: 15 }, + { a: 16 }, + { a: 17 }, + { a: 18 }, + { a: 19 }, + { a: 20 }, + { a: 21 }, + { a: 22 }, + { a: 23 }, + { a: 24 }, + { a: 25 }, + { a: 26 }, + { a: 27 }, + { a: 28 }, + { a: 29 }, + { a: 30 }, + { a: 31 }, + { a: 32 }, + { a: 33 }, + { a: 34 }, + { a: 35 }, + { a: 36 }, + { a: 37 }, + { a: 38 }, + { a: 39 }, + { a: 40 }, + { a: 41 }, + { a: 42 }, + { a: 43 }, + { a: 44 }, + { a: 45 }, + { a: 46 }, + { a: 47 }, + { a: 48 }, + { a: 49 }, + { a: 50 }, + { a: 51 }, + { a: 52 }, + { a: 53 }, + { a: 54 }, + { a: 55 }, + { a: 56 }, + { a: 57 }, + { a: 58 }, + { a: 59 }, + { a: 60 }, + { a: 61 }, + { a: 62 }, + { a: 63 }, + { a: 64 }, + { a: 65 }, + { a: 66 }, + { a: 67 }, + { a: 68 }, + { a: 69 }, + { a: 70 }, + { a: 71 }, + { a: 72 }, + { a: 73 }, + { a: 74 }, + { a: 75 }, + { a: 76 }, + { a: 77 }, + { a: 78 }, + { a: 79 }, + { a: 80 }, + { a: 81 }, + { a: 82 }, + { a: 83 }, + { a: 84 }, + { a: 85 }, + { a: 86 }, + { a: 87 }, + { a: 88 }, + { a: 89 }, + { a: 90 }, + { a: 91 }, + { a: 92 }, + { a: 93 }, + { a: 94 }, + { a: 95 }, + { a: 96 }, + { a: 97 }, + { a: 98 }, + { a: 99 }, + ); +}); + +await run(); diff --git a/bench/snippets/compression-streams.mjs b/bench/snippets/compression-streams.mjs new file mode 100644 index 0000000000..b8f3d34cd5 --- /dev/null +++ b/bench/snippets/compression-streams.mjs @@ -0,0 +1,156 
@@ +import { bench, group, run } from "../runner.mjs"; + +const runAll = !process.argv.includes("--simple"); + +const small = new Uint8Array(1024); +const medium = new Uint8Array(1024 * 100); +const large = new Uint8Array(1024 * 1024); + +for (let i = 0; i < large.length; i++) { + const value = Math.floor(Math.sin(i / 100) * 128 + 128); + if (i < small.length) small[i] = value; + if (i < medium.length) medium[i] = value; + large[i] = value; +} + +const format = new Intl.NumberFormat("en-US", { notation: "compact", unit: "byte" }); + +async function compress(data, format) { + const cs = new CompressionStream(format); + const writer = cs.writable.getWriter(); + const reader = cs.readable.getReader(); + + writer.write(data); + writer.close(); + + const chunks = []; + while (true) { + const { done, value } = await reader.read(); + if (done) break; + chunks.push(value); + } + + const result = new Uint8Array(chunks.reduce((acc, chunk) => acc + chunk.length, 0)); + let offset = 0; + for (const chunk of chunks) { + result.set(chunk, offset); + offset += chunk.length; + } + return result; +} + +async function decompress(data, format) { + const ds = new DecompressionStream(format); + const writer = ds.writable.getWriter(); + const reader = ds.readable.getReader(); + + writer.write(data); + writer.close(); + + const chunks = []; + while (true) { + const { done, value } = await reader.read(); + if (done) break; + chunks.push(value); + } + + const result = new Uint8Array(chunks.reduce((acc, chunk) => acc + chunk.length, 0)); + let offset = 0; + for (const chunk of chunks) { + result.set(chunk, offset); + offset += chunk.length; + } + return result; +} + +async function roundTrip(data, format) { + const compressed = await compress(data, format); + return await decompress(compressed, format); +} + +const formats = ["deflate", "gzip", "deflate-raw"]; +if (runAll) formats.push("brotli", "zstd"); + +// Small data benchmarks (1KB) +group(`CompressionStream ${format.format(small.length)}`, () => { + for (const fmt of formats) { + try { + new CompressionStream(fmt); + bench(fmt, async () => await compress(small, fmt)); + } catch (e) { + // Skip unsupported formats + } + } +}); + +// Medium data benchmarks (100KB) +group(`CompressionStream ${format.format(medium.length)}`, () => { + for (const fmt of formats) { + try { + new CompressionStream(fmt); + bench(fmt, async () => await compress(medium, fmt)); + } catch (e) {} + } +}); + +// Large data benchmarks (1MB) +group(`CompressionStream ${format.format(large.length)}`, () => { + for (const fmt of formats) { + try { + new CompressionStream(fmt); + bench(fmt, async () => await compress(large, fmt)); + } catch (e) { + // Skip unsupported formats + } + } +}); + +const compressedData = {}; +for (const fmt of formats) { + try { + compressedData[fmt] = { + small: await compress(small, fmt), + medium: await compress(medium, fmt), + large: await compress(large, fmt), + }; + } catch (e) { + // Skip unsupported formats + } +} + +group(`DecompressionStream ${format.format(small.length)}`, () => { + for (const fmt of formats) { + if (compressedData[fmt]) { + bench(fmt, async () => await decompress(compressedData[fmt].small, fmt)); + } + } +}); + +group(`DecompressionStream ${format.format(medium.length)}`, () => { + for (const fmt of formats) { + if (compressedData[fmt]) { + bench(fmt, async () => await decompress(compressedData[fmt].medium, fmt)); + } + } +}); + +group(`DecompressionStream ${format.format(large.length)}`, () => { + for (const fmt of formats) { + if 
(compressedData[fmt]) { + bench(fmt, async () => await decompress(compressedData[fmt].large, fmt)); + } + } +}); + +group(`roundtrip ${format.format(large.length)}`, () => { + for (const fmt of formats) { + try { + new CompressionStream(fmt); + bench(fmt, async () => await roundTrip(large, fmt)); + } catch (e) { + // Skip unsupported formats + } + } +}); + +await run(); diff --git a/bench/snippets/object-hasown.mjs b/bench/snippets/object-hasown.mjs new file mode 100644 index 0000000000..dd5eb84df8 --- /dev/null +++ b/bench/snippets/object-hasown.mjs @@ -0,0 +1,57 @@ +import { bench, run } from "../runner.mjs"; + +const obj = { a: 1, b: 2, c: 3 }; +const objDeep = { a: 1, b: 2, c: 3, d: 4, e: 5, f: 6, g: 7, h: 8 }; +const sym = Symbol("test"); +const objWithSymbol = { [sym]: 1, a: 2 }; + +const objs = [ + { f: 50 }, + { f: 50, g: 70 }, + { g: 50, f: 70 }, + { h: 50, f: 70 }, + { z: 50, f: 70 }, + { k: 50, f: 70 }, +]; + +bench("Object.hasOwn - hit", () => { + return Object.hasOwn(obj, "a"); +}); + +bench("Object.hasOwn - miss", () => { + return Object.hasOwn(obj, "z"); +}); + +bench("Object.hasOwn - symbol hit", () => { + return Object.hasOwn(objWithSymbol, sym); +}); + +bench("Object.hasOwn - symbol miss", () => { + return Object.hasOwn(objWithSymbol, Symbol("other")); +}); + +bench("Object.hasOwn - multiple shapes", () => { + let result = true; + for (let i = 0; i < objs.length; i++) { + result = Object.hasOwn(objs[i], "f") && result; + } + return result; +}); + +bench("Object.prototype.hasOwnProperty - hit", () => { + return obj.hasOwnProperty("a"); +}); + +bench("Object.prototype.hasOwnProperty - miss", () => { + return obj.hasOwnProperty("z"); +}); + +bench("in operator - hit", () => { + return "a" in obj; +}); + +bench("in operator - miss", () => { + return "z" in obj; +}); + +await run(); diff --git a/bench/snippets/promise-race.mjs b/bench/snippets/promise-race.mjs new file mode 100644 index 0000000000..8c12f12bc2 --- /dev/null +++ b/bench/snippets/promise-race.mjs @@ -0,0 +1,7 @@ +import { bench, run } from "../runner.mjs"; + +bench("Promise.race([p1, p2])", async function () { + return await Promise.race([Promise.resolve(1), Promise.resolve(2)]); +}); + +await run(); diff --git a/bench/snippets/response-json.mjs b/bench/snippets/response-json.mjs index 2cd20523b6..28cad6e6c7 100644 --- a/bench/snippets/response-json.mjs +++ b/bench/snippets/response-json.mjs @@ -112,12 +112,40 @@ const obj = { }, }; -bench("Response.json(obj)", async () => { +const smallObj = { id: 1, name: "test" }; + +const arrayObj = { + items: Array.from({ length: 100 }, (_, i) => ({ id: i, value: `item-${i}` })), +}; + +bench("Response.json(obj)", () => { return Response.json(obj); }); -bench("Response.json(obj).json()", async () => { - return await Response.json(obj).json(); +bench("new Response(JSON.stringify(obj))", () => { + return new Response(JSON.stringify(obj), { + headers: { "Content-Type": "application/json" }, + }); +}); + +bench("Response.json(smallObj)", () => { + return Response.json(smallObj); +}); + +bench("new Response(JSON.stringify(smallObj))", () => { + return new Response(JSON.stringify(smallObj), { + headers: { "Content-Type": "application/json" }, + }); +}); + +bench("Response.json(arrayObj)", () => { + return Response.json(arrayObj); +}); + +bench("new Response(JSON.stringify(arrayObj))", () => { + return new Response(JSON.stringify(arrayObj), { + headers: { "Content-Type": "application/json" }, + }); }); await run(); diff --git a/bench/snippets/string-includes.mjs 
b/bench/snippets/string-includes.mjs new file mode 100644 index 0000000000..daf912ecf1 --- /dev/null +++ b/bench/snippets/string-includes.mjs @@ -0,0 +1,34 @@ +import { bench, run } from "../runner.mjs"; + +const shortStr = "The quick brown fox jumps over the lazy dog"; +const longStr = shortStr.repeat(100); + +bench("String.includes - short, hit (middle)", () => { + return shortStr.includes("jumps"); +}); + +bench("String.includes - short, hit (start)", () => { + return shortStr.includes("The"); +}); + +bench("String.includes - short, hit (end)", () => { + return shortStr.includes("dog"); +}); + +bench("String.includes - short, miss", () => { + return shortStr.includes("cat"); +}); + +bench("String.includes - long, hit (middle)", () => { + return longStr.includes("jumps"); +}); + +bench("String.includes - long, miss", () => { + return longStr.includes("cat"); +}); + +bench("String.includes - with position", () => { + return shortStr.includes("fox", 10); +}); + +await run(); diff --git a/bench/snippets/urlpattern.js b/bench/snippets/urlpattern.js new file mode 100644 index 0000000000..b5e4908c89 --- /dev/null +++ b/bench/snippets/urlpattern.js @@ -0,0 +1,48 @@ +import { bench, group, run } from "../runner.mjs"; + +const patterns = [ + { name: "string pattern", input: "https://(sub.)?example(.com/)foo" }, + { name: "hostname IDN", input: { hostname: "xn--caf-dma.com" } }, + { + name: "pathname + search + hash + baseURL", + input: { + pathname: "/foo", + search: "bar", + hash: "baz", + baseURL: "https://example.com:8080", + }, + }, + { name: "pathname with regex", input: { pathname: "/([[a-z]--a])" } }, + { name: "named groups", input: { pathname: "/users/:id/posts/:postId" } }, + { name: "wildcard", input: { pathname: "/files/*" } }, +]; + +const testURL = "https://sub.example.com/foo"; + +group("URLPattern parse (constructor)", () => { + for (const { name, input } of patterns) { + bench(name, () => { + return new URLPattern(input); + }); + } +}); + +group("URLPattern.test()", () => { + for (const { name, input } of patterns) { + const pattern = new URLPattern(input); + bench(name, () => { + return pattern.test(testURL); + }); + } +}); + +group("URLPattern.exec()", () => { + for (const { name, input } of patterns) { + const pattern = new URLPattern(input); + bench(name, () => { + return pattern.exec(testURL); + }); + } +}); + +await run(); diff --git a/build.zig b/build.zig index eb16d47401..cea24a8dfe 100644 --- a/build.zig +++ b/build.zig @@ -32,6 +32,7 @@ const BunBuildOptions = struct { /// enable debug logs in release builds enable_logs: bool = false, enable_asan: bool, + enable_fuzzilli: bool, enable_valgrind: bool, use_mimalloc: bool, tracy_callstack_depth: u16, @@ -81,6 +82,7 @@ const BunBuildOptions = struct { opts.addOption(bool, "baseline", this.isBaseline()); opts.addOption(bool, "enable_logs", this.enable_logs); opts.addOption(bool, "enable_asan", this.enable_asan); + opts.addOption(bool, "enable_fuzzilli", this.enable_fuzzilli); opts.addOption(bool, "enable_valgrind", this.enable_valgrind); opts.addOption(bool, "use_mimalloc", this.use_mimalloc); opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{f}", .{this.reported_nodejs_version})); @@ -255,6 +257,7 @@ pub fn build(b: *Build) !void { .tracy_callstack_depth = b.option(u16, "tracy_callstack_depth", "") orelse 10, .enable_logs = b.option(bool, "enable_logs", "Enable logs in release") orelse false, .enable_asan = b.option(bool, "enable_asan", "Enable asan") orelse false, + .enable_fuzzilli = b.option(bool, 
"enable_fuzzilli", "Enable fuzzilli instrumentation") orelse false, .enable_valgrind = b.option(bool, "enable_valgrind", "Enable valgrind") orelse false, .use_mimalloc = b.option(bool, "use_mimalloc", "Use mimalloc as default allocator") orelse false, .llvm_codegen_threads = b.option(u32, "llvm_codegen_threads", "Number of threads to use for LLVM codegen") orelse 1, @@ -490,6 +493,7 @@ fn addMultiCheck( .no_llvm = root_build_options.no_llvm, .enable_asan = root_build_options.enable_asan, .enable_valgrind = root_build_options.enable_valgrind, + .enable_fuzzilli = root_build_options.enable_fuzzilli, .use_mimalloc = root_build_options.use_mimalloc, .override_no_export_cpp_apis = root_build_options.override_no_export_cpp_apis, }; @@ -603,15 +607,22 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void { obj.llvm_codegen_threads = opts.llvm_codegen_threads orelse 0; } - obj.no_link_obj = opts.os != .windows; + obj.no_link_obj = opts.os != .windows and !opts.no_llvm; + if (opts.enable_asan and !enableFastBuild(b)) { if (@hasField(Build.Module, "sanitize_address")) { + if (opts.enable_fuzzilli) { + obj.sanitize_coverage_trace_pc_guard = true; + } obj.root_module.sanitize_address = true; } else { const fail_step = b.addFail("asan is not supported on this platform"); obj.step.dependOn(&fail_step.step); } + } else if (opts.enable_fuzzilli) { + const fail_step = b.addFail("fuzzilli requires asan"); + obj.step.dependOn(&fail_step.step); } obj.bundle_compiler_rt = false; obj.bundle_ubsan_rt = false; diff --git a/bun.lock b/bun.lock index e3d7fb8ecd..121ee86803 100644 --- a/bun.lock +++ b/bun.lock @@ -1,6 +1,6 @@ { "lockfileVersion": 1, - "configVersion": 0, + "configVersion": 1, "workspaces": { "": { "name": "bun", @@ -36,6 +36,7 @@ }, "overrides": { "@types/bun": "workspace:packages/@types/bun", + "@types/node": "25.0.0", "bun-types": "workspace:packages/bun-types", }, "packages": { @@ -85,13 +86,13 @@ "@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os": "win32", "cpu": "x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="], - "@lezer/common": ["@lezer/common@1.2.3", "", {}, "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA=="], + "@lezer/common": ["@lezer/common@1.3.0", "", {}, "sha512-L9X8uHCYU310o99L3/MpJKYxPzXPOS7S0NmBaM7UO/x2Kb2WbmMLSkfvdr1KxRIFYOpbY0Jhn7CfLSUDzL8arQ=="], "@lezer/cpp": ["@lezer/cpp@1.1.3", "", { "dependencies": { "@lezer/common": "^1.2.0", "@lezer/highlight": "^1.0.0", "@lezer/lr": "^1.0.0" } }, "sha512-ykYvuFQKGsRi6IcE+/hCSGUhb/I4WPjd3ELhEblm2wS2cOznDFzO+ubK2c+ioysOnlZ3EduV+MVQFCPzAIoY3w=="], - "@lezer/highlight": ["@lezer/highlight@1.2.1", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA=="], + "@lezer/highlight": ["@lezer/highlight@1.2.3", "", { "dependencies": { "@lezer/common": "^1.3.0" } }, "sha512-qXdH7UqTvGfdVBINrgKhDsVTJTxactNNxLk7+UMwZhU13lMHaOBlJe9Vqp907ya56Y3+ed2tlqzys7jDkTmW0g=="], - "@lezer/lr": ["@lezer/lr@1.4.2", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA=="], + "@lezer/lr": ["@lezer/lr@1.4.3", "", { "dependencies": { "@lezer/common": "^1.0.0" } }, "sha512-yenN5SqAxAPv/qMnpWW0AT7l+SxVrgG+u0tNsRQWqbrz66HIl8DnEbBObvy21J5K7+I1v7gsAnlE2VQ5yYVSeA=="], "@octokit/app": ["@octokit/app@14.1.0", "", { "dependencies": { "@octokit/auth-app": 
"^6.0.0", "@octokit/auth-unauthenticated": "^5.0.0", "@octokit/core": "^5.0.0", "@octokit/oauth-app": "^6.0.0", "@octokit/plugin-paginate-rest": "^9.0.0", "@octokit/types": "^12.0.0", "@octokit/webhooks": "^12.0.4" } }, "sha512-g3uEsGOQCBl1+W1rgfwoRFUIR6PtvB2T1E4RpygeUU5LrLvlOqcxrt5lfykIeRpUPpupreGJUYl70fqMDXdTpw=="], @@ -145,7 +146,7 @@ "@sentry/types": ["@sentry/types@7.120.4", "", {}, "sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q=="], - "@types/aws-lambda": ["@types/aws-lambda@8.10.152", "", {}, "sha512-soT/c2gYBnT5ygwiHPmd9a1bftj462NWVk2tKCc1PYHSIacB2UwbTS2zYG4jzag1mRDuzg/OjtxQjQ2NKRB6Rw=="], + "@types/aws-lambda": ["@types/aws-lambda@8.10.159", "", {}, "sha512-SAP22WSGNN12OQ8PlCzGzRCZ7QDCwI85dQZbmpz7+mAk+L7j+wI7qnvmdKh+o7A5LaOp6QnOZ2NJphAZQTTHQg=="], "@types/btoa-lite": ["@types/btoa-lite@1.0.2", "", {}, "sha512-ZYbcE2x7yrvNFJiU7xJGrpF/ihpkM7zKgw8bha3LNJSesvTtUNxbpzaT7WXBIryf6jovisrxTBvymxMeLLj1Mg=="], @@ -155,7 +156,7 @@ "@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="], - "@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="], + "@types/node": ["@types/node@25.0.0", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-rl78HwuZlaDIUSeUKkmogkhebA+8K1Hy7tddZuJ3D0xV8pZSfsYGTsliGUol1JPzu9EKnTxPC4L1fiWouStRew=="], "aggregate-error": ["aggregate-error@3.1.0", "", { "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA=="], @@ -187,7 +188,7 @@ "deprecation": ["deprecation@2.3.1", "", {}, "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="], - "detect-libc": ["detect-libc@2.0.4", "", {}, "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA=="], + "detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="], "dot-case": ["dot-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3" } }, "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w=="], @@ -211,27 +212,29 @@ "jws": ["jws@3.2.2", "", { "dependencies": { "jwa": "^1.4.1", "safe-buffer": "^5.0.1" } }, "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA=="], - "lightningcss": ["lightningcss@1.30.1", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-darwin-arm64": "1.30.1", "lightningcss-darwin-x64": "1.30.1", "lightningcss-freebsd-x64": "1.30.1", "lightningcss-linux-arm-gnueabihf": "1.30.1", "lightningcss-linux-arm64-gnu": "1.30.1", "lightningcss-linux-arm64-musl": "1.30.1", "lightningcss-linux-x64-gnu": "1.30.1", "lightningcss-linux-x64-musl": "1.30.1", "lightningcss-win32-arm64-msvc": "1.30.1", "lightningcss-win32-x64-msvc": "1.30.1" } }, "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg=="], + "lightningcss": ["lightningcss@1.30.2", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-android-arm64": "1.30.2", "lightningcss-darwin-arm64": "1.30.2", "lightningcss-darwin-x64": "1.30.2", "lightningcss-freebsd-x64": "1.30.2", 
"lightningcss-linux-arm-gnueabihf": "1.30.2", "lightningcss-linux-arm64-gnu": "1.30.2", "lightningcss-linux-arm64-musl": "1.30.2", "lightningcss-linux-x64-gnu": "1.30.2", "lightningcss-linux-x64-musl": "1.30.2", "lightningcss-win32-arm64-msvc": "1.30.2", "lightningcss-win32-x64-msvc": "1.30.2" } }, "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ=="], - "lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ=="], + "lightningcss-android-arm64": ["lightningcss-android-arm64@1.30.2", "", { "os": "android", "cpu": "arm64" }, "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A=="], - "lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA=="], + "lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA=="], - "lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig=="], + "lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ=="], - "lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.1", "", { "os": "linux", "cpu": "arm" }, "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q=="], + "lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA=="], - "lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw=="], + "lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.2", "", { "os": "linux", "cpu": "arm" }, "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA=="], - "lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ=="], + "lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A=="], - "lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw=="], + "lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA=="], - "lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ=="], + 
"lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w=="], - "lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA=="], + "lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA=="], - "lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.1", "", { "os": "win32", "cpu": "x64" }, "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg=="], + "lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ=="], + + "lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="], "lodash.includes": ["lodash.includes@4.3.0", "", {}, "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="], @@ -287,7 +290,7 @@ "scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="], - "semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="], + "semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="], "sentence-case": ["sentence-case@3.0.4", "", { "dependencies": { "no-case": "^3.0.4", "tslib": "^2.0.3", "upper-case-first": "^2.0.2" } }, "sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg=="], @@ -303,7 +306,7 @@ "uglify-js": ["uglify-js@3.19.3", "", { "bin": { "uglifyjs": "bin/uglifyjs" } }, "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ=="], - "undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="], + "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], "universal-github-app-jwt": ["universal-github-app-jwt@1.2.0", "", { "dependencies": { "@types/jsonwebtoken": "^9.0.0", "jsonwebtoken": "^9.0.2" } }, "sha512-dncpMpnsKBk0eetwfN8D8OUHGfiDhhJ+mtsbMl+7PfW7mYjiH8LIcqRmYMtzYLgSh47HjfdBtrBwIQ/gizKR3g=="], diff --git a/bunfig.toml b/bunfig.toml index f1bba3259c..82329b401e 100644 --- a/bunfig.toml +++ b/bunfig.toml @@ -10,4 +10,4 @@ preload = "./test/preload.ts" [install] linker = "isolated" -minimumReleaseAge = 1 +minimumReleaseAge = 259200 # three days diff --git a/cmake/CompilerFlags.cmake b/cmake/CompilerFlags.cmake index cff32fb166..bff19e1974 100644 --- a/cmake/CompilerFlags.cmake +++ b/cmake/CompilerFlags.cmake @@ -51,6 +51,23 @@ if(ENABLE_ASAN) ) endif() +if(ENABLE_FUZZILLI) + register_compiler_flags( + DESCRIPTION "Enable coverage instrumentation for fuzzing" + 
-fsanitize-coverage=trace-pc-guard + ) + + register_linker_flags( + DESCRIPTION "Link coverage instrumentation" + -fsanitize-coverage=trace-pc-guard + ) + + register_compiler_flags( + DESCRIPTION "Enable fuzzilli-specific code" + -DFUZZILLI_ENABLED + ) +endif() + # --- Optimization level --- if(DEBUG) register_compiler_flags( diff --git a/cmake/Options.cmake b/cmake/Options.cmake index ac6ce10c74..e54f6db166 100644 --- a/cmake/Options.cmake +++ b/cmake/Options.cmake @@ -127,6 +127,8 @@ if (NOT ENABLE_ASAN) set(ENABLE_ZIG_ASAN OFF) endif() +optionx(ENABLE_FUZZILLI BOOL "If fuzzilli support should be enabled" DEFAULT OFF) + if(RELEASE AND LINUX AND CI AND NOT ENABLE_ASSERTIONS AND NOT ENABLE_ASAN) set(DEFAULT_LTO ON) else() diff --git a/cmake/analysis/RunClangFormat.cmake b/cmake/analysis/RunClangFormat.cmake index 106ac54ef6..f290116b53 100644 --- a/cmake/analysis/RunClangFormat.cmake +++ b/cmake/analysis/RunClangFormat.cmake @@ -34,26 +34,6 @@ register_command( ALWAYS_RUN ) -if(GIT_CHANGED_SOURCES) - set(CLANG_FORMAT_CHANGED_SOURCES) - foreach(source ${CLANG_FORMAT_SOURCES}) - list(FIND GIT_CHANGED_SOURCES ${source} index) - if(NOT ${index} EQUAL -1) - list(APPEND CLANG_FORMAT_CHANGED_SOURCES ${source}) - endif() - endforeach() -endif() - -if(CLANG_FORMAT_CHANGED_SOURCES) - set(CLANG_FORMAT_DIFF_COMMAND ${CLANG_FORMAT_PROGRAM} - -i # edits files in-place - --verbose - ${CLANG_FORMAT_CHANGED_SOURCES} - ) -else() - set(CLANG_FORMAT_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for clang-format") -endif() - register_command( TARGET clang-format-diff diff --git a/cmake/analysis/RunClangTidy.cmake b/cmake/analysis/RunClangTidy.cmake index ee5782ade8..23e1422c27 100644 --- a/cmake/analysis/RunClangTidy.cmake +++ b/cmake/analysis/RunClangTidy.cmake @@ -3,7 +3,7 @@ set(CLANG_TIDY_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES}) set(CLANG_TIDY_COMMAND ${CLANG_TIDY_PROGRAM} - -p ${BUILD_PATH} + -p ${BUILD_PATH} --config-file=${CWD}/.clang-tidy ) @@ -40,27 +40,6 @@ register_command( ALWAYS_RUN ) -if(GIT_CHANGED_SOURCES) - set(CLANG_TIDY_CHANGED_SOURCES) - foreach(source ${CLANG_TIDY_SOURCES}) - list(FIND GIT_CHANGED_SOURCES ${source} index) - if(NOT ${index} EQUAL -1) - list(APPEND CLANG_TIDY_CHANGED_SOURCES ${source}) - endif() - endforeach() -endif() - -if(CLANG_TIDY_CHANGED_SOURCES) - set(CLANG_TIDY_DIFF_COMMAND ${CLANG_TIDY_PROGRAM} - ${CLANG_TIDY_CHANGED_SOURCES} - --fix - --fix-errors - --fix-notes - ) -else() - set(CLANG_TIDY_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for clang-tidy") -endif() - register_command( TARGET clang-tidy-diff diff --git a/cmake/analysis/RunPrettier.cmake b/cmake/analysis/RunPrettier.cmake index 8c8ceb1ba1..55122f1854 100644 --- a/cmake/analysis/RunPrettier.cmake +++ b/cmake/analysis/RunPrettier.cmake @@ -92,26 +92,6 @@ register_command( ALWAYS_RUN ) -if(GIT_CHANGED_SOURCES) - set(PRETTIER_CHANGED_SOURCES) - foreach(source ${PRETTIER_SOURCES}) - list(FIND GIT_CHANGED_SOURCES ${source} index) - if(NOT ${index} EQUAL -1) - list(APPEND PRETTIER_CHANGED_SOURCES ${source}) - endif() - endforeach() -endif() - -if(PRETTIER_CHANGED_SOURCES) - set(PRETTIER_DIFF_COMMAND ${PRETTIER_COMMAND} - --write - --plugin=prettier-plugin-organize-imports - ${PRETTIER_CHANGED_SOURCES} - ) -else() - set(PRETTIER_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for prettier") -endif() - register_command( TARGET prettier-diff diff --git a/cmake/analysis/RunZigFormat.cmake b/cmake/analysis/RunZigFormat.cmake index 0ff7c23865..9486eb4368 100644 --- 
a/cmake/analysis/RunZigFormat.cmake +++ b/cmake/analysis/RunZigFormat.cmake @@ -25,25 +25,6 @@ register_command( ALWAYS_RUN ) -if(GIT_CHANGED_SOURCES) - set(ZIG_FORMAT_CHANGED_SOURCES) - foreach(source ${ZIG_FORMAT_SOURCES}) - list(FIND GIT_CHANGED_SOURCES ${source} index) - if(NOT ${index} EQUAL -1) - list(APPEND ZIG_FORMAT_CHANGED_SOURCES ${source}) - endif() - endforeach() -endif() - -if(ZIG_FORMAT_CHANGED_SOURCES) - set(ZIG_FORMAT_DIFF_COMMAND ${ZIG_EXECUTABLE} - fmt - ${ZIG_FORMAT_CHANGED_SOURCES} - ) -else() - set(ZIG_FORMAT_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for zig-format") -endif() - register_command( TARGET zig-format-diff diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 43b061846b..74580efaf9 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -695,6 +695,7 @@ register_command( -Dcpu=${ZIG_CPU} -Denable_logs=$,true,false> -Denable_asan=$,true,false> + -Denable_fuzzilli=$,true,false> -Denable_valgrind=$,true,false> -Duse_mimalloc=$,true,false> -Dllvm_codegen_threads=${LLVM_ZIG_CODEGEN_THREADS} @@ -871,6 +872,7 @@ target_include_directories(${bun} PRIVATE ${CODEGEN_PATH} ${VENDOR_PATH} ${VENDOR_PATH}/picohttpparser + ${VENDOR_PATH}/zlib ${NODEJS_HEADERS_PATH}/include ${NODEJS_HEADERS_PATH}/include/node ) @@ -1198,6 +1200,29 @@ set_target_properties(${bun} PROPERTIES LINK_DEPENDS ${BUN_SYMBOLS_PATH}) include(SetupWebKit) +if(BUN_LINK_ONLY) + register_command( + TARGET + ${bun} + TARGET_PHASE + POST_BUILD + COMMENT + "Uploading link metadata" + COMMAND + ${CMAKE_COMMAND} -E env + BUN_VERSION=${VERSION} + WEBKIT_DOWNLOAD_URL=${WEBKIT_DOWNLOAD_URL} + WEBKIT_VERSION=${WEBKIT_VERSION} + ZIG_COMMIT=${ZIG_COMMIT} + ${BUN_EXECUTABLE} ${CWD}/scripts/create-link-metadata.mjs ${BUILD_PATH} ${bun} + SOURCES + ${BUN_ZIG_OUTPUT} + ${BUN_CPP_OUTPUT} + ARTIFACTS + ${BUILD_PATH}/link-metadata.json + ) +endif() + if(WIN32) if(DEBUG) target_link_libraries(${bun} PRIVATE diff --git a/cmake/targets/BuildCares.cmake b/cmake/targets/BuildCares.cmake index 2d35e7faf2..c2f1401417 100644 --- a/cmake/targets/BuildCares.cmake +++ b/cmake/targets/BuildCares.cmake @@ -4,7 +4,7 @@ register_repository( REPOSITORY c-ares/c-ares COMMIT - d3a507e920e7af18a5efb7f9f1d8044ed4750013 + 3ac47ee46edd8ea40370222f91613fc16c434853 ) register_cmake_command( diff --git a/cmake/tools/SetupCcache.cmake b/cmake/tools/SetupCcache.cmake index fc1e64aa96..3e1982ca70 100644 --- a/cmake/tools/SetupCcache.cmake +++ b/cmake/tools/SetupCcache.cmake @@ -5,18 +5,12 @@ if(NOT ENABLE_CCACHE OR CACHE_STRATEGY STREQUAL "none") return() endif() -if (CI AND NOT APPLE) - setenv(CCACHE_DISABLE 1) - return() -endif() find_command( VARIABLE CCACHE_PROGRAM COMMAND ccache - REQUIRED - ${CI} ) if(NOT CCACHE_PROGRAM) diff --git a/cmake/tools/SetupGit.cmake b/cmake/tools/SetupGit.cmake index 7f28fe7743..5e084991a7 100644 --- a/cmake/tools/SetupGit.cmake +++ b/cmake/tools/SetupGit.cmake @@ -4,41 +4,9 @@ find_command( COMMAND git REQUIRED - OFF + ${CI} ) if(NOT GIT_PROGRAM) return() endif() - -set(GIT_DIFF_COMMAND ${GIT_PROGRAM} diff --no-color --name-only --diff-filter=AMCR origin/main HEAD) - -execute_process( - COMMAND - ${GIT_DIFF_COMMAND} - WORKING_DIRECTORY - ${CWD} - OUTPUT_STRIP_TRAILING_WHITESPACE - OUTPUT_VARIABLE - GIT_DIFF - ERROR_STRIP_TRAILING_WHITESPACE - ERROR_VARIABLE - GIT_DIFF_ERROR - RESULT_VARIABLE - GIT_DIFF_RESULT -) - -if(NOT GIT_DIFF_RESULT EQUAL 0) - message(WARNING "Command failed: ${GIT_DIFF_COMMAND} ${GIT_DIFF_ERROR}") - return() -endif() - 
-string(REPLACE "\n" ";" GIT_CHANGED_SOURCES "${GIT_DIFF}") - -if(CI) - set(GIT_CHANGED_SOURCES "${GIT_CHANGED_SOURCES}") - message(STATUS "Set GIT_CHANGED_SOURCES: ${GIT_CHANGED_SOURCES}") -endif() - -list(TRANSFORM GIT_CHANGED_SOURCES PREPEND ${CWD}/) -list(LENGTH GIT_CHANGED_SOURCES GIT_CHANGED_SOURCES_COUNT) diff --git a/cmake/tools/SetupSccache.cmake b/cmake/tools/SetupSccache.cmake deleted file mode 100644 index cb4b5aa750..0000000000 --- a/cmake/tools/SetupSccache.cmake +++ /dev/null @@ -1,123 +0,0 @@ -# Setup sccache as the C and C++ compiler launcher to speed up builds by caching -if(CACHE_STRATEGY STREQUAL "none") - return() -endif() - -set(SCCACHE_SHARED_CACHE_REGION "us-west-1") -set(SCCACHE_SHARED_CACHE_BUCKET "bun-build-sccache-store") - -# Function to check if the system AWS credentials have access to the sccache S3 bucket. -function(check_aws_credentials OUT_VAR) - # Install dependencies first - execute_process( - COMMAND ${BUN_EXECUTABLE} install --frozen-lockfile - WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/scripts/build-cache - RESULT_VARIABLE INSTALL_EXIT_CODE - OUTPUT_VARIABLE INSTALL_OUTPUT - ERROR_VARIABLE INSTALL_ERROR - ) - - if(NOT INSTALL_EXIT_CODE EQUAL 0) - message(FATAL_ERROR "Failed to install dependencies in scripts/build-cache\n" - "Exit code: ${INSTALL_EXIT_CODE}\n" - "Output: ${INSTALL_OUTPUT}\n" - "Error: ${INSTALL_ERROR}") - endif() - - # Check AWS credentials - execute_process( - COMMAND - ${BUN_EXECUTABLE} - run - have-access.ts - --bucket ${SCCACHE_SHARED_CACHE_BUCKET} - --region ${SCCACHE_SHARED_CACHE_REGION} - WORKING_DIRECTORY - ${CMAKE_SOURCE_DIR}/scripts/build-cache - RESULT_VARIABLE HAVE_ACCESS_EXIT_CODE - ) - - if(HAVE_ACCESS_EXIT_CODE EQUAL 0) - set(HAS_CREDENTIALS TRUE) - else() - set(HAS_CREDENTIALS FALSE) - endif() - - set(${OUT_VAR} ${HAS_CREDENTIALS} PARENT_SCOPE) -endfunction() - -# Configure sccache to use the local cache only. -function(sccache_configure_local_filesystem) - unsetenv(SCCACHE_BUCKET) - unsetenv(SCCACHE_REGION) - setenv(SCCACHE_DIR "${CACHE_PATH}/sccache") -endfunction() - -# Configure sccache to use the distributed cache (S3 + local). -function(sccache_configure_distributed) - setenv(SCCACHE_BUCKET "${SCCACHE_SHARED_CACHE_BUCKET}") - setenv(SCCACHE_REGION "${SCCACHE_SHARED_CACHE_REGION}") - setenv(SCCACHE_DIR "${CACHE_PATH}/sccache") -endfunction() - -function(sccache_configure_environment_ci) - if(CACHE_STRATEGY STREQUAL "auto" OR CACHE_STRATEGY STREQUAL "distributed") - check_aws_credentials(HAS_AWS_CREDENTIALS) - if(HAS_AWS_CREDENTIALS) - sccache_configure_distributed() - message(NOTICE "sccache: Using distributed cache strategy.") - else() - message(FATAL_ERROR "CI CACHE_STRATEGY is set to '${CACHE_STRATEGY}', but no valid AWS " - "credentials were found. Note that 'auto' requires AWS credentials to access the shared " - "cache in CI.") - endif() - elseif(CACHE_STRATEGY STREQUAL "local") - # We disallow this because we want our CI runs to always used the shared cache to accelerate - # builds. - # none, distributed and auto are all okay. - # - # If local is configured, it's as good as "none", so this is probably user error. - message(FATAL_ERROR "CI CACHE_STRATEGY is set to 'local', which is not allowed.") - endif() -endfunction() - -function(sccache_configure_environment_developer) - # Local environments can use any strategy they like. S3 is set up in such a way so as to clean - # itself from old entries automatically. 
- if (CACHE_STRATEGY STREQUAL "auto" OR CACHE_STRATEGY STREQUAL "local") - # In the local environment, we prioritize using the local cache. This is because sccache takes - # into consideration the whole absolute path of the files being compiled, and it's very - # unlikely users will have the same absolute paths on their local machines. - sccache_configure_local_filesystem() - message(NOTICE "sccache: Using local cache strategy.") - elseif(CACHE_STRATEGY STREQUAL "distributed") - check_aws_credentials(HAS_AWS_CREDENTIALS) - if(HAS_AWS_CREDENTIALS) - sccache_configure_distributed() - message(NOTICE "sccache: Using distributed cache strategy.") - else() - message(FATAL_ERROR "CACHE_STRATEGY is set to 'distributed', but no valid AWS credentials " - "were found.") - endif() - endif() -endfunction() - -find_command(VARIABLE SCCACHE_PROGRAM COMMAND sccache REQUIRED ${CI}) -if(NOT SCCACHE_PROGRAM) - message(WARNING "sccache not found. Your builds will be slower.") - return() -endif() - -set(SCCACHE_ARGS CMAKE_C_COMPILER_LAUNCHER CMAKE_CXX_COMPILER_LAUNCHER) -foreach(arg ${SCCACHE_ARGS}) - setx(${arg} ${SCCACHE_PROGRAM}) - list(APPEND CMAKE_ARGS -D${arg}=${${arg}}) -endforeach() - -setenv(SCCACHE_LOG "info") - -if (CI) - sccache_configure_environment_ci() -else() - sccache_configure_environment_developer() -endif() diff --git a/cmake/tools/SetupWebKit.cmake b/cmake/tools/SetupWebKit.cmake index aa07c060ec..6e2ce7d677 100644 --- a/cmake/tools/SetupWebKit.cmake +++ b/cmake/tools/SetupWebKit.cmake @@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use") option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading") if(NOT WEBKIT_VERSION) - set(WEBKIT_VERSION 6d0f3aac0b817cc01a846b3754b21271adedac12) + set(WEBKIT_VERSION 863778130931e0081a688f48e8479b8ee61b9507) endif() string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX) diff --git a/cmake/tools/SetupZig.cmake b/cmake/tools/SetupZig.cmake index 1de53f05e3..320a6f4e64 100644 --- a/cmake/tools/SetupZig.cmake +++ b/cmake/tools/SetupZig.cmake @@ -55,13 +55,7 @@ optionx(ZIG_OBJECT_FORMAT "obj|bc" "Output file format for Zig object files" DEF optionx(ZIG_LOCAL_CACHE_DIR FILEPATH "The path to local the zig cache directory" DEFAULT ${CACHE_PATH}/zig/local) optionx(ZIG_GLOBAL_CACHE_DIR FILEPATH "The path to the global zig cache directory" DEFAULT ${CACHE_PATH}/zig/global) -if(CI) - set(ZIG_COMPILER_SAFE_DEFAULT ON) -else() - set(ZIG_COMPILER_SAFE_DEFAULT OFF) -endif() - -optionx(ZIG_COMPILER_SAFE BOOL "Download a ReleaseSafe build of the Zig compiler." DEFAULT ${ZIG_COMPILER_SAFE_DEFAULT}) +optionx(ZIG_COMPILER_SAFE BOOL "Download a ReleaseSafe build of the Zig compiler." DEFAULT ${CI}) setenv(ZIG_LOCAL_CACHE_DIR ${ZIG_LOCAL_CACHE_DIR}) setenv(ZIG_GLOBAL_CACHE_DIR ${ZIG_GLOBAL_CACHE_DIR}) diff --git a/dockerhub/debian-slim/Dockerfile b/dockerhub/debian-slim/Dockerfile index 755a8415ca..996034f9ca 100644 --- a/dockerhub/debian-slim/Dockerfile +++ b/dockerhub/debian-slim/Dockerfile @@ -1,4 +1,4 @@ -FROM debian:bookworm-slim AS build +FROM debian:trixie-slim AS build # https://github.com/oven-sh/bun/releases ARG BUN_VERSION=latest @@ -55,7 +55,7 @@ RUN apt-get update -qq \ && which bun \ && bun --version -FROM debian:bookworm-slim +FROM debian:trixie-slim # Disable the runtime transpiler cache by default inside Docker containers. 
# On ephemeral containers, the cache is not useful diff --git a/dockerhub/debian/Dockerfile b/dockerhub/debian/Dockerfile index f8ed38c3c5..85a9250d9a 100644 --- a/dockerhub/debian/Dockerfile +++ b/dockerhub/debian/Dockerfile @@ -1,4 +1,4 @@ -FROM debian:bookworm-slim AS build +FROM debian:trixie-slim AS build # https://github.com/oven-sh/bun/releases ARG BUN_VERSION=latest @@ -56,7 +56,7 @@ RUN apt-get update -qq \ && rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \ && chmod +x /usr/local/bin/bun -FROM debian:bookworm +FROM debian:trixie COPY docker-entrypoint.sh /usr/local/bin COPY --from=build /usr/local/bin/bun /usr/local/bin/bun diff --git a/dockerhub/distroless/Dockerfile b/dockerhub/distroless/Dockerfile index 32c8a0f99a..8d4e98d787 100644 --- a/dockerhub/distroless/Dockerfile +++ b/dockerhub/distroless/Dockerfile @@ -1,4 +1,4 @@ -FROM debian:bookworm-slim AS build +FROM debian:trixie-slim AS build # https://github.com/oven-sh/bun/releases ARG BUN_VERSION=latest @@ -55,7 +55,7 @@ RUN apt-get update -qq \ && which bun \ && bun --version -FROM gcr.io/distroless/base-nossl-debian11 +FROM gcr.io/distroless/base-nossl-debian13 # Disable the runtime transpiler cache by default inside Docker containers. # On ephemeral containers, the cache is not useful @@ -71,6 +71,7 @@ ENV PATH "${PATH}:/usr/local/bun-node-fallback-bin" # Temporarily use the `build`-stage image binaries to create a symlink: RUN --mount=type=bind,from=build,source=/usr/bin,target=/usr/bin \ + --mount=type=bind,from=build,source=/etc/alternatives/which,target=/etc/alternatives/which \ --mount=type=bind,from=build,source=/bin,target=/bin \ --mount=type=bind,from=build,source=/usr/lib,target=/usr/lib \ --mount=type=bind,from=build,source=/lib,target=/lib \ diff --git a/docs/bundler/css.mdx b/docs/bundler/css.mdx index 730332f173..7a73280b79 100644 --- a/docs/bundler/css.mdx +++ b/docs/bundler/css.mdx @@ -72,7 +72,7 @@ Bun's CSS bundler automatically converts this nested syntax into traditional fla You can also nest media queries and other at-rules inside selectors, eliminating the need to repeat selector patterns: -```css title="styles.css" icon="file-code" +```scss title="styles.css" icon="file-code" .responsive-element { display: block; diff --git a/docs/bundler/esbuild.mdx b/docs/bundler/esbuild.mdx index 10d6ae7591..a1724d5f3b 100644 --- a/docs/bundler/esbuild.mdx +++ b/docs/bundler/esbuild.mdx @@ -65,6 +65,7 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot | `--chunk-names` | `--chunk-naming` | Renamed for consistency with naming in JS API | | `--color` | n/a | Always enabled | | `--drop` | `--drop` | | +| n/a | `--feature` | Bun-specific. Enable feature flags for compile-time dead-code elimination via `import { feature } from "bun:bundle"` | | `--entry-names` | `--entry-naming` | Renamed for consistency with naming in JS API | | `--global-name` | n/a | Not applicable, Bun does not support `iife` output at this time | | `--ignore-annotations` | `--ignore-dce-annotations` | | diff --git a/docs/bundler/executables.mdx b/docs/bundler/executables.mdx index 79e6d6a55e..16c623424f 100644 --- a/docs/bundler/executables.mdx +++ b/docs/bundler/executables.mdx @@ -5,18 +5,28 @@ description: "Generate standalone executables from TypeScript or JavaScript file Bun's bundler implements a `--compile` flag for generating a standalone binary from a TypeScript or JavaScript file. 
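Stepping back to the Bun-specific `--feature` flag added to the esbuild comparison table above: it enables compile-time dead-code elimination through `import { feature } from "bun:bundle"`. The sketch below shows how a build-time feature check might look; the exact `feature()` call signature and the `TELEMETRY` flag name are assumptions for illustration and are not taken from this diff.

```ts
// Hypothetical usage: only the "bun:bundle" import path is documented in the
// table above; the call shape below is an assumption.
import { feature } from "bun:bundle";

// Built with something like: bun build --feature=TELEMETRY ./app.ts --outfile app
if (feature("TELEMETRY")) {
  // Kept only when the TELEMETRY flag is enabled at build time; otherwise the
  // bundler can drop this whole branch as dead code.
  console.log("telemetry enabled");
} else {
  console.log("telemetry disabled");
}
```

Because the flag's value is known when the bundle is produced, the disabled branch does not need to ship in the output.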
- - -```bash terminal icon="terminal" -bun build ./cli.ts --compile --outfile mycli -``` + + + ```bash terminal icon="terminal" + bun build ./cli.ts --compile --outfile mycli + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./cli.ts"], + compile: { + outfile: "./mycli", + }, + }); + ``` + + ```ts cli.ts icon="/icons/typescript.svg" console.log("Hello world!"); ``` - - This bundles `cli.ts` into an executable that can be executed directly: ```bash terminal icon="terminal" @@ -37,49 +47,157 @@ The `--target` flag lets you compile your standalone executable for a different To build for Linux x64 (most servers): -```bash icon="terminal" terminal -bun build --compile --target=bun-linux-x64 ./index.ts --outfile myapp + + + ```bash icon="terminal" terminal + bun build --compile --target=bun-linux-x64 ./index.ts --outfile myapp -# To support CPUs from before 2013, use the baseline version (nehalem) -bun build --compile --target=bun-linux-x64-baseline ./index.ts --outfile myapp + # To support CPUs from before 2013, use the baseline version (nehalem) + bun build --compile --target=bun-linux-x64-baseline ./index.ts --outfile myapp -# To explicitly only support CPUs from 2013 and later, use the modern version (haswell) -# modern is faster, but baseline is more compatible. -bun build --compile --target=bun-linux-x64-modern ./index.ts --outfile myapp -``` + # To explicitly only support CPUs from 2013 and later, use the modern version (haswell) + # modern is faster, but baseline is more compatible. + bun build --compile --target=bun-linux-x64-modern ./index.ts --outfile myapp + ``` + + + + ```ts build.ts icon="/icons/typescript.svg" + // Standard Linux x64 + await Bun.build({ + entrypoints: ["./index.ts"], + compile: { + target: "bun-linux-x64", + outfile: "./myapp", + }, + }); + + // Baseline (pre-2013 CPUs) + await Bun.build({ + entrypoints: ["./index.ts"], + compile: { + target: "bun-linux-x64-baseline", + outfile: "./myapp", + }, + }); + + // Modern (2013+ CPUs, faster) + await Bun.build({ + entrypoints: ["./index.ts"], + compile: { + target: "bun-linux-x64-modern", + outfile: "./myapp", + }, + }); + ``` + + + To build for Linux ARM64 (e.g. Graviton or Raspberry Pi): -```bash icon="terminal" terminal -# Note: the default architecture is x64 if no architecture is specified. -bun build --compile --target=bun-linux-arm64 ./index.ts --outfile myapp -``` + + + ```bash icon="terminal" terminal + # Note: the default architecture is x64 if no architecture is specified. 
+ bun build --compile --target=bun-linux-arm64 ./index.ts --outfile myapp + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./index.ts"], + compile: { + target: "bun-linux-arm64", + outfile: "./myapp", + }, + }); + ``` + + To build for Windows x64: -```bash icon="terminal" terminal -bun build --compile --target=bun-windows-x64 ./path/to/my/app.ts --outfile myapp + + + ```bash icon="terminal" terminal + bun build --compile --target=bun-windows-x64 ./path/to/my/app.ts --outfile myapp -# To support CPUs from before 2013, use the baseline version (nehalem) -bun build --compile --target=bun-windows-x64-baseline ./path/to/my/app.ts --outfile myapp + # To support CPUs from before 2013, use the baseline version (nehalem) + bun build --compile --target=bun-windows-x64-baseline ./path/to/my/app.ts --outfile myapp -# To explicitly only support CPUs from 2013 and later, use the modern version (haswell) -bun build --compile --target=bun-windows-x64-modern ./path/to/my/app.ts --outfile myapp + # To explicitly only support CPUs from 2013 and later, use the modern version (haswell) + bun build --compile --target=bun-windows-x64-modern ./path/to/my/app.ts --outfile myapp -# note: if no .exe extension is provided, Bun will automatically add it for Windows executables -``` + # note: if no .exe extension is provided, Bun will automatically add it for Windows executables + ``` + + + + ```ts build.ts icon="/icons/typescript.svg" + // Standard Windows x64 + await Bun.build({ + entrypoints: ["./path/to/my/app.ts"], + compile: { + target: "bun-windows-x64", + outfile: "./myapp", // .exe added automatically + }, + }); + + // Baseline or modern variants + await Bun.build({ + entrypoints: ["./path/to/my/app.ts"], + compile: { + target: "bun-windows-x64-baseline", + outfile: "./myapp", + }, + }); + ``` + + + To build for macOS arm64: -```bash icon="terminal" terminal -bun build --compile --target=bun-darwin-arm64 ./path/to/my/app.ts --outfile myapp -``` + + + ```bash icon="terminal" terminal + bun build --compile --target=bun-darwin-arm64 ./path/to/my/app.ts --outfile myapp + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./path/to/my/app.ts"], + compile: { + target: "bun-darwin-arm64", + outfile: "./myapp", + }, + }); + ``` + + To build for macOS x64: -```bash icon="terminal" terminal -bun build --compile --target=bun-darwin-x64 ./path/to/my/app.ts --outfile myapp -``` + + + ```bash icon="terminal" terminal + bun build --compile --target=bun-darwin-x64 ./path/to/my/app.ts --outfile myapp + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./path/to/my/app.ts"], + compile: { + target: "bun-darwin-x64", + outfile: "./myapp", + }, + }); + ``` + + ### Supported targets @@ -90,7 +208,7 @@ The order of the `--target` flag does not matter, as long as they're delimited b | bun-linux-x64 | Linux | x64 | ✅ | ✅ | glibc | | bun-linux-arm64 | Linux | arm64 | ✅ | N/A | glibc | | bun-windows-x64 | Windows | x64 | ✅ | ✅ | - | -| ~~bun-windows-arm64~~ | Windows | arm64 | ❌ | ❌ | - | +| ~~bun-windows-arm64~~ | ~~Windows~~ | ~~arm64~~ | ❌ | ❌ | - | | bun-darwin-x64 | macOS | x64 | ✅ | ✅ | - | | bun-darwin-arm64 | macOS | arm64 | ✅ | N/A | - | | bun-linux-x64-musl | Linux | x64 | ✅ | ✅ | musl | @@ -110,15 +228,33 @@ The order of the `--target` flag does not matter, as long as they're delimited b Use the `--define` flag to inject build-time constants into your executable, such as version numbers, build 
timestamps, or configuration values: -```bash icon="terminal" terminal -bun build --compile --define BUILD_VERSION='"1.2.3"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/cli.ts --outfile mycli -``` + + + ```bash icon="terminal" terminal + bun build --compile --define BUILD_VERSION='"1.2.3"' --define BUILD_TIME='"2024-01-15T10:30:00Z"' src/cli.ts --outfile mycli + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./src/cli.ts"], + compile: { + outfile: "./mycli", + }, + define: { + BUILD_VERSION: JSON.stringify("1.2.3"), + BUILD_TIME: JSON.stringify("2024-01-15T10:30:00Z"), + }, + }); + ``` + + These constants are embedded directly into your compiled binary at build time, providing zero runtime overhead and enabling dead code elimination optimizations. For comprehensive examples and advanced patterns, see the [Build-time constants - guide](https://bun.com/guides/runtime/build-time-constants). + guide](/guides/runtime/build-time-constants). --- @@ -133,17 +269,50 @@ With compiled executables, you can move that cost from runtime to build-time. When deploying to production, we recommend the following: -```bash icon="terminal" terminal -bun build --compile --minify --sourcemap ./path/to/my/app.ts --outfile myapp -``` + + + ```bash icon="terminal" terminal + bun build --compile --minify --sourcemap ./path/to/my/app.ts --outfile myapp + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./path/to/my/app.ts"], + compile: { + outfile: "./myapp", + }, + minify: true, + sourcemap: "linked", + }); + ``` + + ### Bytecode compilation To improve startup time, enable bytecode compilation: -```bash icon="terminal" terminal -bun build --compile --minify --sourcemap --bytecode ./path/to/my/app.ts --outfile myapp -``` + + + ```bash icon="terminal" terminal + bun build --compile --minify --sourcemap --bytecode ./path/to/my/app.ts --outfile myapp + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./path/to/my/app.ts"], + compile: { + outfile: "./myapp", + }, + minify: true, + sourcemap: "linked", + bytecode: true, + }); + ``` + + Using bytecode compilation, `tsc` starts 2x faster: @@ -172,9 +341,24 @@ The `--bytecode` argument enables bytecode compilation. Every time you run JavaS **`--compile-exec-argv="args"`** - Embed runtime arguments that are available via `process.execArgv`: -```bash icon="terminal" terminal -bun build --compile --compile-exec-argv="--smol --user-agent=MyBot" ./app.ts --outfile myapp -``` + + + ```bash icon="terminal" terminal + bun build --compile --compile-exec-argv="--smol --user-agent=MyBot" ./app.ts --outfile myapp + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./app.ts"], + compile: { + execArgv: ["--smol", "--user-agent=MyBot"], + outfile: "./myapp", + }, + }); + ``` + + ```ts app.ts icon="/icons/typescript.svg" // In the compiled app @@ -183,6 +367,72 @@ console.log(process.execArgv); // ["--smol", "--user-agent=MyBot"] --- +## Automatic config loading + +Standalone executables can automatically load configuration files from the directory where they are run. 
By default: + +- **`tsconfig.json`** and **`package.json`** loading is **disabled** — these are typically only needed at development time, and the bundler already uses them when compiling +- **`.env`** and **`bunfig.toml`** loading is **enabled** — these often contain runtime configuration that may vary per deployment + + + In a future version of Bun, `.env` and `bunfig.toml` may also be disabled by default for more deterministic behavior. + + +### Enabling config loading at runtime + +If your executable needs to read `tsconfig.json` or `package.json` at runtime, you can opt in with the new CLI flags: + +```bash icon="terminal" terminal +# Enable runtime loading of tsconfig.json +bun build --compile --compile-autoload-tsconfig ./app.ts --outfile myapp + +# Enable runtime loading of package.json +bun build --compile --compile-autoload-package-json ./app.ts --outfile myapp + +# Enable both +bun build --compile --compile-autoload-tsconfig --compile-autoload-package-json ./app.ts --outfile myapp +``` + +### Disabling config loading at runtime + +To disable `.env` or `bunfig.toml` loading for deterministic execution: + + + + ```bash icon="terminal" terminal + # Disable .env loading + bun build --compile --no-compile-autoload-dotenv ./app.ts --outfile myapp + + # Disable bunfig.toml loading + bun build --compile --no-compile-autoload-bunfig ./app.ts --outfile myapp + + # Disable all config loading + bun build --compile --no-compile-autoload-dotenv --no-compile-autoload-bunfig ./app.ts --outfile myapp + ``` + + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./app.ts"], + compile: { + // tsconfig.json and package.json are disabled by default + autoloadTsconfig: true, // Enable tsconfig.json loading + autoloadPackageJson: true, // Enable package.json loading + + // .env and bunfig.toml are enabled by default + autoloadDotenv: false, // Disable .env loading + autoloadBunfig: false, // Disable bunfig.toml loading + outfile: "./myapp", + }, + }); + ``` + + + + +--- + ## Act as the Bun CLI New in Bun v1.2.16 @@ -259,12 +509,12 @@ console.log(`Server running at http://localhost:${server.port}`);

[index.html example: markup stripped during extraction; only the "Hello World" heading text remains]
- + ``` -```ts app.js icon="file-code" +```ts app.ts icon="file-code" console.log("Hello from the client!"); ``` @@ -278,9 +528,23 @@ body { To build this into a single executable: -```bash terminal icon="terminal" -bun build --compile ./server.ts --outfile myapp -``` + + + ```bash terminal icon="terminal" + bun build --compile ./server.ts --outfile myapp + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./server.ts"], + compile: { + outfile: "./myapp", + }, + }); + ``` + + This creates a self-contained binary that includes: @@ -303,11 +567,25 @@ For more details on building full-stack applications with Bun, see the [full-sta ## Worker -To use workers in a standalone executable, add the worker's entrypoint to the CLI arguments: +To use workers in a standalone executable, add the worker's entrypoint to the build: -```bash terminal icon="terminal" -bun build --compile ./index.ts ./my-worker.ts --outfile myapp -``` + + + ```bash terminal icon="terminal" + bun build --compile ./index.ts ./my-worker.ts --outfile myapp + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./index.ts", "./my-worker.ts"], + compile: { + outfile: "./myapp", + }, + }); + ``` + + Then, reference the worker in your code: @@ -351,39 +629,148 @@ cd /home/me/Desktop ## Embed assets & files -Standalone executables support embedding files. +Standalone executables support embedding files directly into the binary. This lets you ship a single executable that contains images, JSON configs, templates, or any other assets your application needs. -To embed files into an executable with `bun build --compile`, import the file in your code. +### How it works + +Use the `with { type: "file" }` [import attribute](https://github.com/tc39/proposal-import-attributes) to embed a file: + +```ts index.ts icon="/icons/typescript.svg" +import icon from "./icon.png" with { type: "file" }; + +console.log(icon); +// During development: "./icon.png" +// After compilation: "$bunfs/icon-a1b2c3d4.png" (internal path) +``` + +The import returns a **path string** that points to the embedded file. At build time, Bun: + +1. Reads the file contents +2. Embeds the data into the executable +3. Replaces the import with an internal path (prefixed with `$bunfs/`) + +You can then read this embedded file using `Bun.file()` or Node.js `fs` APIs. + +### Reading embedded files with Bun.file() + +`Bun.file()` is the recommended way to read embedded files: ```ts index.ts icon="/icons/typescript.svg" -// this becomes an internal file path import icon from "./icon.png" with { type: "file" }; import { file } from "bun"; +// Get file contents as different types +const bytes = await file(icon).arrayBuffer(); // ArrayBuffer +const text = await file(icon).text(); // string (for text files) +const blob = file(icon); // Blob + +// Stream the file in a response export default { fetch(req) { - // Embedded files can be streamed from Response objects - return new Response(file(icon)); + return new Response(file(icon), { + headers: { "Content-Type": "image/png" }, + }); }, }; ``` -Embedded files can be read using `Bun.file`'s functions or the Node.js `fs.readFile` function (in `"node:fs"`). 
+### Reading embedded files with Node.js fs -For example, to read the contents of the embedded file: +Embedded files work seamlessly with Node.js file system APIs: ```ts index.ts icon="/icons/typescript.svg" import icon from "./icon.png" with { type: "file" }; +import config from "./config.json" with { type: "file" }; +import { readFileSync, promises as fs } from "node:fs"; + +// Synchronous read +const iconBuffer = readFileSync(icon); + +// Async read +const configData = await fs.readFile(config, "utf-8"); +const parsed = JSON.parse(configData); + +// Check file stats +const stats = await fs.stat(icon); +console.log(`Icon size: ${stats.size} bytes`); +``` + +### Practical examples + +#### Embedding a JSON config file + +```ts index.ts icon="/icons/typescript.svg" +import configPath from "./default-config.json" with { type: "file" }; import { file } from "bun"; -const bytes = await file(icon).arrayBuffer(); -// await fs.promises.readFile(icon) -// fs.readFileSync(icon) +// Load the embedded default configuration +const defaultConfig = await file(configPath).json(); + +// Merge with user config if it exists +const userConfig = await file("./user-config.json") + .json() + .catch(() => ({})); +const config = { ...defaultConfig, ...userConfig }; +``` + +#### Serving static assets in an HTTP server + +Use `static` routes in `Bun.serve()` for efficient static file serving: + +```ts server.ts icon="/icons/typescript.svg" +import favicon from "./favicon.ico" with { type: "file" }; +import logo from "./logo.png" with { type: "file" }; +import styles from "./styles.css" with { type: "file" }; +import { file, serve } from "bun"; + +serve({ + static: { + "/favicon.ico": file(favicon), + "/logo.png": file(logo), + "/styles.css": file(styles), + }, + fetch(req) { + return new Response("Not found", { status: 404 }); + }, +}); +``` + +Bun automatically handles Content-Type headers and caching for static routes. + +#### Embedding templates + +```ts index.ts icon="/icons/typescript.svg" +import templatePath from "./email-template.html" with { type: "file" }; +import { file } from "bun"; + +async function sendWelcomeEmail(user: { name: string; email: string }) { + const template = await file(templatePath).text(); + const html = template.replace("{{name}}", user.name).replace("{{email}}", user.email); + + // Send email with the rendered template... +} +``` + +#### Embedding binary files + +```ts index.ts icon="/icons/typescript.svg" +import wasmPath from "./processor.wasm" with { type: "file" }; +import fontPath from "./font.ttf" with { type: "file" }; +import { file } from "bun"; + +// Load a WebAssembly module +const wasmBytes = await file(wasmPath).arrayBuffer(); +const wasmModule = await WebAssembly.instantiate(wasmBytes); + +// Read binary font data +const fontData = await file(fontPath).bytes(); ``` ### Embed SQLite databases -If your application wants to embed a SQLite database, set `type: "sqlite"` in the import attribute and the `embed` attribute to `"true"`. +If your application wants to embed a SQLite database into the compiled executable, set `type: "sqlite"` in the import attribute and the `embed` attribute to `"true"`. + +The database file must already exist on disk. 
Then, import it in your code: ```ts index.ts icon="/icons/typescript.svg" import myEmbeddedDb from "./my.db" with { type: "sqlite", embed: "true" }; @@ -391,7 +778,19 @@ import myEmbeddedDb from "./my.db" with { type: "sqlite", embed: "true" }; console.log(myEmbeddedDb.query("select * from users LIMIT 1").get()); ``` -This database is read-write, but all changes are lost when the executable exits (since it's stored in memory). +Finally, compile it into a standalone executable: + +```bash terminal icon="terminal" +bun build --compile ./index.ts --outfile mycli +``` + + + The database file must exist on disk when you run `bun build --compile`. The `embed: "true"` attribute tells the + bundler to include the database contents inside the compiled executable. When running normally with `bun run`, the + database file is loaded from disk just like a regular SQLite import. + + +In the compiled executable, the embedded database is read-write, but all changes are lost when the executable exits (since it's stored in memory). ### Embed N-API Addons @@ -407,11 +806,32 @@ Unfortunately, if you're using `@mapbox/node-pre-gyp` or other similar tools, yo ### Embed directories -To embed a directory with `bun build --compile`, use a shell glob in your `bun build` command: +To embed a directory with `bun build --compile`, include file patterns in your build: -```bash terminal icon="terminal" -bun build --compile ./index.ts ./public/**/*.png -``` + + + ```bash terminal icon="terminal" + bun build --compile ./index.ts ./public/**/*.png + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + import { Glob } from "bun"; + + // Expand glob pattern to file list + const glob = new Glob("./public/**/*.png"); + const pngFiles = Array.from(glob.scanSync(".")); + + await Bun.build({ + entrypoints: ["./index.ts", ...pngFiles], + compile: { + outfile: "./myapp", + }, + }); + ``` + + + Then, you can reference the files in your code: @@ -431,47 +851,174 @@ This is honestly a workaround, and we expect to improve this in the future with ### Listing embedded files -To get a list of all embedded files, use `Bun.embeddedFiles`: +`Bun.embeddedFiles` gives you access to all embedded files as `Blob` objects: ```ts index.ts icon="/icons/typescript.svg" import "./icon.png" with { type: "file" }; +import "./data.json" with { type: "file" }; +import "./template.html" with { type: "file" }; import { embeddedFiles } from "bun"; -console.log(embeddedFiles[0].name); // `icon-${hash}.png` +// List all embedded files +for (const blob of embeddedFiles) { + console.log(`${blob.name} - ${blob.size} bytes`); +} +// Output: +// icon-a1b2c3d4.png - 4096 bytes +// data-e5f6g7h8.json - 256 bytes +// template-i9j0k1l2.html - 1024 bytes ``` -`Bun.embeddedFiles` returns an array of `Blob` objects which you can use to get the size, contents, and other properties of the files. +Each item in `Bun.embeddedFiles` is a `Blob` with a `name` property: ```ts -embeddedFiles: Blob[] +embeddedFiles: ReadonlyArray; ``` -The list of embedded files excludes bundled source code like `.ts` and `.js` files. 
+This is useful for dynamically serving all embedded assets using `static` routes: + +```ts server.ts icon="/icons/typescript.svg" +import "./public/favicon.ico" with { type: "file" }; +import "./public/logo.png" with { type: "file" }; +import "./public/styles.css" with { type: "file" }; +import { embeddedFiles, serve } from "bun"; + +// Build static routes from all embedded files +const staticRoutes: Record = {}; +for (const blob of embeddedFiles) { + // Remove hash from filename: "icon-a1b2c3d4.png" -> "icon.png" + const name = blob.name.replace(/-[a-f0-9]+\./, "."); + staticRoutes[`/${name}`] = blob; +} + +serve({ + static: staticRoutes, + fetch(req) { + return new Response("Not found", { status: 404 }); + }, +}); +``` + + + `Bun.embeddedFiles` excludes bundled source code (`.ts`, `.js`, etc.) to help protect your application's source. + #### Content hash By default, embedded files have a content hash appended to their name. This is useful for situations where you want to serve the file from a URL or CDN and have fewer cache invalidation issues. But sometimes, this is unexpected and you might want the original name instead: -To disable the content hash, pass `--asset-naming` to `bun build --compile` like this: +To disable the content hash, configure asset naming: -```bash terminal icon="terminal" -bun build --compile --asset-naming="[name].[ext]" ./index.ts -``` + + + ```bash terminal icon="terminal" + bun build --compile --asset-naming="[name].[ext]" ./index.ts + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./index.ts"], + compile: { + outfile: "./myapp", + }, + naming: { + asset: "[name].[ext]", + }, + }); + ``` + + --- ## Minification -To trim down the size of the executable a little, pass `--minify` to `bun build --compile`. This uses Bun's minifier to reduce the code size. Overall though, Bun's binary is still way too big and we need to make it smaller. +To trim down the size of the executable, enable minification: + + + + ```bash terminal icon="terminal" + bun build --compile --minify ./index.ts --outfile myapp + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./index.ts"], + compile: { + outfile: "./myapp", + }, + minify: true, // Enable all minification + }); + + // Or granular control: + await Bun.build({ + entrypoints: ["./index.ts"], + compile: { + outfile: "./myapp", + }, + minify: { + whitespace: true, + syntax: true, + identifiers: true, + }, + }); + ``` + + + + +This uses Bun's minifier to reduce the code size. Overall though, Bun's binary is still way too big and we need to make it smaller. --- ## Windows-specific flags -When compiling a standalone executable on Windows, there are two platform-specific options that can be used to customize metadata on the generated `.exe` file: +When compiling a standalone executable on Windows, there are platform-specific options to customize metadata on the generated `.exe` file: -- `--windows-icon=path/to/icon.ico` to customize the executable file icon. -- `--windows-hide-console` to disable the background terminal, which can be used for applications that do not need a TTY. 
+ + + ```bash terminal icon="terminal" + # Custom icon + bun build --compile --windows-icon=path/to/icon.ico ./app.ts --outfile myapp + + # Hide console window (for GUI apps) + bun build --compile --windows-hide-console ./app.ts --outfile myapp + ``` + + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./app.ts"], + compile: { + outfile: "./myapp", + windows: { + icon: "./path/to/icon.ico", + hideConsole: true, + // Additional Windows metadata: + title: "My Application", + publisher: "My Company", + version: "1.0.0", + description: "A standalone Windows application", + copyright: "Copyright 2024", + }, + }, + }); + ``` + + + +Available Windows options: + +- `icon` - Path to `.ico` file for the executable icon +- `hideConsole` - Disable the background terminal (for GUI apps) +- `title` - Application title in file properties +- `publisher` - Publisher name in file properties +- `version` - Version string in file properties +- `description` - Description in file properties +- `copyright` - Copyright notice in file properties These flags currently cannot be used when cross-compiling because they depend on Windows APIs. @@ -524,12 +1071,207 @@ codesign -vvv --verify ./myapp --- +## Code splitting + +Standalone executables support code splitting. Use `--compile` with `--splitting` to create an executable that loads code-split chunks at runtime. + + + + ```bash terminal icon="terminal" + bun build --compile --splitting ./src/entry.ts --outdir ./build + ``` + + + ```ts build.ts icon="/icons/typescript.svg" + await Bun.build({ + entrypoints: ["./src/entry.ts"], + compile: true, + splitting: true, + outdir: "./build", + }); + ``` + + + + + +```ts src/entry.ts icon="/icons/typescript.svg" +console.log("Entrypoint loaded"); +const lazy = await import("./lazy.ts"); +lazy.hello(); +``` + +```ts src/lazy.ts icon="/icons/typescript.svg" +export function hello() { + console.log("Lazy module loaded"); +} +``` + + + +```bash terminal icon="terminal" +./build/entry +``` + +```txt +Entrypoint loaded +Lazy module loaded +``` + +--- + +## Using plugins + +Plugins work with standalone executables, allowing you to transform files during the build process: + +```ts build.ts icon="/icons/typescript.svg" +import type { BunPlugin } from "bun"; + +const envPlugin: BunPlugin = { + name: "env-loader", + setup(build) { + build.onLoad({ filter: /\.env\.json$/ }, async args => { + // Transform .env.json files into validated config objects + const env = await Bun.file(args.path).json(); + + return { + contents: `export default ${JSON.stringify(env)};`, + loader: "js", + }; + }); + }, +}; + +await Bun.build({ + entrypoints: ["./cli.ts"], + compile: { + outfile: "./mycli", + }, + plugins: [envPlugin], +}); +``` + +Example use case - embedding environment config at build time: + +```ts cli.ts icon="/icons/typescript.svg" +import config from "./config.env.json"; + +console.log(`Running in ${config.environment} mode`); +console.log(`API endpoint: ${config.apiUrl}`); +``` + +Plugins can perform any transformation: compile YAML/TOML configs, inline SQL queries, generate type-safe API clients, or preprocess templates. Refer to the [plugin documentation](/bundler/plugins) for more details. + +--- + ## Unsupported CLI arguments Currently, the `--compile` flag can only accept a single entrypoint at a time and does not support the following flags: -- `--outdir` — use `outfile` instead. -- `--splitting` +- `--outdir` — use `outfile` instead (except when using with `--splitting`). 
- `--public-path` - `--target=node` or `--target=browser` - `--no-bundle` - we always bundle everything into the executable. + +--- + +## API reference + +The `compile` option in `Bun.build()` accepts three forms: + +```ts title="types" icon="/icons/typescript.svg" +interface BuildConfig { + entrypoints: string[]; + compile: boolean | Bun.Build.Target | CompileBuildOptions; + // ... other BuildConfig options (minify, sourcemap, define, plugins, etc.) +} + +interface CompileBuildOptions { + target?: Bun.Build.Target; // Cross-compilation target + outfile?: string; // Output executable path + execArgv?: string[]; // Runtime arguments (process.execArgv) + autoloadTsconfig?: boolean; // Load tsconfig.json (default: false) + autoloadPackageJson?: boolean; // Load package.json (default: false) + autoloadDotenv?: boolean; // Load .env files (default: true) + autoloadBunfig?: boolean; // Load bunfig.toml (default: true) + windows?: { + icon?: string; // Path to .ico file + hideConsole?: boolean; // Hide console window + title?: string; // Application title + publisher?: string; // Publisher name + version?: string; // Version string + description?: string; // Description + copyright?: string; // Copyright notice + }; +} +``` + +Usage forms: + +```ts icon="/icons/typescript.svg" +// Simple boolean - compile for current platform (uses entrypoint name as output) +compile: true + +// Target string - cross-compile (uses entrypoint name as output) +compile: "bun-linux-x64" + +// Full options object - specify outfile and other options +compile: { + target: "bun-linux-x64", + outfile: "./myapp", +} +``` + +### Supported targets + +```ts title="Bun.Build.Target" icon="/icons/typescript.svg" +type Target = + | "bun-darwin-x64" + | "bun-darwin-x64-baseline" + | "bun-darwin-arm64" + | "bun-linux-x64" + | "bun-linux-x64-baseline" + | "bun-linux-x64-modern" + | "bun-linux-arm64" + | "bun-linux-x64-musl" + | "bun-linux-arm64-musl" + | "bun-windows-x64" + | "bun-windows-x64-baseline" + | "bun-windows-x64-modern"; +``` + +### Complete example + +```ts build.ts icon="/icons/typescript.svg" +import type { BunPlugin } from "bun"; + +const myPlugin: BunPlugin = { + name: "my-plugin", + setup(build) { + // Plugin implementation + }, +}; + +const result = await Bun.build({ + entrypoints: ["./src/cli.ts"], + compile: { + target: "bun-linux-x64", + outfile: "./dist/mycli", + execArgv: ["--smol"], + autoloadDotenv: false, + autoloadBunfig: false, + }, + minify: true, + sourcemap: "linked", + bytecode: true, + define: { + "process.env.NODE_ENV": JSON.stringify("production"), + VERSION: JSON.stringify("1.0.0"), + }, + plugins: [myPlugin], +}); + +if (result.success) { + console.log("Build successful:", result.outputs[0].path); +} +``` diff --git a/docs/bundler/fullstack.mdx b/docs/bundler/fullstack.mdx index 937d41d39f..dce67e59a0 100644 --- a/docs/bundler/fullstack.mdx +++ b/docs/bundler/fullstack.mdx @@ -427,8 +427,8 @@ This will allow you to use TailwindCSS utility classes in your HTML and CSS file - + @@ -448,8 +448,8 @@ Alternatively, you can import TailwindCSS in your CSS file: - + @@ -492,6 +492,28 @@ Bun will lazily resolve and load each plugin and use them to bundle your routes. the CLI. +## Inline Environment Variables + +Bun can replace `process.env.*` references in your frontend JavaScript and TypeScript with their actual values at build time. 
Configure the `env` option in your `bunfig.toml`: + +```toml title="bunfig.toml" icon="settings" +[serve.static] +env = "PUBLIC_*" # only inline env vars starting with PUBLIC_ (recommended) +# env = "inline" # inline all environment variables +# env = "disable" # disable env var replacement (default) +``` + + + This only works with literal `process.env.FOO` references, not `import.meta.env` or indirect access like `const env = + process.env; env.FOO`. + +If an environment variable is not set, you may see runtime errors like `ReferenceError: process + is not defined` in the browser. + + + +See the [HTML & static sites documentation](/bundler/html-static#inline-environment-variables) for more details on build-time configuration and examples. + ## How It Works Bun uses `HTMLRewriter` to scan for `"); --- -See [Docs > API > Utils](https://bun.com/docs/api/utils) for more useful utilities. +See [Docs > API > Utils](/runtime/utils) for more useful utilities. diff --git a/docs/guides/util/file-url-to-path.mdx b/docs/guides/util/file-url-to-path.mdx index 1ff86fc31b..3990f3befb 100644 --- a/docs/guides/util/file-url-to-path.mdx +++ b/docs/guides/util/file-url-to-path.mdx @@ -13,4 +13,4 @@ Bun.fileURLToPath("file:///path/to/file.txt"); --- -See [Docs > API > Utils](https://bun.com/docs/api/utils) for more useful utilities. +See [Docs > API > Utils](/runtime/utils) for more useful utilities. diff --git a/docs/guides/util/gzip.mdx b/docs/guides/util/gzip.mdx index 84b9d97067..0a3763f9ed 100644 --- a/docs/guides/util/gzip.mdx +++ b/docs/guides/util/gzip.mdx @@ -17,4 +17,4 @@ const decompressed = Bun.gunzipSync(compressed); --- -See [Docs > API > Utils](https://bun.com/docs/api/utils) for more useful utilities. +See [Docs > API > Utils](/runtime/utils) for more useful utilities. diff --git a/docs/guides/util/hash-a-password.mdx b/docs/guides/util/hash-a-password.mdx index d45d6da6e2..ef01dde286 100644 --- a/docs/guides/util/hash-a-password.mdx +++ b/docs/guides/util/hash-a-password.mdx @@ -53,4 +53,4 @@ const isMatch = await Bun.password.verify(password, hash); --- -See [Docs > API > Hashing](https://bun.com/docs/api/hashing#bun-password) for complete documentation. +See [Docs > API > Hashing](/runtime/hashing#bun-password) for complete documentation. diff --git a/docs/guides/util/import-meta-dir.mdx b/docs/guides/util/import-meta-dir.mdx index 2890273034..8a2355cd89 100644 --- a/docs/guides/util/import-meta-dir.mdx +++ b/docs/guides/util/import-meta-dir.mdx @@ -4,7 +4,7 @@ sidebarTitle: import.meta.dir mode: center --- -Bun provides a handful of module-specific utilities on the [`import.meta`](https://bun.com/docs/api/import-meta) object. +Bun provides a handful of module-specific utilities on the [`import.meta`](/runtime/module-resolution#import-meta) object. ```ts /a/b/c.ts icon="/icons/typescript.svg" import.meta.dir; // => "/a/b" @@ -12,4 +12,4 @@ import.meta.dir; // => "/a/b" --- -See [Docs > API > import.meta](https://bun.com/docs/api/import-meta) for complete documentation. +See [Docs > API > import.meta](/runtime/module-resolution#import-meta) for complete documentation. diff --git a/docs/guides/util/import-meta-file.mdx b/docs/guides/util/import-meta-file.mdx index cf16226b35..01edbafa66 100644 --- a/docs/guides/util/import-meta-file.mdx +++ b/docs/guides/util/import-meta-file.mdx @@ -4,7 +4,7 @@ sidebarTitle: import.meta.file mode: center --- -Bun provides a handful of module-specific utilities on the [`import.meta`](https://bun.com/docs/api/import-meta) object. 
Use `import.meta.file` to retrieve the name of the current file. +Bun provides a handful of module-specific utilities on the [`import.meta`](/runtime/module-resolution#import-meta) object. Use `import.meta.file` to retrieve the name of the current file. ```ts /a/b/c.ts icon="/icons/typescript.svg" import.meta.file; // => "c.ts" @@ -12,4 +12,4 @@ import.meta.file; // => "c.ts" --- -See [Docs > API > import.meta](https://bun.com/docs/api/import-meta) for complete documentation. +See [Docs > API > import.meta](/runtime/module-resolution#import-meta) for complete documentation. diff --git a/docs/guides/util/import-meta-path.mdx b/docs/guides/util/import-meta-path.mdx index 3e1f20aec8..80e92bfd1f 100644 --- a/docs/guides/util/import-meta-path.mdx +++ b/docs/guides/util/import-meta-path.mdx @@ -4,7 +4,7 @@ sidebarTitle: import.meta.path mode: center --- -Bun provides a handful of module-specific utilities on the [`import.meta`](https://bun.com/docs/api/import-meta) object. Use `import.meta.path` to retrieve the absolute path of the current file. +Bun provides a handful of module-specific utilities on the [`import.meta`](/runtime/module-resolution#import-meta) object. Use `import.meta.path` to retrieve the absolute path of the current file. ```ts /a/b/c.ts icon="/icons/typescript.svg" import.meta.path; // => "/a/b/c.ts" @@ -12,4 +12,4 @@ import.meta.path; // => "/a/b/c.ts" --- -See [Docs > API > import.meta](https://bun.com/docs/api/import-meta) for complete documentation. +See [Docs > API > import.meta](/runtime/module-resolution#import-meta) for complete documentation. diff --git a/docs/guides/util/javascript-uuid.mdx b/docs/guides/util/javascript-uuid.mdx index 1d9a0ea0f6..1c0d4bfa6b 100644 --- a/docs/guides/util/javascript-uuid.mdx +++ b/docs/guides/util/javascript-uuid.mdx @@ -8,7 +8,7 @@ Use `crypto.randomUUID()` to generate a UUID v4. This API works in Bun, Node.js, ```ts crypto.randomUUID(); -// => "123e4567-e89b-12d3-a456-426614174000" +// => "123e4567-e89b-42d3-a456-426614174000" ``` --- @@ -22,4 +22,4 @@ Bun.randomUUIDv7(); --- -See [Docs > API > Utils](https://bun.com/docs/api/utils) for more useful utilities. +See [Docs > API > Utils](/runtime/utils) for more useful utilities. diff --git a/docs/guides/util/main.mdx b/docs/guides/util/main.mdx index f308c64a7c..ea41dbf2d1 100644 --- a/docs/guides/util/main.mdx +++ b/docs/guides/util/main.mdx @@ -40,4 +40,4 @@ bun run foo.ts --- -See [Docs > API > Utils](https://bun.com/docs/api/utils) for more useful utilities. +See [Docs > API > Utils](/runtime/utils) for more useful utilities. diff --git a/docs/guides/util/path-to-file-url.mdx b/docs/guides/util/path-to-file-url.mdx index aac29788df..65ffe7b201 100644 --- a/docs/guides/util/path-to-file-url.mdx +++ b/docs/guides/util/path-to-file-url.mdx @@ -13,4 +13,4 @@ Bun.pathToFileURL("/path/to/file.txt"); --- -See [Docs > API > Utils](https://bun.com/docs/api/utils) for more useful utilities. +See [Docs > API > Utils](/runtime/utils) for more useful utilities. diff --git a/docs/guides/util/sleep.mdx b/docs/guides/util/sleep.mdx index 30daedc805..738738c213 100644 --- a/docs/guides/util/sleep.mdx +++ b/docs/guides/util/sleep.mdx @@ -21,4 +21,4 @@ await new Promise(resolve => setTimeout(resolve, ms)); --- -See [Docs > API > Utils](https://bun.com/docs/api/utils) for more useful utilities. +See [Docs > API > Utils](/runtime/utils) for more useful utilities. 
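If you need to block synchronously instead, for example in a short script where awaiting is inconvenient, Bun also provides `Bun.sleepSync`. A small sketch:

```ts
// Blocks the current thread for one second; use sparingly
Bun.sleepSync(1000);
```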
diff --git a/docs/guides/util/upgrade.mdx b/docs/guides/util/upgrade.mdx new file mode 100644 index 0000000000..dea2faacd1 --- /dev/null +++ b/docs/guides/util/upgrade.mdx @@ -0,0 +1,93 @@ +--- +title: Upgrade Bun to the latest version +sidebarTitle: Upgrade Bun +mode: center +--- + +Bun can upgrade itself using the built-in `bun upgrade` command. This is the fastest way to get the latest features and bug fixes. + +```bash terminal icon="terminal" +bun upgrade +``` + +This downloads and installs the latest stable version of Bun, replacing the currently installed version. + +To see the current version of Bun, run `bun --version`. + +--- + +## Verify the upgrade + +After upgrading, verify the new version: + +```bash terminal icon="terminal" +bun --version +# Output: 1.x.y + +# See the exact commit of the Bun binary +bun --revision +# Output: 1.x.y+abc123def +``` + +--- + +## Upgrade to canary builds + +Canary builds are automatically released on every commit to the `main` branch. These are untested but useful for trying new features or verifying bug fixes before they're released. + +```bash terminal icon="terminal" +bun upgrade --canary +``` + +Canary builds are not recommended for production use. They may contain bugs or breaking changes. + +--- + +## Switch back to stable + +If you're on a canary build and want to return to the latest stable release: + +```bash terminal icon="terminal" +bun upgrade --stable +``` + +--- + +## Install a specific version + +To install a specific version of Bun, use the install script with a version tag: + + + + ```bash terminal icon="terminal" + curl -fsSL https://bun.sh/install | bash -s "bun-v1.3.3" + ``` + + + ```powershell PowerShell icon="windows" + iex "& {$(irm https://bun.sh/install.ps1)} -Version 1.3.3" + ``` + + + +--- + +## Package manager users + +If you installed Bun via a package manager, use that package manager to upgrade instead of `bun upgrade` to avoid conflicts. + + +**Homebrew users**
+To avoid conflicts with Homebrew, use `brew upgrade bun` instead. + +**Scoop users**
+To avoid conflicts with Scoop, use `scoop update bun` instead. + +
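If you're not sure how Bun was installed, checking the binary's location is a quick way to tell. A hypothetical example (paths vary by system):

```bash terminal icon="terminal"
which bun
# ~/.bun/bin/bun        -> installed with the official script; use `bun upgrade`
# /opt/homebrew/bin/bun -> managed by Homebrew; use `brew upgrade bun`
```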
+ +--- + +## See also + +- [Installation](/installation) — Install Bun for the first time +- [Update packages](/pm/cli/update) — Update dependencies to latest versions diff --git a/docs/guides/util/version.mdx b/docs/guides/util/version.mdx index 73b3942744..6348fbf9e5 100644 --- a/docs/guides/util/version.mdx +++ b/docs/guides/util/version.mdx @@ -7,7 +7,7 @@ mode: center Get the current version of Bun in a semver format. ```ts index.ts icon="/icons/typescript.svg" -Bun.version; // => "1.3.2" +Bun.version; // => "1.3.3" ``` --- @@ -20,4 +20,4 @@ Bun.revision; // => "49231b2cb9aa48497ab966fc0bb6b742dacc4994" --- -See [Docs > API > Utils](https://bun.com/docs/api/utils) for more useful utilities. +See [Docs > API > Utils](/runtime/utils) for more useful utilities. diff --git a/docs/guides/util/which-path-to-executable-bin.mdx b/docs/guides/util/which-path-to-executable-bin.mdx index fe9cda53a6..e524561089 100644 --- a/docs/guides/util/which-path-to-executable-bin.mdx +++ b/docs/guides/util/which-path-to-executable-bin.mdx @@ -14,4 +14,4 @@ Bun.which("bun"); // => "/home/user/.bun/bin/bun" --- -See [Docs > API > Utils](https://bun.com/docs/api/utils#bun-which) for complete documentation. +See [Docs > API > Utils](/runtime/utils#bun-which) for complete documentation. diff --git a/docs/guides/websocket/context.mdx b/docs/guides/websocket/context.mdx index d313de2605..3104ec9a12 100644 --- a/docs/guides/websocket/context.mdx +++ b/docs/guides/websocket/context.mdx @@ -6,10 +6,10 @@ mode: center When building a WebSocket server, it's typically necessary to store some identifying information or context associated with each connected client. -With [Bun.serve()](https://bun.com/docs/api/websockets contextual-data), this "contextual data" is set when the connection is initially upgraded by passing a `data` parameter in the `server.upgrade()` call. +With [Bun.serve()](/runtime/http/websockets#contextual-data), this "contextual data" is set when the connection is initially upgraded by passing a `data` parameter in the `server.upgrade()` call. ```ts server.ts icon="/icons/typescript.svg" -Bun.serve<{ socketId: number }>({ +Bun.serve({ fetch(req, server) { const success = server.upgrade(req, { data: { @@ -22,6 +22,9 @@ Bun.serve<{ socketId: number }>({ // ... }, websocket: { + // TypeScript: specify the type of ws.data like this + data: {} as { socketId: number }, + // define websocket handlers async message(ws, message) { // the contextual data is available as the `data` property @@ -43,8 +46,7 @@ type WebSocketData = { userId: string; }; -// TypeScript: specify the type of `data` -Bun.serve({ +Bun.serve({ async fetch(req, server) { // use a library to parse cookies const cookies = parseCookies(req.headers.get("Cookie")); @@ -62,6 +64,9 @@ Bun.serve({ if (upgraded) return undefined; }, websocket: { + // TypeScript: specify the type of ws.data like this + data: {} as WebSocketData, + async message(ws, message) { // save the message to a database await saveMessageToDatabase({ diff --git a/docs/guides/websocket/pubsub.mdx b/docs/guides/websocket/pubsub.mdx index b70c094046..056d37b838 100644 --- a/docs/guides/websocket/pubsub.mdx +++ b/docs/guides/websocket/pubsub.mdx @@ -9,7 +9,7 @@ Bun's server-side `WebSocket` API provides a native pub-sub API. Sockets can be This code snippet implements a simple single-channel chat server. 
```ts server.ts icon="/icons/typescript.svg" -const server = Bun.serve<{ username: string }>({ +const server = Bun.serve({ fetch(req, server) { const cookies = req.headers.get("cookie"); const username = getUsernameFromCookies(cookies); @@ -19,6 +19,9 @@ const server = Bun.serve<{ username: string }>({ return new Response("Hello world"); }, websocket: { + // TypeScript: specify the type of ws.data like this + data: {} as { username: string }, + open(ws) { const msg = `${ws.data.username} has entered the chat`; ws.subscribe("the-group-chat"); diff --git a/docs/guides/websocket/simple.mdx b/docs/guides/websocket/simple.mdx index 1a2dd2dc15..6239d13365 100644 --- a/docs/guides/websocket/simple.mdx +++ b/docs/guides/websocket/simple.mdx @@ -4,12 +4,12 @@ sidebarTitle: Simple server mode: center --- -Start a simple WebSocket server using [`Bun.serve`](https://bun.com/docs/api/http). +Start a simple WebSocket server using [`Bun.serve`](/runtime/http/server). Inside `fetch`, we attempt to upgrade incoming `ws:` or `wss:` requests to WebSocket connections. ```ts server.ts icon="/icons/typescript.svg" -const server = Bun.serve<{ authToken: string }>({ +const server = Bun.serve({ fetch(req, server) { const success = server.upgrade(req); if (success) { @@ -22,6 +22,9 @@ const server = Bun.serve<{ authToken: string }>({ return new Response("Hello world!"); }, websocket: { + // TypeScript: specify the type of ws.data like this + data: {} as { authToken: string }, + // this is called when a message is received async message(ws, message) { console.log(`Received ${message}`); diff --git a/docs/guides/write-file/basic.mdx b/docs/guides/write-file/basic.mdx index 4edd4207a2..6f3eec5287 100644 --- a/docs/guides/write-file/basic.mdx +++ b/docs/guides/write-file/basic.mdx @@ -6,7 +6,7 @@ mode: center This code snippet writes a string to disk at a particular _absolute path_. -It uses the fast [`Bun.write()`](https://bun.com/docs/api/file-io#writing-files-bun-write) API to efficiently write data to disk. The first argument is a _destination_; the second is the _data_ to write. +It uses the fast [`Bun.write()`](/runtime/file-io#writing-files-bun-write) API to efficiently write data to disk. The first argument is a _destination_; the second is the _data_ to write. ```ts const path = "/path/to/file.txt"; @@ -43,4 +43,4 @@ const bytes = await Bun.write(path, "Lorem ipsum"); --- -See [Docs > API > File I/O](https://bun.com/docs/api/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. +See [Docs > API > File I/O](/runtime/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. diff --git a/docs/guides/write-file/blob.mdx b/docs/guides/write-file/blob.mdx index ccaaaefc78..78810b740d 100644 --- a/docs/guides/write-file/blob.mdx +++ b/docs/guides/write-file/blob.mdx @@ -6,7 +6,7 @@ mode: center This code snippet writes a `Blob` to disk at a particular path. -It uses the fast [`Bun.write()`](https://bun.com/docs/api/file-io#writing-files-bun-write) API to efficiently write data to disk. The first argument is a _destination_, like an absolute path or `BunFile` instance. The second argument is the _data_ to write. +It uses the fast [`Bun.write()`](/runtime/file-io#writing-files-bun-write) API to efficiently write data to disk. The first argument is a _destination_, like an absolute path or `BunFile` instance. The second argument is the _data_ to write. 
```ts const path = "/path/to/file.txt"; @@ -27,4 +27,4 @@ await Bun.write(path, data); --- -See [Docs > API > File I/O](https://bun.com/docs/api/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. +See [Docs > API > File I/O](/runtime/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. diff --git a/docs/guides/write-file/cat.mdx b/docs/guides/write-file/cat.mdx index 44b5d91cf0..92ca362e04 100644 --- a/docs/guides/write-file/cat.mdx +++ b/docs/guides/write-file/cat.mdx @@ -4,7 +4,7 @@ sidebarTitle: Write file to stdout mode: center --- -Bun exposes `stdout` as a `BunFile` with the `Bun.stdout` property. This can be used as a destination for [`Bun.write()`](https://bun.com/docs/api/file-io#writing-files-bun-write). +Bun exposes `stdout` as a `BunFile` with the `Bun.stdout` property. This can be used as a destination for [`Bun.write()`](/runtime/file-io#writing-files-bun-write). This code writes a file to `stdout` similar to the `cat` command in Unix. @@ -16,4 +16,4 @@ await Bun.write(Bun.stdout, file); --- -See [Docs > API > File I/O](https://bun.com/docs/api/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. +See [Docs > API > File I/O](/runtime/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. diff --git a/docs/guides/write-file/file-cp.mdx b/docs/guides/write-file/file-cp.mdx index d43d5f7225..d8eb08d67c 100644 --- a/docs/guides/write-file/file-cp.mdx +++ b/docs/guides/write-file/file-cp.mdx @@ -6,7 +6,7 @@ mode: center This code snippet copies a file to another location on disk. -It uses the fast [`Bun.write()`](https://bun.com/docs/api/file-io#writing-files-bun-write) API to efficiently write data to disk. The first argument is a _destination_, like an absolute path or `BunFile` instance. The second argument is the _data_ to write. +It uses the fast [`Bun.write()`](/runtime/file-io#writing-files-bun-write) API to efficiently write data to disk. The first argument is a _destination_, like an absolute path or `BunFile` instance. The second argument is the _data_ to write. ```ts const file = Bun.file("/path/to/original.txt"); @@ -15,4 +15,4 @@ await Bun.write("/path/to/copy.txt", file); --- -See [Docs > API > File I/O](https://bun.com/docs/api/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. +See [Docs > API > File I/O](/runtime/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. diff --git a/docs/guides/write-file/filesink.mdx b/docs/guides/write-file/filesink.mdx index 5414ed933f..68333e5445 100644 --- a/docs/guides/write-file/filesink.mdx +++ b/docs/guides/write-file/filesink.mdx @@ -51,4 +51,4 @@ writer.end(); --- -Full documentation: [FileSink](https://bun.com/docs/api/file-io#incremental-writing-with-filesink). +Full documentation: [FileSink](/runtime/file-io#incremental-writing-with-filesink). diff --git a/docs/guides/write-file/response.mdx b/docs/guides/write-file/response.mdx index 2f0721623a..20e04ff69c 100644 --- a/docs/guides/write-file/response.mdx +++ b/docs/guides/write-file/response.mdx @@ -6,7 +6,7 @@ mode: center This code snippet writes a `Response` to disk at a particular path. Bun will consume the `Response` body according to its `Content-Type` header. -It uses the fast [`Bun.write()`](https://bun.com/docs/api/file-io#writing-files-bun-write) API to efficiently write data to disk. The first argument is a _destination_, like an absolute path or `BunFile` instance. The second argument is the _data_ to write. 
+It uses the fast [`Bun.write()`](/runtime/file-io#writing-files-bun-write) API to efficiently write data to disk. The first argument is a _destination_, like an absolute path or `BunFile` instance. The second argument is the _data_ to write. ```ts const result = await fetch("https://bun.com"); @@ -16,4 +16,4 @@ await Bun.write(path, result); --- -See [Docs > API > File I/O](https://bun.com/docs/api/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. +See [Docs > API > File I/O](/runtime/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. diff --git a/docs/guides/write-file/stdout.mdx b/docs/guides/write-file/stdout.mdx index 1e980c0947..3c1d9fd681 100644 --- a/docs/guides/write-file/stdout.mdx +++ b/docs/guides/write-file/stdout.mdx @@ -12,7 +12,7 @@ console.log("Lorem ipsum"); --- -For more advanced use cases, Bun exposes `stdout` as a `BunFile` via the `Bun.stdout` property. This can be used as a destination for [`Bun.write()`](https://bun.com/docs/api/file-io#writing-files-bun-write). +For more advanced use cases, Bun exposes `stdout` as a `BunFile` via the `Bun.stdout` property. This can be used as a destination for [`Bun.write()`](/runtime/file-io#writing-files-bun-write). ```ts await Bun.write(Bun.stdout, "Lorem ipsum"); @@ -20,4 +20,4 @@ await Bun.write(Bun.stdout, "Lorem ipsum"); --- -See [Docs > API > File I/O](https://bun.com/docs/api/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. +See [Docs > API > File I/O](/runtime/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. diff --git a/docs/guides/write-file/stream.mdx b/docs/guides/write-file/stream.mdx index 739c60c320..1c13bd5c48 100644 --- a/docs/guides/write-file/stream.mdx +++ b/docs/guides/write-file/stream.mdx @@ -4,7 +4,7 @@ sidebarTitle: Write stream mode: center --- -To write a `ReadableStream` to disk, first create a `Response` instance from the stream. This `Response` can then be written to disk using [`Bun.write()`](https://bun.com/docs/api/file-io#writing-files-bun-write). +To write a `ReadableStream` to disk, first create a `Response` instance from the stream. This `Response` can then be written to disk using [`Bun.write()`](/runtime/file-io#writing-files-bun-write). ```ts const stream: ReadableStream = ...; @@ -16,4 +16,4 @@ await Bun.write(path, response); --- -See [Docs > API > File I/O](https://bun.com/docs/api/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. +See [Docs > API > File I/O](/runtime/file-io#writing-files-bun-write) for complete documentation of `Bun.write()`. diff --git a/docs/guides/write-file/unlink.mdx b/docs/guides/write-file/unlink.mdx index d20cc37287..7835f503d2 100644 --- a/docs/guides/write-file/unlink.mdx +++ b/docs/guides/write-file/unlink.mdx @@ -15,4 +15,4 @@ await file.delete(); --- -See [Docs > API > File I/O](https://bun.com/docs/api/file-io#reading-files-bun-file) for complete documentation of `Bun.file()`. +See [Docs > API > File I/O](/runtime/file-io#reading-files-bun-file) for complete documentation of `Bun.file()`. 
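If the file may already be gone, you can check for it first to avoid an error. A small sketch, assuming the path shown is a placeholder:

```ts
const file = Bun.file("/path/to/file.txt");

// Only delete the file if it actually exists
if (await file.exists()) {
  await file.delete();
}
```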
diff --git a/docs/images/templates/bun-nextjs-basic.png b/docs/images/templates/bun-nextjs-basic.png new file mode 100644 index 0000000000..95a1c90370 Binary files /dev/null and b/docs/images/templates/bun-nextjs-basic.png differ diff --git a/docs/images/templates/bun-nextjs-todo.png b/docs/images/templates/bun-nextjs-todo.png new file mode 100644 index 0000000000..4a2d21eded Binary files /dev/null and b/docs/images/templates/bun-nextjs-todo.png differ diff --git a/docs/images/templates/bun-tanstack-basic.png b/docs/images/templates/bun-tanstack-basic.png new file mode 100644 index 0000000000..448e499e88 Binary files /dev/null and b/docs/images/templates/bun-tanstack-basic.png differ diff --git a/docs/images/templates/bun-tanstack-start.png b/docs/images/templates/bun-tanstack-start.png new file mode 100644 index 0000000000..a2c4c5f327 Binary files /dev/null and b/docs/images/templates/bun-tanstack-start.png differ diff --git a/docs/images/templates/bun-tanstack-todo.png b/docs/images/templates/bun-tanstack-todo.png new file mode 100644 index 0000000000..9701e86520 Binary files /dev/null and b/docs/images/templates/bun-tanstack-todo.png differ diff --git a/docs/index.mdx b/docs/index.mdx index f45293b998..471e021af4 100644 --- a/docs/index.mdx +++ b/docs/index.mdx @@ -127,7 +127,7 @@ Bun is designed from the ground-up with today's JavaScript ecosystem in mind. - **Speed**. Bun processes start [4x faster than Node.js](https://twitter.com/jarredsumner/status/1499225725492076544) currently (try it yourself!) - **TypeScript & JSX support**. You can directly execute `.jsx`, `.ts`, and `.tsx` files; Bun's transpiler converts these to vanilla JavaScript before execution. - **ESM & CommonJS compatibility**. The world is moving towards ES modules (ESM), but millions of packages on npm still require CommonJS. Bun recommends ES modules, but supports CommonJS. -- **Web-standard APIs**. Bun implements standard Web APIs like `fetch`, `WebSocket`, and `ReadableStream`. Bun is powered by the JavaScriptCore engine, which is developed by Apple for Safari, so some APIs like [`Headers`](https://developer.mozilla.org/en-US/Web/API/Headers) and [`URL`](https://developer.mozilla.org/en-US/Web/API/URL) directly use [Safari's implementation](https://github.com/oven-sh/bun/blob/HEAD/src/bun.js/bindings/webcore/JSFetchHeaders.cpp). +- **Web-standard APIs**. Bun implements standard Web APIs like `fetch`, `WebSocket`, and `ReadableStream`. Bun is powered by the JavaScriptCore engine, which is developed by Apple for Safari, so some APIs like [`Headers`](https://developer.mozilla.org/en-US/docs/Web/API/Headers) and [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL) directly use [Safari's implementation](https://github.com/oven-sh/bun/blob/HEAD/src/bun.js/bindings/webcore/JSFetchHeaders.cpp). - **Node.js compatibility**. In addition to supporting Node-style module resolution, Bun aims for full compatibility with built-in Node.js globals (`process`, `Buffer`) and modules (`path`, `fs`, `http`, etc.) _This is an ongoing effort that is not complete._ Refer to the [compatibility page](/runtime/nodejs-compat) for the current status. Bun is more than a runtime. The long-term goal is to be a cohesive, infrastructural toolkit for building apps with JavaScript/TypeScript, including a package manager, transpiler, bundler, script runner, test runner, and more. 
diff --git a/docs/installation.mdx b/docs/installation.mdx index 4f518c3033..99f117bcd6 100644 --- a/docs/installation.mdx +++ b/docs/installation.mdx @@ -1,6 +1,6 @@ --- title: Installation -description: Install Bun +description: Install Bun with npm, Homebrew, Docker, or the official script. --- ## Overview @@ -209,7 +209,7 @@ Since Bun is a single binary, you can install older versions by re-running the i To install a specific version, pass the git tag to the install script: ```bash terminal icon="terminal" - curl -fsSL https://bun.com/install | bash -s "bun-v1.3.2" + curl -fsSL https://bun.com/install | bash -s "bun-v1.3.3" ``` @@ -217,7 +217,7 @@ Since Bun is a single binary, you can install older versions by re-running the i On Windows, pass the version number to the PowerShell install script: ```powershell PowerShell icon="windows" - iex "& {$(irm https://bun.com/install.ps1)} -Version 1.3.2" + iex "& {$(irm https://bun.com/install.ps1)} -Version 1.3.3" ``` diff --git a/docs/normalize-internal-links.js b/docs/normalize-internal-links.js index 3fd5d44504..97a1957b4d 100644 --- a/docs/normalize-internal-links.js +++ b/docs/normalize-internal-links.js @@ -15,7 +15,6 @@ if (element.getAttribute("rel") === "noreferrer") { element.removeAttribute("rel"); } - console.log(`Removed target="_blank" from: ${element.textContent || element.innerHTML.substring(0, 50)}`); } }); }); diff --git a/docs/pm/bunx.mdx b/docs/pm/bunx.mdx index d2299cf9df..b92ef339ad 100644 --- a/docs/pm/bunx.mdx +++ b/docs/pm/bunx.mdx @@ -3,6 +3,8 @@ title: "bunx" description: "Run packages from npm" --- +import Bunx from "/snippets/cli/bunx.mdx"; + `bunx` is an alias for `bun x`. The `bunx` CLI will be auto-installed when you install `bun`. Use `bunx` to auto-install and run packages from `npm`. It's Bun's equivalent of `npx` or `yarn dlx`. @@ -30,7 +32,7 @@ Packages can declare executables in the `"bin"` field of their `package.json`. T These executables are commonly plain JavaScript files marked with a [shebang line]() to indicate which program should be used to execute them. The following file indicates that it should be executed with `node`. -```ts dist/index.js icon="/icons/javascript.svg" +```js dist/index.js icon="/icons/javascript.svg" #!/usr/bin/env node console.log("Hello world!"); @@ -52,6 +54,8 @@ To pass additional command-line flags and arguments through to the executable, p bunx my-cli --foo bar ``` +--- + ## Shebangs By default, Bun respects shebangs. If an executable is marked with `#!/usr/bin/env node`, Bun will spin up a `node` process to execute the file. However, in some cases it may be desirable to run executables using Bun's runtime, even if the executable indicates otherwise. To do so, include the `--bun` flag. @@ -78,6 +82,10 @@ bunx --package @angular/cli ng To force bun to always be used with a script, use a shebang. -```ts dist/index.js icon="/icons/javascript.svg" +```js dist/index.js icon="/icons/javascript.svg" #!/usr/bin/env bun ``` + +--- + + diff --git a/docs/pm/cli/info.mdx b/docs/pm/cli/info.mdx new file mode 100644 index 0000000000..9e4feb1352 --- /dev/null +++ b/docs/pm/cli/info.mdx @@ -0,0 +1,70 @@ +--- +title: "bun info" +description: "Display package metadata from the npm registry" +--- + +`bun info` displays package metadata from the npm registry. + +## Usage + +```bash terminal icon="terminal" +bun info react +``` + +This will display information about the `react` package, including its latest version, description, homepage, dependencies, and more. 
+ +## Viewing specific versions + +To view information about a specific version: + +```bash terminal icon="terminal" +bun info react@18.0.0 +``` + +## Viewing specific properties + +You can also query specific properties from the package metadata: + +```bash terminal icon="terminal" +bun info react version +bun info react dependencies +bun info react repository.url +``` + +## JSON output + +To get the output in JSON format, use the `--json` flag: + +```bash terminal icon="terminal" +bun info react --json +``` + +## Alias + +`bun pm view` is an alias for `bun info`: + +```bash terminal icon="terminal" +bun pm view react # equivalent to: bun info react +``` + +## Examples + +```bash terminal icon="terminal" +# View basic package information +bun info is-number + +# View a specific version +bun info is-number@7.0.0 + +# View all available versions +bun info is-number versions + +# View package dependencies +bun info express dependencies + +# View package homepage +bun info lodash homepage + +# Get JSON output +bun info react --json +``` diff --git a/docs/pm/cli/install.mdx b/docs/pm/cli/install.mdx index e5cfa58836..054e364a91 100644 --- a/docs/pm/cli/install.mdx +++ b/docs/pm/cli/install.mdx @@ -134,14 +134,14 @@ For more information on filtering with `bun install`, refer to [Package Manager Bun supports npm's `"overrides"` and Yarn's `"resolutions"` in `package.json`. These are mechanisms for specifying a version range for _metadependencies_—the dependencies of your dependencies. Refer to [Package manager > Overrides and resolutions](/pm/overrides) for complete documentation. -```json package.json file="file-json" +{/* prettier-ignore */} +```json package.json icon="file-json" { "name": "my-app", "dependencies": { "foo": "^2.0.0" }, - "overrides": { - // [!code ++] + "overrides": { // [!code ++] "bar": "~4.4.0" // [!code ++] } // [!code ++] } @@ -304,7 +304,16 @@ For more advanced security scanning, including integration with services & custo ## Configuration -The default behavior of `bun install` can be configured in `bunfig.toml`. The default values are shown below. +### Configuring `bun install` with `bunfig.toml` + +`bunfig.toml` is searched for in the following paths on `bun install`, `bun remove`, and `bun add`: + +1. `$XDG_CONFIG_HOME/.bunfig.toml` or `$HOME/.bunfig.toml` +2. `./bunfig.toml` + +If both are found, the results are merged together. + +Configuring with `bunfig.toml` is optional. Bun tries to be zero configuration in general, but that's not always possible. The default behavior of `bun install` can be configured in `bunfig.toml`. The default values are shown below. ```toml bunfig.toml icon="settings" [install] @@ -345,7 +354,29 @@ minimumReleaseAge = 259200 # seconds minimumReleaseAgeExcludes = ["@types/node", "typescript"] ``` ---- +### Configuring with environment variables + +Environment variables have a higher priority than `bunfig.toml`. 
+ +| Name | Description | +| ---------------------------------- | ------------------------------------------------------------- | +| `BUN_CONFIG_REGISTRY` | Set an npm registry (default: https://registry.npmjs.org) | +| `BUN_CONFIG_TOKEN` | Set an auth token (currently does nothing) | +| `BUN_CONFIG_YARN_LOCKFILE` | Save a Yarn v1-style yarn.lock | +| `BUN_CONFIG_LINK_NATIVE_BINS` | Point `bin` in package.json to a platform-specific dependency | +| `BUN_CONFIG_SKIP_SAVE_LOCKFILE` | Don’t save a lockfile | +| `BUN_CONFIG_SKIP_LOAD_LOCKFILE` | Don’t load a lockfile | +| `BUN_CONFIG_SKIP_INSTALL_PACKAGES` | Don’t install any packages | + +Bun always tries to use the fastest available installation method for the target platform. On macOS, that’s `clonefile` and on Linux, that’s `hardlink`. You can change which installation method is used with the `--backend` flag. When unavailable or on error, `clonefile` and `hardlink` fallsback to a platform-specific implementation of copying files. + +Bun stores installed packages from npm in `~/.bun/install/cache/${name}@${version}`. Note that if the semver version has a `build` or a `pre` tag, it is replaced with a hash of that value instead. This is to reduce the chances of errors from long file paths, but unfortunately complicates figuring out where a package was installed on disk. + +When the `node_modules` folder exists, before installing, Bun checks if the `"name"` and `"version"` in `package/package.json` in the expected node_modules folder matches the expected `name` and `version`. This is how it determines whether it should install. It uses a custom JSON parser which stops parsing as soon as it finds `"name"` and `"version"`. + +When a `bun.lock` doesn’t exist or `package.json` has changed dependencies, tarballs are downloaded & extracted eagerly while resolving. + +When a `bun.lock` exists and `package.json` hasn’t changed, Bun downloads missing dependencies lazily. If the package with a matching `name` & `version` already exists in the expected location within `node_modules`, Bun won’t attempt to download the tarball. ## CI/CD @@ -395,6 +426,94 @@ jobs: run: bun run build ``` +## Platform-specific dependencies? + +bun stores normalized `cpu` and `os` values from npm in the lockfile, along with the resolved packages. It skips downloading, extracting, and installing packages disabled for the current target at runtime. This means the lockfile won't change between platforms/architectures even if the packages ultimately installed do change. + +### `--cpu` and `--os` flags + +You can override the target platform for package selection: + +```bash +bun install --cpu=x64 --os=linux +``` + +This installs packages for the specified platform instead of the current system. Useful for cross-platform builds or when preparing deployments for different environments. + +**Accepted values for `--cpu`**: `arm64`, `x64`, `ia32`, `ppc64`, `s390x` + +**Accepted values for `--os`**: `linux`, `darwin`, `win32`, `freebsd`, `openbsd`, `sunos`, `aix` + +## Peer dependencies? + +Peer dependencies are handled similarly to yarn. `bun install` will automatically install peer dependencies. If the dependency is marked optional in `peerDependenciesMeta`, an existing dependency will be chosen if possible. + +## Lockfile + +`bun.lock` is Bun’s lockfile format. See [our blogpost about the text lockfile](https://bun.com/blog/bun-lock-text-lockfile). + +Prior to Bun 1.2, the lockfile was binary and called `bun.lockb`. 
Old lockfiles can be upgraded to the new format by running `bun install --save-text-lockfile --frozen-lockfile --lockfile-only`, and then deleting `bun.lockb`. + +## Cache + +To delete the cache: + +```bash +bun pm cache rm +# or +rm -rf ~/.bun/install/cache +``` + +## Platform-specific backends + +`bun install` uses different system calls to install dependencies depending on the platform. This is a performance optimization. You can force a specific backend with the `--backend` flag. + +**`hardlink`** is the default backend on Linux. Benchmarking showed it to be the fastest on Linux. + +```bash +rm -rf node_modules +bun install --backend hardlink +``` + +**`clonefile`** is the default backend on macOS. Benchmarking showed it to be the fastest on macOS. It is only available on macOS. + +```bash +rm -rf node_modules +bun install --backend clonefile +``` + +**`clonefile_each_dir`** is similar to `clonefile`, except it clones each file individually per directory. It is only available on macOS and tends to perform slower than `clonefile`. Unlike `clonefile`, this does not recursively clone subdirectories in one system call. + +```bash +rm -rf node_modules +bun install --backend clonefile_each_dir +``` + +**`copyfile`** is the fallback used when any of the above fail, and is the slowest. on macOS, it uses `fcopyfile()` and on linux it uses `copy_file_range()`. + +```bash +rm -rf node_modules +bun install --backend copyfile +``` + +**`symlink`** is typically only used for `file:` dependencies (and eventually `link:`) internally. To prevent infinite loops, it skips symlinking the `node_modules` folder. + +If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own node_modules folder or you pass `--preserve-symlinks` to `node` or `bun`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks). + +```bash +rm -rf node_modules +bun install --backend symlink +bun --preserve-symlinks ./my-file.js +node --preserve-symlinks ./my-file.js # https://nodejs.org/api/cli.html#--preserve-symlinks +``` + +## npm registry metadata + +Bun uses a binary format for caching NPM registry responses. This loads much faster than JSON and tends to be smaller on disk. +You will see these files in `~/.bun/install/cache/*.npm`. The filename pattern is `${hash(packageName)}.npm`. It’s a hash so that extra directories don’t need to be created for scoped packages. + +Bun's usage of `Cache-Control` ignores `Age`. This improves performance, but means bun may be about 5 minutes out of date to receive the latest package version metadata from npm. + ## pnpm migration Bun automatically migrates projects from pnpm to bun. When a `pnpm-lock.yaml` file is detected and no `bun.lock` file exists, Bun will automatically migrate the lockfile to `bun.lock` during installation. The original `pnpm-lock.yaml` file remains unmodified. diff --git a/docs/pm/cli/link.mdx b/docs/pm/cli/link.mdx index 6515ae30eb..b0b46314aa 100644 --- a/docs/pm/cli/link.mdx +++ b/docs/pm/cli/link.mdx @@ -14,7 +14,7 @@ bun link ``` ```txt -bun link v1.x (7416672e) +bun link v1.3.3 (7416672e) Success! Registered "cool-pkg" To use cool-pkg in a project, run: @@ -43,6 +43,19 @@ In addition, the `--save` flag can be used to add `cool-pkg` to the `dependencie } ``` +## Unlinking + +Use `bun unlink` in the root directory to unregister a local package. 
+ +```bash terminal icon="terminal" +cd /path/to/cool-pkg +bun unlink +``` + +```txt +bun unlink v1.3.3 (7416672e) +``` + --- diff --git a/docs/pm/cli/outdated.mdx b/docs/pm/cli/outdated.mdx index da72e0c490..bf62b03a06 100644 --- a/docs/pm/cli/outdated.mdx +++ b/docs/pm/cli/outdated.mdx @@ -15,7 +15,7 @@ bun outdated | Package | Current | Update | Latest | | ------------------------------ | ------- | --------- | ---------- | | @sinclair/typebox | 0.34.15 | 0.34.16 | 0.34.16 | -| @types/bun (dev) | 1.2.0 | 1.2.23 | 1.2.23 | +| @types/bun (dev) | 1.3.0 | 1.3.3 | 1.3.3 | | eslint (dev) | 8.57.1 | 8.57.1 | 9.20.0 | | eslint-plugin-security (dev) | 2.1.1 | 2.1.1 | 3.0.1 | | eslint-plugin-sonarjs (dev) | 0.23.0 | 0.23.0 | 3.0.1 | @@ -55,7 +55,7 @@ bun outdated eslint-plugin-security eslint-plugin-sonarjs You can also pass glob patterns to check for outdated packages: ```sh terminal icon="terminal" -bun outdated eslint* +bun outdated 'eslint*' ``` ```txt @@ -75,7 +75,7 @@ bun outdated '@types/*' ```txt | Package | Current | Update | Latest | | ------------------ | ------- | ------ | ------ | -| @types/bun (dev) | 1.2.0 | 1.2.23 | 1.2.23 | +| @types/bun (dev) | 1.3.0 | 1.3.3 | 1.3.3 | ``` Or to exclude all `@types/*` packages: diff --git a/docs/pm/cli/pm.mdx b/docs/pm/cli/pm.mdx index d2f1cca971..01b41ebcdd 100644 --- a/docs/pm/cli/pm.mdx +++ b/docs/pm/cli/pm.mdx @@ -248,7 +248,7 @@ bun pm version ``` ```txt -bun pm version v1.3.2 (ca7428e9) +bun pm version v1.3.3 (ca7428e9) Current package version: v1.0.0 Increment: diff --git a/docs/pm/cli/publish.mdx b/docs/pm/cli/publish.mdx index af65bdfd96..c316b22dd4 100644 --- a/docs/pm/cli/publish.mdx +++ b/docs/pm/cli/publish.mdx @@ -13,7 +13,7 @@ bun publish ``` ```txt -bun publish v1.3.2 (ca7428e9) +bun publish v1.3.3 (ca7428e9) packed 203B package.json packed 224B README.md @@ -89,6 +89,14 @@ The `--dry-run` flag can be used to simulate the publish process without actuall bun publish --dry-run ``` +### `--tolerate-republish` + +Exit with code 0 instead of 1 if the package version already exists. Useful in CI/CD where jobs may be re-run. + +```sh terminal icon="terminal" +bun publish --tolerate-republish +``` + ### `--gzip-level` Specify the level of gzip compression to use when packing the package. Only applies to `bun publish` without a tarball path argument. Values range from `0` to `9` (default is `9`). diff --git a/docs/pm/lifecycle.mdx b/docs/pm/lifecycle.mdx index 0fafcb9b27..4cc4590d1e 100644 --- a/docs/pm/lifecycle.mdx +++ b/docs/pm/lifecycle.mdx @@ -46,6 +46,13 @@ Once added to `trustedDependencies`, install/re-install the package. Bun will re The top 500 npm packages with lifecycle scripts are allowed by default. You can see the full list [here](https://github.com/oven-sh/bun/blob/main/src/install/default-trusted-dependencies.txt). + + The default trusted dependencies list only applies to packages installed from npm. For packages from other sources + (such as `file:`, `link:`, `git:`, or `github:` dependencies), you must explicitly add them to `trustedDependencies` + to run their lifecycle scripts, even if the package name matches an entry in the default list. This prevents malicious + packages from spoofing trusted package names through local file paths or git repositories. 
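For example, a dependency pulled straight from GitHub needs an explicit `trustedDependencies` entry before its lifecycle scripts will run, even if a package with the same name is on the default list. A minimal sketch (package and repository names are hypothetical):

```json package.json icon="file-json"
{
  "name": "my-app",
  "dependencies": {
    "my-native-pkg": "github:acme/my-native-pkg" // hypothetical git dependency
  },
  "trustedDependencies": ["my-native-pkg"]
}
```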
+ + --- ## `--ignore-scripts` diff --git a/docs/pm/overrides.mdx b/docs/pm/overrides.mdx index a8e4f32083..cd7a89d5a5 100644 --- a/docs/pm/overrides.mdx +++ b/docs/pm/overrides.mdx @@ -5,14 +5,14 @@ description: "Control metadependency versions with npm overrides and Yarn resolu Bun supports npm's `"overrides"` and Yarn's `"resolutions"` in `package.json`. These are mechanisms for specifying a version range for _metadependencies_—the dependencies of your dependencies. +{/* prettier-ignore */} ```json package.json icon="file-json" { "name": "my-app", "dependencies": { "foo": "^2.0.0" }, - "overrides": { - // [!code ++] + "overrides": { // [!code ++] "bar": "~4.4.0" // [!code ++] } // [!code ++] } @@ -50,14 +50,14 @@ Add `bar` to the `"overrides"` field in `package.json`. Bun will defer to the sp overrides](https://docs.npmjs.com/cli/v9/configuring-npm/package-json#overrides) are not supported. +{/* prettier-ignore */} ```json package.json icon="file-json" { "name": "my-app", "dependencies": { "foo": "^2.0.0" }, - "overrides": { - // [!code ++] + "overrides": { // [!code ++] "bar": "~4.4.0" // [!code ++] } // [!code ++] } @@ -69,14 +69,14 @@ The syntax is similar for `"resolutions"`, which is Yarn's alternative to `"over As with `"overrides"`, _nested resolutions_ are not currently supported. +{/* prettier-ignore */} ```json package.json icon="file-json" { "name": "my-app", "dependencies": { "foo": "^2.0.0" }, - "resolutions": { - // [!code ++] + "resolutions": { // [!code ++] "bar": "~4.4.0" // [!code ++] } // [!code ++] } diff --git a/docs/pm/workspaces.mdx b/docs/pm/workspaces.mdx index 6570597dd4..961037914a 100644 --- a/docs/pm/workspaces.mdx +++ b/docs/pm/workspaces.mdx @@ -30,7 +30,7 @@ It's common for a monorepo to have the following structure: In the root `package.json`, the `"workspaces"` key is used to indicate which subdirectories should be considered packages/workspaces within the monorepo. It's conventional to place all the workspace in a directory called `packages`. -```json +```json package.json icon="file-json" { "name": "my-project", "version": "1.0.0", @@ -42,14 +42,21 @@ In the root `package.json`, the `"workspaces"` key is used to indicate which sub ``` - **Glob support** — Bun supports full glob syntax in `"workspaces"` (see [here](/runtime/glob#supported-glob-patterns) - for a comprehensive list of supported syntax), _except_ for exclusions (e.g. `!**/excluded/**`), which are not - implemented yet. + **Glob support** — Bun supports full glob syntax in `"workspaces"`, including negative patterns (e.g. + `!**/excluded/**`). See [here](/runtime/glob#supported-glob-patterns) for a comprehensive list of supported syntax. +```json package.json icon="file-json" +{ + "name": "my-project", + "version": "1.0.0", + "workspaces": ["packages/**", "!packages/**/test/**", "!packages/**/template/**"] +} +``` + Each workspace has it's own `package.json`. When referencing other packages in the monorepo, semver or workspace protocols (e.g. `workspace:*`) can be used as the version field in your `package.json`. -```json +```json packages/pkg-a/package.json icon="file-json" { "name": "pkg-a", "version": "1.0.0", diff --git a/docs/project/building-windows.mdx b/docs/project/building-windows.mdx index 59cb8f788e..a5541ac6cc 100644 --- a/docs/project/building-windows.mdx +++ b/docs/project/building-windows.mdx @@ -22,7 +22,7 @@ By default, running unverified scripts are blocked. Bun v1.1 or later. We use Bun to run it's own code generators. 
```ps1 -> irm bun.com/install.ps1 | iex +> irm bun.sh/install.ps1 | iex ``` [Visual Studio](https://visualstudio.microsoft.com) with the "Desktop Development with C++" workload. While installing, make sure to install Git as well, if Git for Windows is not already installed. @@ -49,7 +49,7 @@ After Visual Studio, you need the following: ```ps1 Scoop > irm https://get.scoop.sh | iex -> scoop install nodejs-lts go rust nasm ruby perl sccache +> scoop install nodejs-lts go rust nasm ruby perl ccache # scoop seems to be buggy if you install llvm and the rest at the same time > scoop install llvm@19.1.7 ``` diff --git a/docs/project/contributing.mdx b/docs/project/contributing.mdx index 31a0c511bc..fdc18baca8 100644 --- a/docs/project/contributing.mdx +++ b/docs/project/contributing.mdx @@ -7,7 +7,21 @@ Configuring a development environment for Bun can take 10-30 minutes depending o If you are using Windows, please refer to [this guide](/project/building-windows) -## Install Dependencies +## Using Nix (Alternative) + +A Nix flake is provided as an alternative to manual dependency installation: + +```bash +nix develop +# or explicitly use the pure shell +# nix develop .#pure +export CMAKE_SYSTEM_PROCESSOR=$(uname -m) +bun bd +``` + +This provides all dependencies in an isolated, reproducible environment without requiring sudo. + +## Install Dependencies (Manual) Using your system's package manager, install Bun's dependencies: @@ -18,15 +32,15 @@ $ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ``` ```bash Ubuntu/Debian -$ sudo apt install curl wget lsb-release software-properties-common cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils +$ sudo apt install curl wget lsb-release software-properties-common cargo cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils ``` ```bash Arch -$ sudo pacman -S base-devel ccache cmake git go libiconv libtool make ninja pkg-config python rust sed unzip ruby +$ sudo pacman -S base-devel cmake git go libiconv libtool make ninja pkg-config python rust sed unzip ruby ``` ```bash Fedora -$ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)' +$ sudo dnf install cargo clang19 llvm19 lld19 cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)' ``` ```bash openSUSE Tumbleweed @@ -56,6 +70,29 @@ $ brew install bun +### Optional: Install `ccache` + +ccache is used to cache compilation artifacts, significantly speeding up builds: + +```bash +# For macOS +$ brew install ccache + +# For Ubuntu/Debian +$ sudo apt install ccache + +# For Arch +$ sudo pacman -S ccache + +# For Fedora +$ sudo dnf install ccache + +# For openSUSE +$ sudo zypper install ccache +``` + +Our build scripts will automatically detect and use `ccache` if available. You can check cache statistics with `ccache --show-stats`. + ## Install LLVM Bun requires LLVM 19 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. 
In most cases, you can install LLVM through your system package manager: @@ -156,7 +193,7 @@ Bun generally takes about 2.5 minutes to compile a debug build when there are Zi - Batch up your changes - Ensure zls is running with incremental watching for LSP errors (if you use VSCode and install Zig and run `bun run build` once to download Zig, this should just work) - Prefer using the debugger ("CodeLLDB" in VSCode) to step through the code. -- Use debug logs. `BUN_DEBUG_=1` will enable debug logging for the corresponding `Output.scoped(., false)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug lgos into a file, `BUN_DEBUG=.log`. Debug logs are aggressively removed in release builds. +- Use debug logs. `BUN_DEBUG_=1` will enable debug logging for the corresponding `Output.scoped(., .hidden)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, `BUN_DEBUG=.log`. Debug logs are aggressively removed in release builds. - src/js/\*\*.ts changes are pretty much instant to rebuild. C++ changes are a bit slower, but still much faster than the Zig code (Zig is one compilation unit, C++ is many). ## Code generation scripts @@ -327,15 +364,6 @@ bun run build -DUSE_STATIC_LIBATOMIC=OFF The built version of Bun may not work on other systems if compiled this way. -### ccache conflicts with building TinyCC on macOS - -If you run into issues with `ccache` when building TinyCC, try reinstalling ccache - -```bash -brew uninstall ccache -brew install ccache -``` - ## Using bun-debug - Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging) diff --git a/docs/project/license.mdx b/docs/project/license.mdx index 679ff7be84..fa51203c26 100644 --- a/docs/project/license.mdx +++ b/docs/project/license.mdx @@ -11,7 +11,7 @@ Bun statically links JavaScriptCore (and WebKit) which is LGPL-2 licensed. WebCo > (1) If you statically link against an LGPL'd library, you must also provide your application in an object (not necessarily source) format, so that a user has the opportunity to modify the library and relink the application. -You can find the patched version of WebKit used by Bun here: [https://github.com/oven-sh/webkit](https://github.com/oven-sh/webkit). If you would like to relink Bun with changes: +You can find the patched version of WebKit used by Bun here: https://github.com/oven-sh/webkit. If you would like to relink Bun with changes: - `git submodule update --init --recursive` - `make jsc` diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx index 21509db9f7..6781199996 100644 --- a/docs/quickstart.mdx +++ b/docs/quickstart.mdx @@ -92,7 +92,7 @@ Build a minimal HTTP server with `Bun.serve`, run it locally, then evolve it by Then add the following to your `compilerOptions` in `tsconfig.json`: - ```json tsconfig.json icon="file-code" + ```json tsconfig.json icon="file-json" { "compilerOptions": { "lib": ["ESNext"], @@ -219,16 +219,21 @@ Build a minimal HTTP server with `Bun.serve`, run it locally, then evolve it by Bun can also execute `"scripts"` from your `package.json`. 
Add the following script: -```json package.json icon="file-code" +{/* prettier-ignore */} +```json package.json icon="file-json" { "name": "quickstart", "module": "index.ts", "type": "module", - "scripts": { - "start": "bun run index.ts" - }, + "private": true, + "scripts": { // [!code ++] + "start": "bun run index.ts" // [!code ++] + }, // [!code ++] "devDependencies": { "@types/bun": "latest" + }, + "peerDependencies": { + "typescript": "^5" } } ``` diff --git a/docs/runtime/bunfig.mdx b/docs/runtime/bunfig.mdx index 57fb1805f0..a4258d8377 100644 --- a/docs/runtime/bunfig.mdx +++ b/docs/runtime/bunfig.mdx @@ -107,12 +107,34 @@ Bun supports the following loaders: ### `telemetry` -The `telemetry` field permit to enable/disable the analytics records. Bun records bundle timings (so we can answer with data, "is Bun getting faster?") and feature usage (e.g., "are people actually using macros?"). The request body size is about 60 bytes, so it's not a lot of data. By default the telemetry is enabled. Equivalent of `DO_NOT_TRACK` env variable. +The `telemetry` field is used to enable/disable analytics. By default, telemetry is enabled. This is equivalent to the `DO_NOT_TRACK` environment variable. + +Currently we do not collect telemetry and this setting is only used for enabling/disabling anonymous crash reports, but in the future we plan to collect information like which Bun APIs are used most or how long `bun build` takes. ```toml title="bunfig.toml" icon="settings" telemetry = false ``` +### `env` + +Configure automatic `.env` file loading. By default, Bun automatically loads `.env` files. To disable this behavior: + +```toml title="bunfig.toml" icon="settings" +# Disable automatic .env file loading +env = false +``` + +You can also use object syntax with the `file` property: + +```toml title="bunfig.toml" icon="settings" +[env] +file = false +``` + +This is useful in production environments or CI/CD pipelines where you want to rely solely on system environment variables. + +Note: Explicitly provided environment files via `--env-file` will still be loaded even when default loading is disabled. + ### `console` Configure console output behavior. @@ -276,6 +298,58 @@ This is useful for catching flaky tests or non-deterministic behavior. Each test The `--rerun-each` CLI flag will override this setting when specified. +### `test.concurrentTestGlob` + +Specify a glob pattern to automatically run matching test files with concurrent test execution enabled. Test files matching this pattern will behave as if the `--concurrent` flag was passed, running all tests within those files concurrently. + +```toml title="bunfig.toml" icon="settings" +[test] +concurrentTestGlob = "**/concurrent-*.test.ts" +``` + +This is useful for: + +- Gradually migrating test suites to concurrent execution +- Running integration tests concurrently while keeping unit tests sequential +- Separating fast concurrent tests from tests that require sequential execution + +The `--concurrent` CLI flag will override this setting when specified. + +### `test.onlyFailures` + +When enabled, only failed tests are displayed in the output. This helps reduce noise in large test suites by hiding passing tests. Default `false`. + +```toml title="bunfig.toml" icon="settings" +[test] +onlyFailures = true +``` + +This is equivalent to using the `--only-failures` flag when running `bun test`. + +### `test.reporter` + +Configure the test reporter settings. 
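For example, a `bunfig.toml` that turns on both reporters covered in the subsections below might look like this (an illustrative sketch; each setting is documented individually underneath):

```toml title="bunfig.toml" icon="settings"
[test.reporter]
# Compact output: print one dot per test instead of a full line
dots = true
# Also emit a JUnit XML report for CI systems
junit = "test-results.xml"
```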
+ +#### `test.reporter.dots` + +Enable the dots reporter, which displays a compact output showing a dot for each test. Default `false`. + +```toml title="bunfig.toml" icon="settings" +[test.reporter] +dots = true +``` + +#### `test.reporter.junit` + +Enable JUnit XML reporting and specify the output file path. + +```toml title="bunfig.toml" icon="settings" +[test.reporter] +junit = "test-results.xml" +``` + +This generates a JUnit XML report that can be consumed by CI systems and other tools. + ## Package manager Package management is a complex issue; to support a range of use cases, the behavior of `bun install` can be configured under the `[install]` section. @@ -400,8 +474,8 @@ To configure the directory where Bun installs globally installed binaries and CL Environment variable: `BUN_INSTALL_BIN` ```toml title="bunfig.toml" icon="settings" -# where globally-installed package bins are linked [install] +# where globally-installed package bins are linked globalBinDir = "~/.bun/bin" ``` @@ -533,6 +607,32 @@ editor = "code" # - "emacs" ``` +### `install.security.scanner` + +Configure a security scanner to scan packages for vulnerabilities before installation. + +First, install a security scanner from npm: + +```bash terminal icon="terminal" +bun add -d @acme/bun-security-scanner +``` + +Then configure it in your `bunfig.toml`: + +```toml bunfig.toml icon="settings" +[install.security] +scanner = "@acme/bun-security-scanner" +``` + +When a security scanner is configured: + +- Auto-install is automatically disabled for security +- Packages are scanned before installation +- Installation is cancelled if fatal issues are found +- Security warnings are displayed during installation + +Learn more about [using and writing security scanners](/pm/security-scanner-api). + ### `install.minimumReleaseAge` Configure a minimum age (in seconds) for npm package versions. Package versions published more recently than this threshold will be filtered out during installation. Default is `null` (disabled). @@ -551,7 +651,7 @@ For more details see [Minimum release age](/pm/cli/install#minimum-release-age) The `bun run` command can be configured under the `[run]` section. These apply to the `bun run` command and the `bun` command when running a file or executable or script. -Currently, `bunfig.toml` isn't always automatically loaded for `bun run` in a local project (it does check for a global `bunfig.toml`), so you might still need to pass `-c` or `-c=bunfig.toml` to use these settings. +Currently, `bunfig.toml` is only automatically loaded for `bun run` in a local project (it doesn't check for a global `.bunfig.toml`). 
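As a quick sketch, a project-local `bunfig.toml` might configure the `[run]` section like so. The `shell` option is described in the next subsection; `bun = true` (treat `node` invocations in scripts as `bun`) is an assumption based on Bun's other bunfig options and is not covered on this page:

```toml title="bunfig.toml" icon="settings"
[run]
# Use Bun's cross-platform shell for package.json scripts
shell = "bun"
# Assumption: alias `node` to `bun` when running scripts and binaries
bun = true
```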
### `run.shell` - use the system shell or Bun's shell diff --git a/docs/runtime/c-compiler.mdx b/docs/runtime/c-compiler.mdx index 14c0e27216..516fc3efbd 100644 --- a/docs/runtime/c-compiler.mdx +++ b/docs/runtime/c-compiler.mdx @@ -13,7 +13,7 @@ See the [introduction blog post](https://bun.com/blog/compile-and-run-c-in-js) f JavaScript: -```ts hello.js icon="file-code" +```ts hello.ts icon="file-code" import { cc } from "bun:ffi"; import source from "./hello.c" with { type: "file" }; @@ -87,7 +87,7 @@ You can also pass a `napi_env` to receive the N-API environment used to call the For example, if you have a string in C, you can return it to JavaScript like this: -```ts hello.js +```ts hello.ts import { cc } from "bun:ffi"; import source from "./hello.c" with { type: "file" }; diff --git a/docs/runtime/child-process.mdx b/docs/runtime/child-process.mdx index 6be68a6f27..4ee5441234 100644 --- a/docs/runtime/child-process.mdx +++ b/docs/runtime/child-process.mdx @@ -100,7 +100,7 @@ You can read results from the subprocess via the `stdout` and `stderr` propertie ```ts const proc = Bun.spawn(["bun", "--version"]); const text = await proc.stdout.text(); -console.log(text); // => "1.3.2\n" +console.log(text); // => "1.3.3\n" ``` Configure the output stream by passing one of the following values to `stdout/stderr`: @@ -289,7 +289,7 @@ childProc.disconnect(); To use IPC between a `bun` process and a Node.js process, set `serialization: "json"` in `Bun.spawn`. This is because Node.js and Bun use different JavaScript engines with different object serialization formats. -```ts bun-node-ipc.js icon="file-code" +```js bun-node-ipc.js icon="file-code" if (typeof Bun !== "undefined") { const prefix = `[bun ${process.versions.bun} 🐇]`; const node = Bun.spawn({ @@ -315,6 +315,109 @@ if (typeof Bun !== "undefined") { --- +## Terminal (PTY) support + +For interactive terminal applications, you can spawn a subprocess with a pseudo-terminal (PTY) attached using the `terminal` option. This makes the subprocess think it's running in a real terminal, enabling features like colored output, cursor movement, and interactive prompts. + +```ts +const proc = Bun.spawn(["bash"], { + terminal: { + cols: 80, + rows: 24, + data(terminal, data) { + // Called when data is received from the terminal + process.stdout.write(data); + }, + }, +}); + +// Write to the terminal +proc.terminal.write("echo hello\n"); + +// Wait for the process to exit +await proc.exited; + +// Close the terminal +proc.terminal.close(); +``` + +When the `terminal` option is provided: + +- The subprocess sees `process.stdout.isTTY` as `true` +- `stdin`, `stdout`, and `stderr` are all connected to the terminal +- `proc.stdin`, `proc.stdout`, and `proc.stderr` return `null` — use the terminal instead +- Access the terminal via `proc.terminal` + +### Terminal options + +| Option | Description | Default | +| ------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------ | +| `cols` | Number of columns | `80` | +| `rows` | Number of rows | `24` | +| `name` | Terminal type for PTY configuration (set `TERM` env var separately via `env` option) | `"xterm-256color"` | +| `data` | Callback when data is received `(terminal, data) => void` | — | +| `exit` | Callback when PTY stream closes (EOF or error). `exitCode` is PTY lifecycle status (0=EOF, 1=error), not subprocess exit code. Use `proc.exited` for process exit. 
| — | +| `drain` | Callback when ready for more data `(terminal) => void` | — | + +### Terminal methods + +The `Terminal` object returned by `proc.terminal` has the following methods: + +```ts +// Write data to the terminal +proc.terminal.write("echo hello\n"); + +// Resize the terminal +proc.terminal.resize(120, 40); + +// Set raw mode (disable line buffering and echo) +proc.terminal.setRawMode(true); + +// Keep event loop alive while terminal is open +proc.terminal.ref(); +proc.terminal.unref(); + +// Close the terminal +proc.terminal.close(); +``` + +### Reusable Terminal + +You can create a terminal independently and reuse it across multiple subprocesses: + +```ts +await using terminal = new Bun.Terminal({ + cols: 80, + rows: 24, + data(term, data) { + process.stdout.write(data); + }, +}); + +// Spawn first process +const proc1 = Bun.spawn(["echo", "first"], { terminal }); +await proc1.exited; + +// Reuse terminal for another process +const proc2 = Bun.spawn(["echo", "second"], { terminal }); +await proc2.exited; + +// Terminal is closed automatically by `await using` +``` + +When passing an existing `Terminal` object: + +- The terminal can be reused across multiple spawns +- You control when to close the terminal +- The `exit` callback fires when you call `terminal.close()`, not when each subprocess exits +- Use `proc.exited` to detect individual subprocess exits + +This is useful for running multiple commands in sequence through the same terminal session. + +Terminal support is only available on POSIX systems (Linux, macOS). It is not available on Windows. + +--- + ## Blocking API (`Bun.spawnSync()`) Bun provides a synchronous equivalent of `Bun.spawn` called `Bun.spawnSync`. This is a blocking API that supports the same inputs and parameters as `Bun.spawn`. It returns a `SyncSubprocess` object, which differs from `Subprocess` in a few ways. @@ -407,6 +510,7 @@ namespace SpawnOptions { timeout?: number; killSignal?: string | number; maxBuffer?: number; + terminal?: TerminalOptions; // PTY support (POSIX only) } type Readable = @@ -435,10 +539,11 @@ namespace SpawnOptions { } interface Subprocess extends AsyncDisposable { - readonly stdin: FileSink | number | undefined; - readonly stdout: ReadableStream | number | undefined; - readonly stderr: ReadableStream | number | undefined; - readonly readable: ReadableStream | number | undefined; + readonly stdin: FileSink | number | undefined | null; + readonly stdout: ReadableStream> | number | undefined | null; + readonly stderr: ReadableStream> | number | undefined | null; + readonly readable: ReadableStream> | number | undefined | null; + readonly terminal: Terminal | undefined; readonly pid: number; readonly exited: Promise; readonly exitCode: number | null; @@ -465,6 +570,28 @@ interface SyncSubprocess { pid: number; } +interface TerminalOptions { + cols?: number; + rows?: number; + name?: string; + data?: (terminal: Terminal, data: Uint8Array) => void; + /** Called when PTY stream closes (EOF or error). exitCode is PTY lifecycle status (0=EOF, 1=error), not subprocess exit code. 
*/ + exit?: (terminal: Terminal, exitCode: number, signal: string | null) => void; + drain?: (terminal: Terminal) => void; +} + +interface Terminal extends AsyncDisposable { + readonly stdin: number; + readonly stdout: number; + readonly closed: boolean; + write(data: string | BufferSource): number; + resize(cols: number, rows: number): void; + setRawMode(enabled: boolean): void; + ref(): void; + unref(): void; + close(): void; +} + interface ResourceUsage { contextSwitches: { voluntary: number; diff --git a/docs/runtime/cookies.mdx b/docs/runtime/cookies.mdx index 641dfc7989..0d90bd0420 100644 --- a/docs/runtime/cookies.mdx +++ b/docs/runtime/cookies.mdx @@ -9,7 +9,7 @@ Bun provides native APIs for working with HTTP cookies through `Bun.Cookie` and `Bun.CookieMap` provides a Map-like interface for working with collections of cookies. It implements the `Iterable` interface, allowing you to use it with `for...of` loops and other iteration methods. -```ts filename="cookies.ts" icon="/icons/typescript.svg" +```ts title="cookies.ts" icon="/icons/typescript.svg" // Empty cookie map const cookies = new Bun.CookieMap(); @@ -33,7 +33,7 @@ const cookies3 = new Bun.CookieMap([ In Bun's HTTP server, the `cookies` property on the request object (in `routes`) is an instance of `CookieMap`: -```ts filename="server.ts" icon="/icons/typescript.svg" +```ts title="server.ts" icon="/icons/typescript.svg" const server = Bun.serve({ routes: { "/": req => { @@ -68,7 +68,7 @@ console.log("Server listening at: " + server.url); Retrieves a cookie by name. Returns `null` if the cookie doesn't exist. -```ts filename="get-cookie.ts" icon="/icons/typescript.svg" +```ts title="get-cookie.ts" icon="/icons/typescript.svg" // Get by name const cookie = cookies.get("session"); @@ -81,7 +81,7 @@ if (cookie != null) { Checks if a cookie with the given name exists. -```ts filename="has-cookie.ts" icon="/icons/typescript.svg" +```ts title="has-cookie.ts" icon="/icons/typescript.svg" // Check if cookie exists if (cookies.has("session")) { // Cookie exists @@ -96,7 +96,7 @@ if (cookies.has("session")) { Adds or updates a cookie in the map. Cookies default to `{ path: "/", sameSite: "lax" }`. -```ts filename="set-cookie.ts" icon="/icons/typescript.svg" +```ts title="set-cookie.ts" icon="/icons/typescript.svg" // Set by name and value cookies.set("session", "abc123"); @@ -119,7 +119,7 @@ cookies.set(cookie); Removes a cookie from the map. When applied to a Response, this adds a cookie with an empty string value and an expiry date in the past. A cookie will only delete successfully on the browser if the domain and path is the same as it was when the cookie was created. -```ts filename="delete-cookie.ts" icon="/icons/typescript.svg" +```ts title="delete-cookie.ts" icon="/icons/typescript.svg" // Delete by name using default domain and path. cookies.delete("session"); @@ -135,7 +135,7 @@ cookies.delete({ Converts the cookie map to a serializable format. -```ts filename="cookie-to-json.ts" icon="/icons/typescript.svg" +```ts title="cookie-to-json.ts" icon="/icons/typescript.svg" const json = cookies.toJSON(); ``` @@ -145,7 +145,7 @@ Returns an array of values for Set-Cookie headers that can be used to apply all When using `Bun.serve()`, you don't need to call this method explicitly. Any changes made to the `req.cookies` map are automatically applied to the response headers. This method is primarily useful when working with other HTTP server implementations. 
-```ts filename="node-server.js" icon="file-code" +```js title="node-server.js" icon="file-code" import { createServer } from "node:http"; import { CookieMap } from "bun"; @@ -172,7 +172,7 @@ server.listen(3000, () => { `CookieMap` provides several methods for iteration: -```ts filename="iterate-cookies.ts" icon="/icons/typescript.svg" +```ts title="iterate-cookies.ts" icon="/icons/typescript.svg" // Iterate over [name, cookie] entries for (const [name, value] of cookies) { console.log(`${name}: ${value}`); @@ -205,7 +205,7 @@ cookies.forEach((value, name) => { Returns the number of cookies in the map. -```ts filename="cookie-size.ts" icon="/icons/typescript.svg" +```ts title="cookie-size.ts" icon="/icons/typescript.svg" console.log(cookies.size); // Number of cookies ``` @@ -213,7 +213,7 @@ console.log(cookies.size); // Number of cookies `Bun.Cookie` represents an HTTP cookie with its name, value, and attributes. -```ts filename="cookie-class.ts" icon="/icons/typescript.svg" +```ts title="cookie-class.ts" icon="/icons/typescript.svg" import { Cookie } from "bun"; // Create a basic cookie @@ -243,7 +243,7 @@ const objCookie = new Bun.Cookie({ ### Constructors -```ts filename="constructors.ts" icon="/icons/typescript.svg" +```ts title="constructors.ts" icon="/icons/typescript.svg" // Basic constructor with name/value new Bun.Cookie(name: string, value: string); @@ -259,7 +259,7 @@ new Bun.Cookie(options: CookieInit); ### Properties -```ts filename="cookie-properties.ts" icon="/icons/typescript.svg" +```ts title="cookie-properties.ts" icon="/icons/typescript.svg" cookie.name; // string - Cookie name cookie.value; // string - Cookie value cookie.domain; // string | null - Domain scope (null if not specified) @@ -278,7 +278,7 @@ cookie.httpOnly; // boolean - Accessible only via HTTP (not JavaScript) Checks if the cookie has expired. -```ts filename="is-expired.ts" icon="/icons/typescript.svg" +```ts title="is-expired.ts" icon="/icons/typescript.svg" // Expired cookie (Date in the past) const expiredCookie = new Bun.Cookie("name", "value", { expires: new Date(Date.now() - 1000), @@ -302,7 +302,7 @@ console.log(sessionCookie.isExpired()); // false Returns a string representation of the cookie suitable for a `Set-Cookie` header. -```ts filename="serialize-cookie.ts" icon="/icons/typescript.svg" +```ts title="serialize-cookie.ts" icon="/icons/typescript.svg" const cookie = new Bun.Cookie("session", "abc123", { domain: "example.com", path: "/admin", @@ -322,7 +322,7 @@ console.log(cookie.toString()); Converts the cookie to a plain object suitable for JSON serialization. -```ts filename="cookie-json.ts" icon="/icons/typescript.svg" +```ts title="cookie-json.ts" icon="/icons/typescript.svg" const cookie = new Bun.Cookie("session", "abc123", { secure: true, httpOnly: true, @@ -349,7 +349,7 @@ const jsonString = JSON.stringify(cookie); Parses a cookie string into a `Cookie` instance. -```ts filename="parse-cookie.ts" icon="/icons/typescript.svg" +```ts title="parse-cookie.ts" icon="/icons/typescript.svg" const cookie = Bun.Cookie.parse("name=value; Path=/; Secure; SameSite=Lax"); console.log(cookie.name); // "name" @@ -363,7 +363,7 @@ console.log(cookie.sameSite); // "lax" Factory method to create a cookie. 
-```ts filename="cookie-from.ts" icon="/icons/typescript.svg" +```ts title="cookie-from.ts" icon="/icons/typescript.svg" const cookie = Bun.Cookie.from("session", "abc123", { httpOnly: true, secure: true, @@ -373,7 +373,7 @@ const cookie = Bun.Cookie.from("session", "abc123", { ## Types -```ts filename="types.ts" icon="/icons/typescript.svg" +```ts title="types.ts" icon="/icons/typescript.svg" interface CookieInit { name?: string; value?: string; diff --git a/docs/runtime/debugger.mdx b/docs/runtime/debugger.mdx index c718f2632f..820a1a43d1 100644 --- a/docs/runtime/debugger.mdx +++ b/docs/runtime/debugger.mdx @@ -115,7 +115,7 @@ Here's a cheat sheet explaining the functions of the control flow buttons. ### Visual Studio Code Debugger -Experimental support for debugging Bun scripts is available in Visual Studio Code. To use it, you'll need to install the [Bun VSCode extension](https://bun.com/guides/runtime/vscode-debugger). +Experimental support for debugging Bun scripts is available in Visual Studio Code. To use it, you'll need to install the [Bun VSCode extension](/guides/runtime/vscode-debugger). --- @@ -146,11 +146,11 @@ await fetch("https://example.com", { ``` ```txt -[fetch] $ curl --http1.1 "https://example.com/" -X POST -H "content-type: application/json" -H "Connection: keep-alive" -H "User-Agent: Bun/1.3.2" -H "Accept: */*" -H "Host: example.com" -H "Accept-Encoding: gzip, deflate, br" --compressed -H "Content-Length: 13" --data-raw "{\"foo\":\"bar\"}" +[fetch] $ curl --http1.1 "https://example.com/" -X POST -H "content-type: application/json" -H "Connection: keep-alive" -H "User-Agent: Bun/1.3.3" -H "Accept: */*" -H "Host: example.com" -H "Accept-Encoding: gzip, deflate, br" --compressed -H "Content-Length: 13" --data-raw "{\"foo\":\"bar\"}" [fetch] > HTTP/1.1 POST https://example.com/ [fetch] > content-type: application/json [fetch] > Connection: keep-alive -[fetch] > User-Agent: Bun/1.3.2 +[fetch] > User-Agent: Bun/1.3.3 [fetch] > Accept: */* [fetch] > Host: example.com [fetch] > Accept-Encoding: gzip, deflate, br @@ -190,7 +190,7 @@ await fetch("https://example.com", { [fetch] > HTTP/1.1 POST https://example.com/ [fetch] > content-type: application/json [fetch] > Connection: keep-alive -[fetch] > User-Agent: Bun/1.3.2 +[fetch] > User-Agent: Bun/1.3.3 [fetch] > Accept: */* [fetch] > Host: example.com [fetch] > Accept-Encoding: gzip, deflate, br diff --git a/docs/runtime/environment-variables.mdx b/docs/runtime/environment-variables.mdx index c1b38edc1b..087da9ef56 100644 --- a/docs/runtime/environment-variables.mdx +++ b/docs/runtime/environment-variables.mdx @@ -13,7 +13,7 @@ Bun reads the following files automatically (listed in order of increasing prece - `.env.production`, `.env.development`, `.env.test` (depending on value of `NODE_ENV`) - `.env.local` -```txt .env icon="settings" +```ini .env icon="settings" FOO=hello BAR=world ``` @@ -46,7 +46,7 @@ bun exec 'FOO=helloworld bun run dev' On Windows, `package.json` scripts called with `bun run` will automatically use the **bun shell**, making the following also cross-platform. -```json package.json +```json package.json icon="file-json" "scripts": { "dev": "NODE_ENV=development bun --watch app.ts", }, @@ -72,13 +72,30 @@ bun --env-file=.env.1 src/index.ts bun --env-file=.env.abc --env-file=.env.def run build ``` +## Disabling automatic `.env` loading + +Use `--no-env-file` to disable Bun's automatic `.env` file loading. 
This is useful in production environments or CI/CD pipelines where you want to rely solely on system environment variables. + +```sh +bun run --no-env-file index.ts +``` + +This can also be configured in `bunfig.toml`: + +```toml bunfig.toml icon="settings" +# Disable loading .env files +env = false +``` + +Explicitly provided environment files via `--env-file` will still be loaded even when default loading is disabled. + --- ## Quotation marks Bun supports double quotes, single quotes, and template literal backticks: -```txt .env icon="settings" +```ini .env icon="settings" FOO='hello' FOO="hello" FOO=`hello` @@ -88,7 +105,7 @@ FOO=`hello` Environment variables are automatically _expanded_. This means you can reference previously-defined variables in your environment variables. -```txt .env icon="settings" +```ini .env icon="settings" FOO=world BAR=hello$FOO ``` @@ -99,7 +116,7 @@ process.env.BAR; // => "helloworld" This is useful for constructing connection strings or other compound values. -```txt .env icon="settings" +```ini .env icon="settings" DB_USER=postgres DB_PASSWORD=secret DB_HOST=localhost @@ -109,7 +126,7 @@ DB_URL=postgres://$DB_USER:$DB_PASSWORD@$DB_HOST:$DB_PORT/$DB_NAME This can be disabled by escaping the `$` with a backslash. -```txt .env icon="settings" +```ini .env icon="settings" FOO=world BAR=hello\$FOO ``` diff --git a/docs/runtime/html-rewriter.mdx b/docs/runtime/html-rewriter.mdx index cdddae9e1a..5e05ee0706 100644 --- a/docs/runtime/html-rewriter.mdx +++ b/docs/runtime/html-rewriter.mdx @@ -46,29 +46,22 @@ console.log(result); This replaces all images with a thumbnail of Rick Astley and wraps each `` in a link, producing a diff like this: +{/* prettier-ignore */} ```html - // [!code --] // [!code --] - // [!code --] - - // [!code ++] - Definitely not a rickroll - // [!code ++] - - // [!code ++] - - // [!code ++] - Definitely not a rickroll - // [!code ++] - - // [!code ++] - - // [!code ++] - Definitely not a rickroll - // [!code ++] - - // [!code ++] + + + + + Definitely not a rickroll + + + Definitely not a rickroll + + + Definitely not a rickroll + ``` diff --git a/docs/runtime/http/routing.mdx b/docs/runtime/http/routing.mdx index 06af0a2cbb..418e421866 100644 --- a/docs/runtime/http/routing.mdx +++ b/docs/runtime/http/routing.mdx @@ -102,7 +102,7 @@ Bun.serve({ TypeScript parses route parameters when passed as a string literal, so that your editor will show autocomplete when accessing `request.params`. -```ts title="index.ts" +```ts title="index.ts" icon="/icons/typescript.svg" import type { BunRequest } from "bun"; Bun.serve({ @@ -283,7 +283,7 @@ You can also access the `Server` object from the `fetch` handler. 
It's the secon const server = Bun.serve({ fetch(req, server) { const ip = server.requestIP(req); - return new Response(`Your IP is ${ip}`); + return new Response(`Your IP is ${ip.address}`); }, }); ``` diff --git a/docs/runtime/http/server.mdx b/docs/runtime/http/server.mdx index f230f1e7ea..432117e66f 100644 --- a/docs/runtime/http/server.mdx +++ b/docs/runtime/http/server.mdx @@ -32,12 +32,8 @@ const server = Bun.serve({ // Redirect from /blog/hello to /blog/hello/world "/blog/hello": Response.redirect("/blog/hello/world"), - // Serve a file by buffering it in memory - "/favicon.ico": new Response(await Bun.file("./favicon.ico").bytes(), { - headers: { - "Content-Type": "image/x-icon", - }, - }), + // Serve a file by lazily loading it into memory + "/favicon.ico": Bun.file("./favicon.ico"), }, // (optional) fallback for unmatched routes: @@ -126,7 +122,7 @@ bun --port=4002 server.ts - `BUN_PORT` environment variable ```sh -bun_PORT=4002 bun server.ts +BUN_PORT=4002 bun server.ts ``` - `PORT` environment variable @@ -197,15 +193,17 @@ This is the maximum amount of time a connection is allowed to be idle before the Thus far, the examples on this page have used the explicit `Bun.serve` API. Bun also supports an alternate syntax. ```ts server.ts -import { type Serve } from "bun"; +import type { Serve } from "bun"; export default { fetch(req) { return new Response("Bun!"); }, -} satisfies Serve; +} satisfies Serve.Options; ``` +The type parameter `` represents WebSocket data — if you add a `websocket` handler with custom data attached via `server.upgrade(req, { data: ... })`, replace `undefined` with your data type. + Instead of passing the server options into `Bun.serve`, `export default` it. This file can be executed as-is; when Bun sees a file with a `default` export containing a `fetch` handler, it passes it into `Bun.serve` under the hood. --- diff --git a/docs/runtime/http/websockets.mdx b/docs/runtime/http/websockets.mdx index 174043200d..657da9f40b 100644 --- a/docs/runtime/http/websockets.mdx +++ b/docs/runtime/http/websockets.mdx @@ -107,13 +107,13 @@ Bun.serve({ Once the upgrade succeeds, Bun will send a `101 Switching Protocols` response per the [spec](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism). Additional `headers` can be attached to this `Response` in the call to `server.upgrade()`. +{/* prettier-ignore */} ```ts server.ts icon="/icons/typescript.svg" Bun.serve({ fetch(req, server) { const sessionId = await generateSessionId(); server.upgrade(req, { - headers: { - // [!code ++] + headers: { // [!code ++] "Set-Cookie": `SessionId=${sessionId}`, // [!code ++] }, // [!code ++] }); @@ -126,6 +126,8 @@ Bun.serve({ Contextual `data` can be attached to a new WebSocket in the `.upgrade()` call. This data is made available on the `ws.data` property inside the WebSocket handlers. +To strongly type `ws.data`, add a `data` property to the `websocket` handler object. This types `ws.data` across all lifecycle hooks. + ```ts server.ts icon="/icons/typescript.svg" type WebSocketData = { createdAt: number; @@ -166,9 +168,13 @@ Bun.serve({ }); ``` + +**Note:** Previously, you could specify the type of `ws.data` using a type parameter on `Bun.serve`, like `Bun.serve({...})`. This pattern was removed due to [a limitation in TypeScript](https://github.com/microsoft/TypeScript/issues/26242) in favor of the `data` property shown above. + + To connect to this server from the browser, create a new `WebSocket`. 
-```ts browser.js icon="file-code" +```js browser.js icon="file-code" const socket = new WebSocket("ws://localhost:3000/chat"); socket.addEventListener("message", event => { diff --git a/docs/runtime/index.mdx b/docs/runtime/index.mdx index 7ffa1d0a9b..9a01aa2e73 100644 --- a/docs/runtime/index.mdx +++ b/docs/runtime/index.mdx @@ -94,7 +94,7 @@ Cleaning... Done. ``` -Bun executes the script command in a subshell. On Linux & macOS, it checks for the following shells in order, using the first one it finds: `bash`, `sh`, `zsh`. On windows, it uses [bun shell](https://bun.com/docs/runtime/shell) to support bash-like syntax and many common commands. +Bun executes the script command in a subshell. On Linux & macOS, it checks for the following shells in order, using the first one it finds: `bash`, `sh`, `zsh`. On Windows, it uses [bun shell](/runtime/shell) to support bash-like syntax and many common commands. ⚡️ The startup time for `npm run` on Linux is roughly 170ms; with Bun it is `6ms`. @@ -153,7 +153,7 @@ bun run --filter 'ba*'