Compare commits

..

5 Commits

Author SHA1 Message Date
Claude Bot
3fc5badf4c address review: improve slice() docstring and fix test cleanup
- Document that slice() destructively mutates the underlying buffer via
  @constCast, listing the exact offsets involved
- Move sql.close() into the finally block so cleanup runs even if
  assertions fail

Co-Authored-By: Claude <noreply@anthropic.com>
2026-02-17 13:03:58 +00:00
Claude Bot
bdb7308acd fix(sql): remove unsafe @alignCast on unaligned PostgreSQL binary array data
PostgreSQL binary protocol data arrives in network buffers with no
alignment guarantees. The `PostgresBinarySingleDimensionArray.init()`
method used `@alignCast` to cast the raw byte pointer to a struct
pointer with 4-byte alignment, which panics at runtime when the buffer
is not naturally aligned.

Replace the `extern struct` overlay approach with safe `std.mem.readInt`
calls that handle arbitrary alignment, and use `align(1)` pointers for
writing decoded elements back into the buffer.

Closes #27079

Co-Authored-By: Claude <noreply@anthropic.com>
2026-02-17 12:52:57 +00:00
robobun
70b354aa04 fix(bundler): include CSS in all HTML entrypoints sharing a deduplicated CSS chunk (#27040) 2026-02-15 03:36:06 -08:00
Jarred Sumner
9d5a800c3d fix(napi,timers): callback scope (#27026) 2026-02-15 03:33:48 -08:00
Jarred Sumner
77ca318336 Reduce the number of closures in generated bundler code (#27022)
### Problem

The bundler's `__toESM` helper creates a new getter-wrapped proxy object
every time a CJS
module is imported. In a large app, a popular dependency like React can
be imported 600+
times — each creating a fresh object with ~44 getter properties. This
produces ~27K
unnecessary `GetterSetter` objects, ~25K closures, and ~25K
`JSLexicalEnvironment` scope
objects at startup.

Additionally, `__export` and `__exportValue` use `var`-scoped loop
variables captured by
setter closures, meaning all setters incorrectly reference the last
iterated key (a latent bug).

### Changes

1. **`__toESM`: add WeakMap cache** — deduplicate repeated wrappings of
the same CJS
module. Two caches (one per `isNodeMode` value) to handle both import
modes correctly.
2. **Replace closures with `.bind()`** — `() => obj[key]` becomes
`__accessProp.bind(obj,
key)`. BoundFunction is cheaper than Function + JSLexicalEnvironment,
and frees the for-in `JSPropertyNameEnumerator` from the closure scope.
3. **Fix var-scoping bug in `__export`/`__exportValue`** — setter
closures captured a
shared `var name` and would all modify the last iterated key. `.bind()`
eagerly captures
the correct key per iteration.
4. **`__toCommonJS`: `.map()` → `for..of`** — eliminates throwaway array
allocation.
5. **`__reExport`: single `getOwnPropertyNames` call** — was calling it
twice when
`secondTarget` was provided.

### Impact (measured on a ~23MB single-bundle app with 600+ React
imports)

| Metric | Before | After | Delta |
|--------|--------|-------|-------|
| **Total objects** | 745,985 | 664,001 | **-81,984 (-11%)** |
| **Heap size** | 115 MB | 111 MB | **-4 MB** |
| GetterSetter | 34,625 | 13,428 | -21,197 (-61%) |
| Function | 221,302 | 197,024 | -24,278 (-11%) |
| JSLexicalEnvironment | 70,101 | 44,633 | -25,468 (-36%) |
| Structure | 40,254 | 39,762 | -492 |
2026-02-15 00:36:57 -08:00
17 changed files with 582 additions and 271 deletions

View File

@@ -4,7 +4,7 @@ const TimerObjectInternals = @This();
/// Identifier for this timer that is exposed to JavaScript (by `+timer`)
id: i32 = -1,
interval: u31 = 0,
strong_this: jsc.Strong.Optional = .empty,
this_value: jsc.JSRef = .empty(),
flags: Flags = .{},
/// Used by:
@@ -76,31 +76,41 @@ pub fn runImmediateTask(this: *TimerObjectInternals, vm: *VirtualMachine) bool {
// loop alive other than setImmediates
(!this.flags.is_keeping_event_loop_alive and !vm.isEventLoopAliveExcludingImmediates()))
{
this.setEnableKeepingEventLoopAlive(vm, false);
this.this_value.downgrade();
this.deref();
return false;
}
const timer = this.strong_this.get() orelse {
const timer = this.this_value.tryGet() orelse {
if (Environment.isDebug) {
@panic("TimerObjectInternals.runImmediateTask: this_object is null");
}
this.setEnableKeepingEventLoopAlive(vm, false);
this.deref();
return false;
};
const globalThis = vm.global;
this.strong_this.deinit();
this.this_value.downgrade();
this.eventLoopTimer().state = .FIRED;
this.setEnableKeepingEventLoopAlive(vm, false);
timer.ensureStillAlive();
vm.eventLoop().enter();
const callback = ImmediateObject.js.callbackGetCached(timer).?;
const arguments = ImmediateObject.js.argumentsGetCached(timer).?;
this.ref();
const exception_thrown = this.run(globalThis, timer, callback, arguments, this.asyncID(), vm);
this.deref();
if (this.eventLoopTimer().state == .FIRED) {
this.deref();
}
const exception_thrown = brk: {
this.ref();
defer {
if (this.eventLoopTimer().state == .FIRED) {
this.deref();
}
this.deref();
}
break :brk this.run(globalThis, timer, callback, arguments, this.asyncID(), vm);
};
// --- after this point, the timer is no longer guaranteed to be alive ---
vm.eventLoop().exitMaybeDrainMicrotasks(!exception_thrown) catch return true;
@@ -120,7 +130,13 @@ pub fn fire(this: *TimerObjectInternals, _: *const timespec, vm: *jsc.VirtualMac
this.eventLoopTimer().state = .FIRED;
const globalThis = vm.global;
const this_object = this.strong_this.get().?;
const this_object = this.this_value.tryGet() orelse {
this.setEnableKeepingEventLoopAlive(vm, false);
this.flags.has_cleared_timer = true;
this.this_value.downgrade();
this.deref();
return;
};
const callback: JSValue, const arguments: JSValue, var idle_timeout: JSValue, var repeat: JSValue = switch (kind) {
.setImmediate => .{
@@ -143,7 +159,7 @@ pub fn fire(this: *TimerObjectInternals, _: *const timespec, vm: *jsc.VirtualMac
}
this.setEnableKeepingEventLoopAlive(vm, false);
this.flags.has_cleared_timer = true;
this.strong_this.deinit();
this.this_value.downgrade();
this.deref();
return;
@@ -152,7 +168,7 @@ pub fn fire(this: *TimerObjectInternals, _: *const timespec, vm: *jsc.VirtualMac
var time_before_call: timespec = undefined;
if (kind != .setInterval) {
this.strong_this.clearWithoutDeallocation();
this.this_value.downgrade();
} else {
time_before_call = timespec.msFromNow(.allow_mocked_time, this.interval);
}
@@ -239,7 +255,7 @@ fn convertToInterval(this: *TimerObjectInternals, global: *JSGlobalObject, timer
// https://github.com/nodejs/node/blob/a7cbb904745591c9a9d047a364c2c188e5470047/lib/internal/timers.js#L613
TimeoutObject.js.idleTimeoutSetCached(timer, global, repeat);
this.strong_this.set(global, timer);
this.this_value.setStrong(timer, global);
this.flags.kind = .setInterval;
this.interval = new_interval;
this.reschedule(timer, vm, global);
@@ -297,7 +313,7 @@ pub fn init(
this.reschedule(timer, vm, global);
}
this.strong_this.set(global, timer);
this.this_value.setStrong(timer, global);
}
pub fn doRef(this: *TimerObjectInternals, _: *jsc.JSGlobalObject, this_value: JSValue) JSValue {
@@ -327,7 +343,7 @@ pub fn doRefresh(this: *TimerObjectInternals, globalObject: *jsc.JSGlobalObject,
return this_value;
}
this.strong_this.set(globalObject, this_value);
this.this_value.setStrong(this_value, globalObject);
this.reschedule(this_value, VirtualMachine.get(), globalObject);
return this_value;
@@ -350,12 +366,18 @@ pub fn cancel(this: *TimerObjectInternals, vm: *VirtualMachine) void {
this.setEnableKeepingEventLoopAlive(vm, false);
this.flags.has_cleared_timer = true;
if (this.flags.kind == .setImmediate) return;
if (this.flags.kind == .setImmediate) {
// Release the strong reference so the GC can collect the JS object.
// The immediate task is still in the event loop queue and will be skipped
// by runImmediateTask when it sees has_cleared_timer == true.
this.this_value.downgrade();
return;
}
const was_active = this.eventLoopTimer().state == .ACTIVE;
this.eventLoopTimer().state = .CANCELLED;
this.strong_this.deinit();
this.this_value.downgrade();
if (was_active) {
vm.timer.remove(this.eventLoopTimer());
@@ -442,12 +464,12 @@ pub fn getDestroyed(this: *TimerObjectInternals) bool {
}
pub fn finalize(this: *TimerObjectInternals) void {
this.strong_this.deinit();
this.this_value.finalize();
this.deref();
}
pub fn deinit(this: *TimerObjectInternals) void {
this.strong_this.deinit();
this.this_value.deinit();
const vm = VirtualMachine.get();
const kind = this.flags.kind;

View File

@@ -22,7 +22,6 @@
#include <wtf/IterationStatus.h>
#include <JavaScriptCore/CodeBlock.h>
#include <JavaScriptCore/FunctionCodeBlock.h>
#include <JavaScriptCore/Interpreter.h>
#include "ErrorStackFrame.h"
@@ -115,9 +114,9 @@ JSCStackTrace JSCStackTrace::fromExisting(JSC::VM& vm, const WTF::Vector<JSC::St
void JSCStackTrace::getFramesForCaller(JSC::VM& vm, JSC::CallFrame* callFrame, JSC::JSCell* owner, JSC::JSValue caller, WTF::Vector<JSC::StackFrame>& stackTrace, size_t stackTraceLimit)
{
// Compute the number of frames to skip by walking the stack to find the caller.
// We need this first pass because Interpreter::getStackTrace uses framesToSkip
// as a count of visible (non-private) frames to skip.
size_t framesCount = 0;
bool belowCaller = false;
int32_t skipFrames = 0;
WTF::String callerName {};
@@ -130,15 +129,29 @@ void JSCStackTrace::getFramesForCaller(JSC::VM& vm, JSC::CallFrame* callFrame, J
callerName = callerFunctionInternal->name();
}
size_t totalFrames = 0;
if (!callerName.isEmpty()) {
JSC::StackVisitor::visit(callFrame, vm, [&](JSC::StackVisitor& visitor) -> WTF::IterationStatus {
if (isImplementationVisibilityPrivate(visitor)) {
return WTF::IterationStatus::Continue;
}
skipFrames += 1;
framesCount += 1;
if (visitor->functionName() == callerName) {
// skip caller frame and all frames above it
if (!belowCaller) {
skipFrames += 1;
if (visitor->functionName() == callerName) {
belowCaller = true;
return WTF::IterationStatus::Continue;
}
}
totalFrames += 1;
if (totalFrames > stackTraceLimit) {
return WTF::IterationStatus::Done;
}
@@ -150,34 +163,95 @@ void JSCStackTrace::getFramesForCaller(JSC::VM& vm, JSC::CallFrame* callFrame, J
return WTF::IterationStatus::Continue;
}
skipFrames += 1;
auto callee = visitor->callee();
if (callee.isCell() && callee.asCell() == caller) {
framesCount += 1;
// skip caller frame and all frames above it
if (!belowCaller) {
auto callee = visitor->callee();
skipFrames += 1;
if (callee.isCell() && callee.asCell() == caller) {
belowCaller = true;
return WTF::IterationStatus::Continue;
}
}
totalFrames += 1;
if (totalFrames > stackTraceLimit) {
return WTF::IterationStatus::Done;
}
return WTF::IterationStatus::Continue;
});
} else if (caller.isEmpty() || caller.isUndefined()) {
// Skip the first frame (captureStackTrace itself).
skipFrames = 1;
// Skip the first frame.
JSC::StackVisitor::visit(callFrame, vm, [&](JSC::StackVisitor& visitor) -> WTF::IterationStatus {
if (isImplementationVisibilityPrivate(visitor)) {
return WTF::IterationStatus::Continue;
}
framesCount += 1;
if (!belowCaller) {
skipFrames += 1;
belowCaller = true;
}
totalFrames += 1;
if (totalFrames > stackTraceLimit) {
return WTF::IterationStatus::Done;
}
return WTF::IterationStatus::Continue;
});
}
// Use Interpreter::getStackTrace which handles async continuation frames
// (frames from functions suspended at await points higher up the async call chain).
// This is critical for compatibility with V8's behavior where Error.captureStackTrace
// includes suspended async frames in the CallSite array.
WTF::Vector<JSC::StackFrame> rawStackTrace;
vm.interpreter.getStackTrace(owner, rawStackTrace, skipFrames, stackTraceLimit);
// Filter out private/internal implementation frames to match the behavior
// of the previous StackVisitor-based approach.
stackTrace.reserveInitialCapacity(rawStackTrace.size());
for (auto& frame : rawStackTrace) {
if (!isImplementationVisibilityPrivate(frame)) {
stackTrace.append(frame);
size_t i = 0;
totalFrames = 0;
stackTrace.reserveInitialCapacity(framesCount);
JSC::StackVisitor::visit(callFrame, vm, [&](JSC::StackVisitor& visitor) -> WTF::IterationStatus {
// Skip native frames
if (isImplementationVisibilityPrivate(visitor)) {
return WTF::IterationStatus::Continue;
}
}
// Skip frames if needed
if (skipFrames > 0) {
skipFrames--;
return WTF::IterationStatus::Continue;
}
totalFrames += 1;
if (totalFrames > stackTraceLimit) {
return WTF::IterationStatus::Done;
}
if (visitor->isNativeCalleeFrame()) {
auto* nativeCallee = visitor->callee().asNativeCallee();
switch (nativeCallee->category()) {
case NativeCallee::Category::Wasm: {
stackTrace.append(StackFrame(visitor->wasmFunctionIndexOrName()));
break;
}
case NativeCallee::Category::InlineCache: {
break;
}
}
#if USE(ALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS)
} else if (!!visitor->codeBlock())
#else
} else if (!!visitor->codeBlock() && !visitor->codeBlock()->unlinkedCodeBlock()->isBuiltinFunction())
#endif
stackTrace.append(StackFrame(vm, owner, visitor->callee().asCell(), visitor->codeBlock(), visitor->bytecodeIndex()));
else
stackTrace.append(StackFrame(vm, owner, visitor->callee().asCell()));
i++;
return (i == framesCount) ? WTF::IterationStatus::Done : WTF::IterationStatus::Continue;
});
}
JSCStackTrace JSCStackTrace::getStackTraceForThrownValue(JSC::VM& vm, JSC::JSValue thrownValue)

View File

@@ -68,6 +68,16 @@ pub const Chunk = struct {
}
pub fn getCSSChunkForHTML(this: *const Chunk, chunks: []Chunk) ?*Chunk {
// Look up the CSS chunk via the JS chunk's css_chunks indices.
// This correctly handles deduplicated CSS chunks that are shared
// across multiple HTML entry points (see issue #23668).
if (this.getJSChunkForHTML(chunks)) |js_chunk| {
const css_chunk_indices = js_chunk.content.javascript.css_chunks;
if (css_chunk_indices.len > 0) {
return &chunks[css_chunk_indices[0]];
}
}
// Fallback: match by entry_point_id for cases without a JS chunk.
const entry_point_id = this.entry_point.entry_point_id;
for (chunks) |*other| {
if (other.content == .css) {

View File

@@ -22,7 +22,6 @@ pub noinline fn computeChunks(
const entry_source_indices = this.graph.entry_points.items(.source_index);
const css_asts = this.graph.ast.items(.css);
const css_chunking = this.options.css_chunking;
var html_chunks = bun.StringArrayHashMap(Chunk).init(temp_allocator);
const loaders = this.parse_graph.input_files.items(.loader);
const ast_targets = this.graph.ast.items(.target);
@@ -148,10 +147,11 @@ pub noinline fn computeChunks(
if (css_source_indices.len > 0) {
const order = this.findImportedFilesInCSSOrder(temp_allocator, css_source_indices.slice());
const use_content_based_key = css_chunking or has_server_html_imports;
const hash_to_use = if (!use_content_based_key)
bun.hash(try temp_allocator.dupe(u8, entry_bits.bytes(this.graph.entry_points.len)))
else brk: {
// Always use content-based hashing for CSS chunk deduplication.
// This ensures that when multiple JS entry points import the
// same CSS files, they share a single CSS output chunk rather
// than producing duplicates that collide on hash-based naming.
const hash_to_use = brk: {
var hasher = std.hash.Wyhash.init(5);
bun.writeAnyToHasher(&hasher, order.len);
for (order.slice()) |x| x.hash(&hasher);
@@ -322,7 +322,10 @@ pub noinline fn computeChunks(
const remapped_css_indexes = try temp_allocator.alloc(u32, css_chunks.count());
const css_chunk_values = css_chunks.values();
for (sorted_css_keys, js_chunks.count()..) |key, sorted_index| {
// Use sorted_chunks.len as the starting index because HTML chunks
// may be interleaved with JS chunks, so js_chunks.count() would be
// incorrect when HTML entry points are present.
for (sorted_css_keys, sorted_chunks.len..) |key, sorted_index| {
const index = css_chunks.getIndex(key) orelse unreachable;
sorted_chunks.appendAssumeCapacity(css_chunk_values[index]);
remapped_css_indexes[index] = @intCast(sorted_index);

View File

@@ -766,19 +766,13 @@ pub extern fn napi_type_tag_object(env: napi_env, _: napi_value, _: [*c]const na
pub extern fn napi_check_object_type_tag(env: napi_env, _: napi_value, _: [*c]const napi_type_tag, _: *bool) napi_status;
// do nothing for both of these
pub export fn napi_open_callback_scope(env_: napi_env, _: napi_value, _: *anyopaque, _: *anyopaque) napi_status {
pub export fn napi_open_callback_scope(_: napi_env, _: napi_value, _: *anyopaque, _: *anyopaque) napi_status {
log("napi_open_callback_scope", .{});
const env = env_ orelse {
return envIsNull();
};
return env.ok();
return @intFromEnum(NapiStatus.ok);
}
pub export fn napi_close_callback_scope(env_: napi_env, _: *anyopaque) napi_status {
pub export fn napi_close_callback_scope(_: napi_env, _: *anyopaque) napi_status {
log("napi_close_callback_scope", .{});
const env = env_ orelse {
return envIsNull();
};
return env.ok();
return @intFromEnum(NapiStatus.ok);
}
pub extern fn napi_throw(env: napi_env, @"error": napi_value) napi_status;
pub extern fn napi_throw_error(env: napi_env, code: [*c]const u8, msg: [*c]const u8) napi_status;

View File

@@ -12,24 +12,32 @@ var __getOwnPropNames = Object.getOwnPropertyNames;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Shared getter/setter functions: .bind(obj, key) avoids creating a closure
// and JSLexicalEnvironment per property. BoundFunction is much cheaper.
// Must be regular functions (not arrows) so .bind() can set `this`.
function __accessProp(key) {
return this[key];
}
// This is used to implement "export * from" statements. It copies properties
// from the imported module to the current module's ESM export object. If the
// current module is an entry point and the target format is CommonJS, we
// also copy the properties to "module.exports" in addition to our module's
// internal ESM export object.
export var __reExport = (target, mod, secondTarget) => {
for (let key of __getOwnPropNames(mod))
var keys = __getOwnPropNames(mod);
for (let key of keys)
if (!__hasOwnProp.call(target, key) && key !== "default")
__defProp(target, key, {
get: () => mod[key],
get: __accessProp.bind(mod, key),
enumerable: true,
});
if (secondTarget) {
for (let key of __getOwnPropNames(mod))
for (let key of keys)
if (!__hasOwnProp.call(secondTarget, key) && key !== "default")
__defProp(secondTarget, key, {
get: () => mod[key],
get: __accessProp.bind(mod, key),
enumerable: true,
});
@@ -37,11 +45,22 @@ export var __reExport = (target, mod, secondTarget) => {
}
};
/*__PURE__*/
var __toESMCache_node;
/*__PURE__*/
var __toESMCache_esm;
// Converts the module from CommonJS to ESM. When in node mode (i.e. in an
// ".mjs" file, package.json has "type: module", or the "__esModule" export
// in the CommonJS file is falsy or missing), the "default" property is
// overridden to point to the original CommonJS exports object instead.
export var __toESM = (mod, isNodeMode, target) => {
var canCache = mod != null && typeof mod === "object";
if (canCache) {
var cache = isNodeMode ? (__toESMCache_node ??= new WeakMap()) : (__toESMCache_esm ??= new WeakMap());
var cached = cache.get(mod);
if (cached) return cached;
}
target = mod != null ? __create(__getProtoOf(mod)) : {};
const to =
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target;
@@ -53,34 +72,34 @@ export var __toESM = (mod, isNodeMode, target) => {
for (let key of __getOwnPropNames(mod))
if (!__hasOwnProp.call(to, key))
__defProp(to, key, {
get: () => mod[key],
get: __accessProp.bind(mod, key),
enumerable: true,
});
if (canCache) cache.set(mod, to);
return to;
};
// Converts the module from ESM to CommonJS. This clones the input module
// object with the addition of a non-enumerable "__esModule" property set
// to "true", which overwrites any existing export named "__esModule".
var __moduleCache = /* @__PURE__ */ new WeakMap();
export var __toCommonJS = /* @__PURE__ */ from => {
var entry = __moduleCache.get(from),
export var __toCommonJS = from => {
var entry = (__moduleCache ??= new WeakMap()).get(from),
desc;
if (entry) return entry;
entry = __defProp({}, "__esModule", { value: true });
if ((from && typeof from === "object") || typeof from === "function")
__getOwnPropNames(from).map(
key =>
!__hasOwnProp.call(entry, key) &&
for (var key of __getOwnPropNames(from))
if (!__hasOwnProp.call(entry, key))
__defProp(entry, key, {
get: () => from[key],
get: __accessProp.bind(from, key),
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable,
}),
);
});
__moduleCache.set(from, entry);
return entry;
};
/*__PURE__*/
var __moduleCache;
// When you do know the module is CJS
export var __commonJS = (cb, mod) => () => (mod || cb((mod = { exports: {} }).exports, mod), mod.exports);
@@ -97,6 +116,10 @@ export var __name = (target, name) => {
// ESM export -> CJS export
// except, writable incase something re-exports
var __returnValue = v => v;
function __exportSetter(name, newValue) {
this[name] = __returnValue.bind(null, newValue);
}
export var __export = /* @__PURE__ */ (target, all) => {
for (var name in all)
@@ -104,15 +127,19 @@ export var __export = /* @__PURE__ */ (target, all) => {
get: all[name],
enumerable: true,
configurable: true,
set: newValue => (all[name] = () => newValue),
set: __exportSetter.bind(all, name),
});
};
function __exportValueSetter(name, newValue) {
this[name] = newValue;
}
export var __exportValue = (target, all) => {
for (var name in all) {
__defProp(target, name, {
get: () => all[name],
set: newValue => (all[name] = newValue),
get: __accessProp.bind(all, name),
set: __exportValueSetter.bind(all, name),
enumerable: true,
configurable: true,
});

View File

@@ -185,7 +185,7 @@ pub const Tag = enum(short) {
}
fn PostgresBinarySingleDimensionArray(comptime T: type) type {
return extern struct {
return struct {
// struct array_int4 {
// int4_t ndim; /* Number of dimensions */
// int4_t _ign; /* offset for data, removed by libpq */
@@ -197,44 +197,51 @@ pub const Tag = enum(short) {
// int4_t first_value; /* Beginning of integer data */
// };
ndim: i32,
offset_for_data: i32,
element_type: i32,
// Header is 5 x i32 = 20 bytes (ndim, offset_for_data, element_type, len, index)
const header_size = 20;
// Each array element is preceded by a 4-byte length prefix
const elem_stride = @sizeOf(T) + 4;
const Int = std.meta.Int(.unsigned, @bitSizeOf(T));
len: i32,
index: i32,
first_value: T,
bytes: []const u8,
pub fn slice(this: *@This()) []T {
if (this.len == 0) return &.{};
var head = @as([*]T, @ptrCast(&this.first_value));
var current = head;
const len: usize = @intCast(this.len);
for (0..len) |i| {
// Skip every other value as it contains the size of the element
current = current[1..];
const val = current[0];
const Int = std.meta.Int(.unsigned, @bitSizeOf(T));
const swapped = @byteSwap(@as(Int, @bitCast(val)));
head[i] = @bitCast(swapped);
current = current[1..];
}
return head[0..len];
/// Parses the binary array header from a raw (potentially unaligned) byte slice.
/// Uses std.mem.readInt to safely handle unaligned network data.
pub fn init(bytes: []const u8) @This() {
// Read the len field at offset 12 (after ndim + offset_for_data + element_type)
const len: i32 = @bitCast(std.mem.readInt(u32, bytes[12..16], .big));
return .{
.len = len,
.bytes = bytes,
};
}
pub fn init(bytes: []const u8) *@This() {
const this: *@This() = @ptrCast(@alignCast(@constCast(bytes.ptr)));
this.ndim = @byteSwap(this.ndim);
this.offset_for_data = @byteSwap(this.offset_for_data);
this.element_type = @byteSwap(this.element_type);
this.len = @byteSwap(this.len);
this.index = @byteSwap(this.index);
return this;
/// Reads array elements from the data portion, byte-swapping each value.
/// WARNING: This destructively mutates `this.bytes` (via `@constCast`) by
/// writing decoded elements densely into the header region starting at
/// offset `header_size`. Each element is read from its original position
/// (at `header_size + i * elem_stride + 4`) and written to `header_size +
/// i * @sizeOf(T)`. The returned slice points into this modified buffer.
pub fn slice(this: @This()) []align(1) T {
if (this.len <= 0) return &.{};
const len: usize = @intCast(this.len);
const data = @constCast(this.bytes);
// Data starts after the 20-byte header. Each element has a 4-byte
// length prefix followed by the element bytes.
// We write the decoded elements densely starting at the data region.
const out: [*]align(1) T = @ptrCast(data.ptr + header_size);
for (0..len) |i| {
const elem_offset = header_size + i * elem_stride + 4;
const val = std.mem.readInt(Int, data[elem_offset..][0..@sizeOf(T)], .big);
out[i] = @bitCast(val);
}
return out[0..len];
}
};
}

View File

@@ -2,13 +2,17 @@
exports[`Bun.build Bun.write(BuildArtifact) 1`] = `
"var __defProp = Object.defineProperty;
var __returnValue = (v) => v;
function __exportSetter(name, newValue) {
this[name] = __returnValue.bind(null, newValue);
}
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, {
get: all[name],
enumerable: true,
configurable: true,
set: (newValue) => all[name] = () => newValue
set: __exportSetter.bind(all, name)
});
};
@@ -31,13 +35,17 @@ NS.then(({ fn: fn2 }) => {
exports[`Bun.build outdir + reading out blobs works 1`] = `
"var __defProp = Object.defineProperty;
var __returnValue = (v) => v;
function __exportSetter(name, newValue) {
this[name] = __returnValue.bind(null, newValue);
}
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, {
get: all[name],
enumerable: true,
configurable: true,
set: (newValue) => all[name] = () => newValue
set: __exportSetter.bind(all, name)
});
};
@@ -58,23 +66,27 @@ NS.then(({ fn: fn2 }) => {
"
`;
exports[`Bun.build BuildArtifact properties: hash 1`] = `"d1c7nm6t"`;
exports[`Bun.build BuildArtifact properties: hash 1`] = `"est79qzq"`;
exports[`Bun.build BuildArtifact properties + entry.naming: hash 1`] = `"rm7e36cf"`;
exports[`Bun.build BuildArtifact properties + entry.naming: hash 1`] = `"7gfnt0h6"`;
exports[`Bun.build BuildArtifact properties sourcemap: hash index.js 1`] = `"d1c7nm6t"`;
exports[`Bun.build BuildArtifact properties sourcemap: hash index.js 1`] = `"est79qzq"`;
exports[`Bun.build BuildArtifact properties sourcemap: hash index.js.map 1`] = `"00000000"`;
exports[`Bun.build new Response(BuildArtifact) sets content type: response text 1`] = `
"var __defProp = Object.defineProperty;
var __returnValue = (v) => v;
function __exportSetter(name, newValue) {
this[name] = __returnValue.bind(null, newValue);
}
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, {
get: all[name],
enumerable: true,
configurable: true,
set: (newValue) => all[name] = () => newValue
set: __exportSetter.bind(all, name)
});
};

View File

@@ -1113,7 +1113,7 @@ describe("bundler", () => {
snapshotSourceMap: {
"entry.js.map": {
files: ["../node_modules/react/index.js", "../entry.js"],
mappingsExactMatch: "qYACA,WAAW,IAAQ,EAAE,ICDrB,eACA,QAAQ,IAAI,CAAK",
mappingsExactMatch: "miBACA,WAAW,IAAQ,EAAE,ICDrB,eACA,QAAQ,IAAI,CAAK",
},
},
});

View File

@@ -843,4 +843,60 @@ body {
api.expectFile("out/" + jsFile).toContain("sourceMappingURL");
},
});
// Test that multiple HTML entrypoints sharing the same CSS file both get
// the CSS link tag in production mode (css_chunking deduplication).
// Regression test for https://github.com/oven-sh/bun/issues/23668
itBundled("html/SharedCSSProductionMultipleEntries", {
outdir: "out/",
production: true,
files: {
"/entry1.html": `<!doctype html>
<html>
<head>
<link rel="stylesheet" href="./global.css" />
</head>
<body>
<div id="root"></div>
<script src="./main1.tsx"></script>
</body>
</html>`,
"/entry2.html": `<!doctype html>
<html>
<head>
<link rel="stylesheet" href="./global.css" />
</head>
<body>
<div id="root"></div>
<script src="./main2.tsx"></script>
</body>
</html>`,
"/global.css": `h1 { font-size: 24px; }`,
"/main1.tsx": `console.log("entry1");`,
"/main2.tsx": `console.log("entry2");`,
},
entryPoints: ["/entry1.html", "/entry2.html"],
onAfterBundle(api) {
const entry1Html = api.readFile("out/entry1.html");
const entry2Html = api.readFile("out/entry2.html");
// Both HTML files must contain a CSS link tag
const cssMatch1 = entry1Html.match(/href="(.*\.css)"/);
const cssMatch2 = entry2Html.match(/href="(.*\.css)"/);
expect(cssMatch1).not.toBeNull();
expect(cssMatch2).not.toBeNull();
// Both should reference the same deduplicated CSS chunk
expect(cssMatch1![1]).toBe(cssMatch2![1]);
// The CSS file should contain the shared styles
const cssContent = api.readFile("out/" + cssMatch1![1]);
expect(cssContent).toContain("font-size");
// Both HTML files should also have their respective JS bundles
expect(entry1Html).toMatch(/src=".*\.js"/);
expect(entry2Html).toMatch(/src=".*\.js"/);
},
});
});

View File

@@ -57,17 +57,17 @@ describe("bundler", () => {
"../entry.tsx",
],
mappings: [
["react.development.js:524:'getContextName'", "1:5412:Y1"],
["react.development.js:524:'getContextName'", "1:5567:Y1"],
["react.development.js:2495:'actScopeDepth'", "23:4082:GJ++"],
["react.development.js:696:''Component'", '1:7474:\'Component "%s"'],
["entry.tsx:6:'\"Content-Type\"'", '100:18809:"Content-Type"'],
["entry.tsx:11:'<html>'", "100:19063:void"],
["entry.tsx:23:'await'", "100:19163:await"],
["react.development.js:696:''Component'", '1:7629:\'Component "%s"'],
["entry.tsx:6:'\"Content-Type\"'", '100:18808:"Content-Type"'],
["entry.tsx:11:'<html>'", "100:19062:void"],
["entry.tsx:23:'await'", "100:19161:await"],
],
},
},
expectExactFilesize: {
"out/entry.js": 221720,
"out/entry.js": 221895,
},
run: {
stdout: "<!DOCTYPE html><html><body><h1>Hello World</h1><p>This is an example.</p></body></html>",

View File

@@ -76,13 +76,17 @@ describe("bundler", () => {
expect(bundled).toMatchInlineSnapshot(`
"var __defProp = Object.defineProperty;
var __returnValue = (v) => v;
function __exportSetter(name, newValue) {
this[name] = __returnValue.bind(null, newValue);
}
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, {
get: all[name],
enumerable: true,
configurable: true,
set: (newValue) => all[name] = () => newValue
set: __exportSetter.bind(all, name)
});
};
var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res);
@@ -160,7 +164,7 @@ describe("bundler", () => {
var { AsyncEntryPoint: AsyncEntryPoint2 } = await Promise.resolve().then(() => exports_AsyncEntryPoint);
AsyncEntryPoint2();
//# debugId=5E85CC0956C6307964756E2164756E21
//# debugId=42062903F19477CF64756E2164756E21
//# sourceMappingURL=out.js.map
"
`);
@@ -337,13 +341,17 @@ describe("bundler", () => {
expect(bundled).toMatchInlineSnapshot(`
"var __defProp = Object.defineProperty;
var __returnValue = (v) => v;
function __exportSetter(name, newValue) {
this[name] = __returnValue.bind(null, newValue);
}
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, {
get: all[name],
enumerable: true,
configurable: true,
set: (newValue) => all[name] = () => newValue
set: __exportSetter.bind(all, name)
});
};
var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res);
@@ -402,7 +410,7 @@ describe("bundler", () => {
var { AsyncEntryPoint: AsyncEntryPoint2 } = await Promise.resolve().then(() => exports_AsyncEntryPoint);
AsyncEntryPoint2();
//# debugId=C92CBF0103732ECC64756E2164756E21
//# debugId=BF876FBF618133C264756E2164756E21
//# sourceMappingURL=out.js.map
"
`);

View File

@@ -2150,10 +2150,7 @@ c {
toplevel-tilde.css: WARNING: CSS nesting syntax is not supported in the configured target environment (chrome10)
`, */
});
// TODO: Bun's bundler doesn't support multiple entry points generating CSS outputs
// with identical content hashes to the same output path. This test exposes that
// limitation. Skip until the bundler can deduplicate or handle this case.
itBundled.skip("css/MetafileCSSBundleTwoToOne", {
itBundled("css/MetafileCSSBundleTwoToOne", {
files: {
"/foo/entry.js": /* js */ `
import '../common.css'

View File

@@ -103,11 +103,11 @@ console.log(favicon);
"files": [
{
"input": "client.html",
"path": "./client-s249t5qg.js",
"path": "./client-b5m4ng86.js",
"loader": "js",
"isEntry": true,
"headers": {
"etag": "fxoJ6L-0X3o",
"etag": "Ax71YVYyZQc",
"content-type": "text/javascript;charset=utf-8"
}
},

View File

@@ -1,134 +0,0 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe } from "harness";
// GitHub Issue #25695: Error.captureStackTrace with custom prepareStackTrace
// does not include async continuation frames (awaiter frames), causing NX's
// recursion detection to fail and leading to infinite recursion.
test("Error.captureStackTrace includes async continuation frames in CallSite array", async () => {
  // Run the repro in a subprocess: the failure mode is unbounded async
  // recursion, which must not be able to take down the test runner itself.
  // The embedded script mirrors NX's recursion guard: it captures CallSites
  // via a temporary prepareStackTrace override and throws 'Loop detected'
  // when it sees its own async caller ('outerAsync') in the trace.
  await using proc = Bun.spawn({
    cmd: [
      bunExe(),
      "-e",
      `
      let callCount = 0;
      function getCallSites() {
        const prepareStackTraceBackup = Error.prepareStackTrace;
        Error.prepareStackTrace = (_, stackTraces) => stackTraces;
        const errorObject = {};
        Error.captureStackTrace(errorObject);
        const trace = errorObject.stack;
        Error.prepareStackTrace = prepareStackTraceBackup;
        trace.shift();
        return trace;
      }
      function preventRecursion() {
        const stackframes = getCallSites().slice(2);
        const found = stackframes.some((f) => {
          return f.getFunctionName() === 'outerAsync';
        });
        if (found) {
          throw new Error('Loop detected');
        }
      }
      async function outerAsync() {
        callCount++;
        if (callCount > 5) {
          throw new Error('Safety limit');
        }
        preventRecursion();
        await new Promise(resolve => setTimeout(resolve, 1));
        const result = await middleAsync();
        return result;
      }
      async function middleAsync() {
        return await innerAsync();
      }
      async function innerAsync() {
        return await outerAsync();
      }
      try {
        await outerAsync();
        console.log("BUG:" + callCount);
      } catch (e) {
        if (e.message === 'Loop detected') {
          console.log("OK:" + callCount);
        } else {
          console.log("FAIL:" + e.message);
        }
      }
      `,
    ],
    env: bunEnv,
    stderr: "pipe",
  });
  // Drain stdout/stderr concurrently with waiting for exit so the child
  // cannot block on a full pipe before exiting.
  const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
  // Should detect the recursion, not hit the safety limit
  expect(stdout.trim()).toStartWith("OK:");
  expect(exitCode).toBe(0);
});
test("Error.captureStackTrace async frames have correct function names", async () => {
await using proc = Bun.spawn({
cmd: [
bunExe(),
"-e",
`
function getCallSites() {
const backup = Error.prepareStackTrace;
Error.prepareStackTrace = (_, sites) => sites;
const obj = {};
Error.captureStackTrace(obj);
const trace = obj.stack;
Error.prepareStackTrace = backup;
return trace;
}
let captured = null;
async function alphaAsync() {
await new Promise(resolve => setTimeout(resolve, 1));
const result = await betaAsync();
return result;
}
async function betaAsync() {
return await gammaAsync();
}
async function gammaAsync() {
// Capture the stack trace inside the innermost async function
captured = getCallSites().map(s => s.getFunctionName()).filter(Boolean);
return 42;
}
await alphaAsync();
// The captured stack should include gammaAsync's callers (betaAsync, alphaAsync)
// via async continuation frames
const hasGamma = captured.includes('gammaAsync');
const hasBeta = captured.includes('betaAsync');
const hasAlpha = captured.includes('alphaAsync');
console.log(JSON.stringify({ hasGamma, hasBeta, hasAlpha, frames: captured }));
`,
],
env: bunEnv,
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
const result = JSON.parse(stdout.trim());
// gammaAsync is the current frame (synchronous), should always be present
expect(result.hasGamma).toBe(true);
// betaAsync and alphaAsync are async continuation frames
expect(result.hasBeta).toBe(true);
expect(result.hasAlpha).toBe(true);
expect(exitCode).toBe(0);
});

View File

@@ -0,0 +1,231 @@
import { SQL } from "bun";
import { expect, test } from "bun:test";
import net from "net";
// Regression test for https://github.com/oven-sh/bun/issues/27079
// Bun crashes with "incorrect alignment" panic when processing binary-format
// PostgreSQL int4[] or float4[] arrays from a network buffer whose alignment
// doesn't match the struct's natural alignment (4 bytes).
test("PostgreSQL binary int4_array should not crash on unaligned data", async () => {
  // Mock PostgreSQL server that answers any query with one row containing a
  // binary-format int4[] column.
  // NOTE(review): no explicit padding byte is inserted below; the array
  // payload's offset inside the client's receive buffer follows from the
  // wire framing itself (1-byte message-type tags, 4-byte lengths, variable
  // payloads), which carries no 4-byte alignment guarantee — presumably this
  // is what triggered the original @alignCast panic; confirm against the
  // decoder's buffer handling.
  const server = net.createServer(socket => {
    let gotStartup = false;
    socket.on("data", data => {
      if (!gotStartup) {
        gotStartup = true;
        // Client sent startup message. Respond with:
        // 1. AuthenticationOk
        // 2. ParameterStatus (client_encoding = UTF8)
        // 3. BackendKeyData
        // 4. ReadyForQuery (idle)
        const authOk = pgMsg("R", int32BE(0)); // AuthOk
        const paramStatus = pgMsg("S", Buffer.concat([cstr("client_encoding"), cstr("UTF8")]));
        const backendKey = pgMsg("K", Buffer.concat([int32BE(1234), int32BE(5678)]));
        const ready = pgMsg("Z", Buffer.from([0x49])); // 'I' = idle
        socket.write(Buffer.concat([authOk, paramStatus, backendKey, ready]));
        return;
      }
      // Assume any subsequent data is a query. Respond with a result set
      // containing one row with one column: an int4[] array in binary format.
      // RowDescription: 1 field
      //   name = "arr"
      //   table_oid = 0, column_index = 0
      //   type_oid = 1007 (int4_array)
      //   type_size = -1, type_modifier = -1
      //   format = 1 (binary)
      const fieldName = cstr("arr");
      const rowDesc = pgMsg(
        "T",
        Buffer.concat([
          int16BE(1), // number of fields
          fieldName,
          int32BE(0), // table OID
          int16BE(0), // column index
          int32BE(1007), // type OID = int4_array
          int16BE(-1), // type size
          int32BE(-1), // type modifier
          int16BE(1), // format code = binary
        ]),
      );
      // Build the binary int4 array payload:
      // PostgreSQL binary array format:
      //   ndim (4 bytes) = 1
      //   has_nulls (4 bytes) = 0
      //   element_type (4 bytes) = 23 (int4)
      //   dim_length (4 bytes) = 3 (3 elements)
      //   dim_lower_bound (4 bytes) = 1
      //   For each element: length (4 bytes) + value (4 bytes)
      const arrayData = Buffer.concat([
        int32BE(1), // ndim = 1
        int32BE(0), // has_nulls = 0
        int32BE(23), // element_type = int4
        int32BE(3), // length = 3 elements
        int32BE(1), // lower bound = 1
        // Element 0: length=4, value=10
        int32BE(4),
        int32BE(10),
        // Element 1: length=4, value=20
        int32BE(4),
        int32BE(20),
        // Element 2: length=4, value=30
        int32BE(4),
        int32BE(30),
      ]);
      // DataRow: 1 column
      const dataRow = pgMsg(
        "D",
        Buffer.concat([
          int16BE(1), // number of columns
          int32BE(arrayData.length), // column data length
          arrayData,
        ]),
      );
      // CommandComplete
      const cmdComplete = pgMsg("C", cstr("SELECT 1"));
      // ReadyForQuery (idle)
      const ready2 = pgMsg("Z", Buffer.from([0x49]));
      socket.write(Buffer.concat([rowDesc, dataRow, cmdComplete, ready2]));
    });
  });
  // Bind to an ephemeral port on loopback; read the assigned port back.
  await new Promise<void>(r => server.listen(0, "127.0.0.1", () => r()));
  const port = (server.address() as net.AddressInfo).port;
  const sql = new SQL({
    url: `postgres://test@127.0.0.1:${port}/test`,
    max: 1,
    idle_timeout: 1,
  });
  try {
    const rows = await sql`SELECT 1`;
    // The query should succeed without an alignment panic.
    // Verify we got an Int32Array with the correct values.
    expect(rows.length).toBe(1);
    const arr = rows[0].arr;
    expect(arr).toBeInstanceOf(Int32Array);
    expect(Array.from(arr)).toEqual([10, 20, 30]);
  } finally {
    // Cleanup runs even when an assertion above throws: close the pooled
    // connection first, then stop the listener.
    await sql.close();
    server.close();
  }
});
test("PostgreSQL binary float4_array should not crash on unaligned data", async () => {
const server = net.createServer(socket => {
let gotStartup = false;
socket.on("data", data => {
if (!gotStartup) {
gotStartup = true;
const authOk = pgMsg("R", int32BE(0));
const paramStatus = pgMsg("S", Buffer.concat([cstr("client_encoding"), cstr("UTF8")]));
const backendKey = pgMsg("K", Buffer.concat([int32BE(1234), int32BE(5678)]));
const ready = pgMsg("Z", Buffer.from([0x49]));
socket.write(Buffer.concat([authOk, paramStatus, backendKey, ready]));
return;
}
// RowDescription: 1 field with float4_array (OID 1021) in binary format
const fieldName = cstr("arr");
const rowDesc = pgMsg(
"T",
Buffer.concat([
int16BE(1),
fieldName,
int32BE(0),
int16BE(0),
int32BE(1021), // type OID = float4_array
int16BE(-1),
int32BE(-1),
int16BE(1), // binary format
]),
);
// Binary float4 array: [1.5, 2.5]
const arrayData = Buffer.concat([
int32BE(1), // ndim = 1
int32BE(0), // has_nulls = 0
int32BE(700), // element_type = float4
int32BE(2), // length = 2 elements
int32BE(1), // lower bound = 1
// Element 0: length=4, value=1.5
int32BE(4),
float32BE(1.5),
// Element 1: length=4, value=2.5
int32BE(4),
float32BE(2.5),
]);
const dataRow = pgMsg("D", Buffer.concat([int16BE(1), int32BE(arrayData.length), arrayData]));
const cmdComplete = pgMsg("C", cstr("SELECT 1"));
const ready2 = pgMsg("Z", Buffer.from([0x49]));
socket.write(Buffer.concat([rowDesc, dataRow, cmdComplete, ready2]));
});
});
await new Promise<void>(r => server.listen(0, "127.0.0.1", () => r()));
const port = (server.address() as net.AddressInfo).port;
const sql = new SQL({
url: `postgres://test@127.0.0.1:${port}/test`,
max: 1,
idle_timeout: 1,
});
try {
const rows = await sql`SELECT 1`;
expect(rows.length).toBe(1);
const arr = rows[0].arr;
expect(arr).toBeInstanceOf(Float32Array);
expect(Array.from(arr)).toEqual([1.5, 2.5]);
} finally {
await sql.close();
server.close();
}
});
// Helper functions
/**
 * Frame a PostgreSQL backend message: a 1-byte ASCII type tag followed by a
 * big-endian int32 length that covers the length field itself plus payload.
 */
function pgMsg(type: string, payload: Buffer): Buffer {
  const header = Buffer.alloc(5);
  header.write(type, 0, 1, "ascii");
  header.writeInt32BE(payload.length + 4, 1);
  return Buffer.concat([header, payload]);
}
/** Encode a signed 32-bit integer as a 4-byte big-endian Buffer. */
function int32BE(val: number): Buffer {
  const bytes = Buffer.allocUnsafe(4);
  bytes.writeInt32BE(val, 0);
  return bytes;
}
/** Encode a signed 16-bit integer as a 2-byte big-endian Buffer. */
function int16BE(val: number): Buffer {
  const bytes = Buffer.allocUnsafe(2);
  bytes.writeInt16BE(val, 0);
  return bytes;
}
/** Encode a number as a 4-byte big-endian IEEE-754 single-precision Buffer. */
function float32BE(val: number): Buffer {
  const bytes = Buffer.allocUnsafe(4);
  bytes.writeFloatBE(val, 0);
  return bytes;
}
/** Encode a string as UTF-8 bytes with a trailing NUL terminator (C string). */
function cstr(s: string): Buffer {
  return Buffer.from(`${s}\0`, "utf8");
}

View File

@@ -92,13 +92,17 @@ test("cyclic imports with async dependencies should generate async wrappers", as
expect(bundled).toMatchInlineSnapshot(`
"var __defProp = Object.defineProperty;
var __returnValue = (v) => v;
function __exportSetter(name, newValue) {
this[name] = __returnValue.bind(null, newValue);
}
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, {
get: all[name],
enumerable: true,
configurable: true,
set: (newValue) => all[name] = () => newValue
set: __exportSetter.bind(all, name)
});
};
var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res);
@@ -176,7 +180,7 @@ test("cyclic imports with async dependencies should generate async wrappers", as
var { AsyncEntryPoint: AsyncEntryPoint2 } = await Promise.resolve().then(() => exports_AsyncEntryPoint);
AsyncEntryPoint2();
//# debugId=986E7BD819E590FD64756E2164756E21
//# debugId=2020261114B67BB564756E2164756E21
//# sourceMappingURL=entryBuild.js.map
"
`);