Compare commits

...

4 Commits

Author SHA1 Message Date
Dylan Conway
ae865c8f93 update 2026-01-22 15:09:16 -08:00
Claude Bot
f7b752edd7 Merge main into claude/mimalloc-v3-update 2026-01-22 13:11:03 +00:00
Claude Bot
46865d37dd Retry CI 2026-01-22 11:55:35 +00:00
Claude Bot
5c592bb05a Upgrade mimalloc to v3.2.7
Upgrade mimalloc from v2.x to v3.2.7 using Bun's fork with customizations.

Key API changes in mimalloc v3:
- mi_heap_get_default/mi_heap_set_default were removed; use the global
  mi_malloc/mi_free functions for thread-local allocations, which internally
  route through the theap API
- mi_heap_check_owned was replaced by mi_heap_contains (checks a single heap)
  and mi_check_owned (checks any mimalloc heap)
- mi_heap_* functions still work for explicitly created heaps (mi_heap_new)

MimallocArena now uses two separate vtables:
- heap_allocator_vtable: for owned heaps created with mi_heap_new()
- global_mimalloc_vtable: for default thread-local allocations, using the
  global mi_malloc/mi_free functions

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-22 10:10:08 +00:00
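
The migration pattern the commit message describes looks roughly like the following. This is a minimal illustrative sketch, not code from the diff: the `mimalloc` import path is hypothetical, and the extern signatures are assumed from the bindings declared in the third file below.

```zig
const mimalloc = @import("./mimalloc.zig"); // hypothetical import path

// v2 pattern (removed in v3):
//   const heap = mimalloc.mi_heap_get_default();
//   const p = mimalloc.mi_heap_malloc(heap, 64);
//
// v3 pattern: call the global functions directly; internally they route
// through the thread-local theap.
fn allocGlobal() ?*anyopaque {
    return mimalloc.mi_malloc(64);
}

// Explicitly created heaps keep the mi_heap_* API in v3.
fn ownedHeapExample() void {
    const heap = mimalloc.mi_heap_new() orelse return;
    defer mimalloc.mi_heap_destroy(heap); // releases every block in the heap
    const p = mimalloc.mi_heap_malloc(heap, 128);
    // mi_heap_contains replaces the removed mi_heap_check_owned:
    _ = mimalloc.mi_heap_contains(heap, p);
}
```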
3 changed files with 122 additions and 69 deletions

View File

@@ -4,7 +4,7 @@ register_repository(
   REPOSITORY
     oven-sh/mimalloc
   COMMIT
-    1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a
+    41c6f59487c7323412e4ae2ccb960846076bf832
 )
 set(MIMALLOC_CMAKE_ARGS
@@ -14,7 +14,8 @@ set(MIMALLOC_CMAKE_ARGS
   -DMI_BUILD_TESTS=OFF
   -DMI_USE_CXX=ON
   -DMI_SKIP_COLLECT_ON_EXIT=ON
+  -DMI_PRIM_HAS_PROCESS_ATTACH=1
   # ```
   # mimalloc_allow_large_os_pages=0 BUN_PORT=3004 mem bun http-hello.js
   # Started development server: http://localhost:3004
@@ -51,7 +52,7 @@ if(ENABLE_ASAN)
   list(APPEND MIMALLOC_CMAKE_ARGS -DMI_DEBUG_UBSAN=ON)
 elseif(APPLE OR LINUX)
   if(APPLE)
-    list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
+    list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
     list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
     list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
   else()
@@ -77,9 +78,9 @@ endif()
 if(WIN32)
   if(DEBUG)
-    set(MIMALLOC_LIBRARY mimalloc-static-debug)
+    set(MIMALLOC_LIBRARY mimalloc-debug)
   else()
-    set(MIMALLOC_LIBRARY mimalloc-static)
+    set(MIMALLOC_LIBRARY mimalloc)
   endif()
 elseif(DEBUG)
   if (ENABLE_ASAN)

View File

@@ -8,9 +8,9 @@ const Self = @This();
 ///
 /// This type is a `GenericAllocator`; see `src/allocators.zig`.
 pub const Default = struct {
-    pub fn allocator(self: Default) std.mem.Allocator {
-        _ = self;
-        return Borrowed.getDefault().allocator();
+    pub fn allocator(_: Default) std.mem.Allocator {
+        // Use global mimalloc functions which are thread-safe
+        return .{ .ptr = undefined, .vtable = &global_mimalloc_vtable };
     }
 };
@@ -23,11 +23,14 @@ pub const Borrowed = struct {
     #heap: BorrowedHeap,

     pub fn allocator(self: Borrowed) std.mem.Allocator {
-        return .{ .ptr = self.#heap, .vtable = &c_allocator_vtable };
+        return .{ .ptr = self.#heap, .vtable = &heap_allocator_vtable };
     }

     pub fn getDefault() Borrowed {
-        return .{ .#heap = getThreadHeap() };
+        // Legacy function: prefer Default.allocator() or getThreadLocalDefault().
+        // For backwards compatibility this returns a Borrowed with an undefined
+        // heap; callers should use the global allocator instead.
+        return .{ .#heap = undefined };
     }

     pub fn gc(self: Borrowed) void {
@@ -41,15 +44,17 @@ pub const Borrowed = struct {
         }
     }

-    pub fn ownsPtr(self: Borrowed, ptr: *const anyopaque) bool {
-        return mimalloc.mi_heap_check_owned(self.getMimallocHeap(), ptr);
+    pub fn ownsPtr(_: Borrowed, ptr: *const anyopaque) bool {
+        // In mimalloc v3, mi_heap_check_owned was removed.
+        // Use mi_check_owned, which checks if ptr is in any mimalloc heap.
+        return mimalloc.mi_check_owned(ptr);
     }

     fn fromOpaque(ptr: *anyopaque) Borrowed {
         return .{ .#heap = @ptrCast(@alignCast(ptr)) };
     }

-    fn getMimallocHeap(self: Borrowed) *mimalloc.Heap {
+    pub fn getMimallocHeap(self: Borrowed) *mimalloc.Heap {
         return if (comptime safety_checks) self.#heap.inner else self.#heap;
     }
@@ -98,19 +103,6 @@ const DebugHeap = struct {
     pub const deinit = void;
 };

-threadlocal var thread_heap: if (safety_checks) ?DebugHeap else void = if (safety_checks) null;
-
-fn getThreadHeap() BorrowedHeap {
-    if (comptime !safety_checks) return mimalloc.mi_heap_get_default();
-    if (thread_heap == null) {
-        thread_heap = .{
-            .inner = mimalloc.mi_heap_get_default(),
-            .thread_lock = .initLocked(),
-        };
-    }
-    return &thread_heap.?;
-}
-
 const log = bun.Output.scoped(.mimalloc, .hidden);

 pub fn allocator(self: Self) std.mem.Allocator {
@@ -121,17 +113,15 @@ pub fn borrow(self: Self) Borrowed {
     return .{ .#heap = if (comptime safety_checks) self.#heap.get() else self.#heap };
 }

-/// Internally, mimalloc calls mi_heap_get_default()
-/// to get the default heap.
-/// It uses pthread_getspecific to do that.
-/// We can save those extra calls if we just do it once in here
+/// Returns the default thread-local mimalloc allocator.
+/// Uses global mimalloc functions which are thread-safe.
 pub fn getThreadLocalDefault() std.mem.Allocator {
     if (bun.Environment.enable_asan) return bun.default_allocator;
-    return Borrowed.getDefault().allocator();
+    return .{ .ptr = undefined, .vtable = &global_mimalloc_vtable };
 }

 pub fn backingAllocator(_: Self) std.mem.Allocator {
-    return getThreadLocalDefault();
+    return bun.default_allocator;
 }

 pub fn dumpThreadStats(_: Self) void {
@@ -191,19 +181,23 @@ fn alignedAllocSize(ptr: [*]u8) usize {
     return mimalloc.mi_malloc_usable_size(ptr);
 }

-fn vtable_alloc(ptr: *anyopaque, len: usize, alignment: Alignment, _: usize) ?[*]u8 {
+// ============================================================================
+// VTable functions for owned heaps (created with mi_heap_new)
+// ============================================================================
+
+fn heap_vtable_alloc(ptr: *anyopaque, len: usize, alignment: Alignment, _: usize) ?[*]u8 {
     const self: Borrowed = .fromOpaque(ptr);
     self.assertThreadLock();
     return self.alignedAlloc(len, alignment);
 }

-fn vtable_resize(ptr: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
+fn heap_vtable_resize(ptr: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
     const self: Borrowed = .fromOpaque(ptr);
     self.assertThreadLock();
     return mimalloc.mi_expand(buf.ptr, new_len) != null;
 }

-fn vtable_free(
+fn heap_vtable_free(
     _: *anyopaque,
     buf: []u8,
     alignment: Alignment,
@@ -223,26 +217,7 @@ fn vtable_free(
     }
 }

-/// Attempt to expand or shrink memory, allowing relocation.
-///
-/// `memory.len` must equal the length requested from the most recent
-/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
-/// equal the same value that was passed as the `alignment` parameter to
-/// the original `alloc` call.
-///
-/// A non-`null` return value indicates the resize was successful. The
-/// allocation may have same address, or may have been relocated. In either
-/// case, the allocation now has size of `new_len`. A `null` return value
-/// indicates that the resize would be equivalent to allocating new memory,
-/// copying the bytes from the old memory, and then freeing the old memory.
-/// In such case, it is more efficient for the caller to perform the copy.
-///
-/// `new_len` must be greater than zero.
-///
-/// `ret_addr` is optionally provided as the first return address of the
-/// allocation call stack. If the value is `0` it means no return address
-/// has been provided.
-fn vtable_remap(ptr: *anyopaque, buf: []u8, alignment: Alignment, new_len: usize, _: usize) ?[*]u8 {
+fn heap_vtable_remap(ptr: *anyopaque, buf: []u8, alignment: Alignment, new_len: usize, _: usize) ?[*]u8 {
     const self: Borrowed = .fromOpaque(ptr);
     self.assertThreadLock();
     const heap = self.getMimallocHeap();
@@ -251,15 +226,82 @@ fn vtable_remap(ptr: *anyopaque, buf: []u8, alignment: Alignment, new_len: usize
     return @ptrCast(value);
 }

-pub fn isInstance(alloc: std.mem.Allocator) bool {
-    return alloc.vtable == &c_allocator_vtable;
-}
-
-const c_allocator_vtable = std.mem.Allocator.VTable{
-    .alloc = vtable_alloc,
-    .resize = vtable_resize,
-    .remap = vtable_remap,
-    .free = vtable_free,
-};
+// ============================================================================
+// VTable functions for global/default allocator (uses thread-local theap)
+// ============================================================================
+
+fn global_vtable_alloc(_: *anyopaque, len: usize, alignment: Alignment, _: usize) ?[*]u8 {
+    log("Global Malloc: {d}\n", .{len});
+
+    const ptr: ?*anyopaque = if (mimalloc.mustUseAlignedAlloc(alignment))
+        mimalloc.mi_malloc_aligned(len, alignment.toByteUnits())
+    else
+        mimalloc.mi_malloc(len);
+
+    if (comptime bun.Environment.isDebug) {
+        const usable = mimalloc.mi_malloc_usable_size(ptr);
+        if (usable < len) {
+            std.debug.panic("mimalloc: allocated size is too small: {d} < {d}", .{ usable, len });
+        }
+    }
+
+    return if (ptr) |p|
+        @as([*]u8, @ptrCast(p))
+    else
+        null;
+}
+
+fn global_vtable_resize(_: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
+    return mimalloc.mi_expand(buf.ptr, new_len) != null;
+}
+
+fn global_vtable_free(
+    _: *anyopaque,
+    buf: []u8,
+    alignment: Alignment,
+    _: usize,
+) void {
+    if (comptime bun.Environment.isDebug) {
+        assert(mimalloc.mi_is_in_heap_region(buf.ptr));
+        if (mimalloc.mustUseAlignedAlloc(alignment))
+            mimalloc.mi_free_size_aligned(buf.ptr, buf.len, alignment.toByteUnits())
+        else
+            mimalloc.mi_free_size(buf.ptr, buf.len);
+    } else {
+        mimalloc.mi_free(buf.ptr);
+    }
+}
+
+fn global_vtable_remap(_: *anyopaque, buf: []u8, alignment: Alignment, new_len: usize, _: usize) ?[*]u8 {
+    const aligned_size = alignment.toByteUnits();
+    const value = mimalloc.mi_realloc_aligned(buf.ptr, new_len, aligned_size);
+    return @ptrCast(value);
+}
+
+// ============================================================================
+// VTables
+// ============================================================================
+
+pub fn isInstance(alloc: std.mem.Allocator) bool {
+    return alloc.vtable == &heap_allocator_vtable or alloc.vtable == &global_mimalloc_vtable;
+}
+
+/// VTable for owned MimallocArena heaps (created with mi_heap_new).
+/// Uses heap-specific mi_heap_* functions.
+const heap_allocator_vtable = std.mem.Allocator.VTable{
+    .alloc = heap_vtable_alloc,
+    .resize = heap_vtable_resize,
+    .remap = heap_vtable_remap,
+    .free = heap_vtable_free,
+};
+
+/// VTable for global/default mimalloc allocator.
+/// Uses global mi_malloc/mi_free functions which are thread-safe.
+const global_mimalloc_vtable = std.mem.Allocator.VTable{
+    .alloc = global_vtable_alloc,
+    .resize = global_vtable_resize,
+    .remap = global_vtable_remap,
+    .free = global_vtable_free,
+};

 const std = @import("std");

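Taken together, these changes give callers two distinct allocators. The sketch below shows the resulting call sites; it is illustrative only, assuming this file is imported as `MimallocArena` (hypothetical path) and that an `arena` value was constructed by code outside this diff. Note that under ASAN builds `getThreadLocalDefault()` returns `bun.default_allocator` instead.

```zig
const std = @import("std");
const MimallocArena = @import("./MimallocArena.zig"); // hypothetical path

fn example(arena: MimallocArena) !void {
    // Thread-local default: backed by global_mimalloc_vtable (thread-safe).
    const global = MimallocArena.getThreadLocalDefault();
    const a = try global.alloc(u8, 32);
    defer global.free(a);

    // Owned heap: backed by heap_allocator_vtable via the arena's heap.
    const owned = arena.allocator();
    const b = try owned.alloc(u8, 32);
    defer owned.free(b);

    // isInstance now recognizes both vtables (outside ASAN builds).
    std.debug.assert(MimallocArena.isInstance(global));
    std.debug.assert(MimallocArena.isInstance(owned));
}
```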
View File

@@ -60,17 +60,28 @@ pub const Heap = opaque {
         return mi_heap_realloc(self, p, newsize);
     }

-    pub fn isOwned(self: *Heap, p: ?*anyopaque) bool {
-        return mi_heap_check_owned(self, p);
+    pub fn isOwned(self: *Heap, p: ?*const anyopaque) bool {
+        return mi_heap_contains(self, p);
     }
 };

 pub extern fn mi_heap_new() ?*Heap;
 pub extern fn mi_heap_delete(heap: *Heap) void;
 pub extern fn mi_heap_destroy(heap: *Heap) void;
-pub extern fn mi_heap_set_default(heap: *Heap) *Heap;
-pub extern fn mi_heap_get_default() *Heap;
 pub extern fn mi_heap_get_backing() *Heap;
 pub extern fn mi_heap_collect(heap: *Heap, force: bool) void;
 pub extern fn mi_heap_main() *Heap;
+
+// Thread-local heap (theap) API - new in mimalloc v3
+pub const THeap = opaque {};
+pub extern fn mi_theap_get_default() *THeap;
+pub extern fn mi_theap_set_default(theap: *THeap) *THeap;
+pub extern fn mi_theap_collect(theap: *THeap, force: bool) void;
+pub extern fn mi_theap_malloc(theap: *THeap, size: usize) ?*anyopaque;
+pub extern fn mi_theap_zalloc(theap: *THeap, size: usize) ?*anyopaque;
+pub extern fn mi_theap_calloc(theap: *THeap, count: usize, size: usize) ?*anyopaque;
+pub extern fn mi_theap_malloc_small(theap: *THeap, size: usize) ?*anyopaque;
+pub extern fn mi_theap_malloc_aligned(theap: *THeap, size: usize, alignment: usize) ?*anyopaque;
+pub extern fn mi_theap_realloc(theap: *THeap, p: ?*anyopaque, newsize: usize) ?*anyopaque;
+pub extern fn mi_heap_theap(heap: *Heap) *THeap;
 pub extern fn mi_heap_malloc(heap: *Heap, size: usize) ?*anyopaque;
@@ -102,8 +113,7 @@ pub extern fn mi_heap_rezalloc_aligned(heap: *Heap, p: ?*anyopaque, newsize: usi
 pub extern fn mi_heap_rezalloc_aligned_at(heap: *Heap, p: ?*anyopaque, newsize: usize, alignment: usize, offset: usize) ?*anyopaque;
 pub extern fn mi_heap_recalloc_aligned(heap: *Heap, p: ?*anyopaque, newcount: usize, size: usize, alignment: usize) ?*anyopaque;
 pub extern fn mi_heap_recalloc_aligned_at(heap: *Heap, p: ?*anyopaque, newcount: usize, size: usize, alignment: usize, offset: usize) ?*anyopaque;
-pub extern fn mi_heap_contains_block(heap: *Heap, p: *const anyopaque) bool;
-pub extern fn mi_heap_check_owned(heap: *Heap, p: *const anyopaque) bool;
+pub extern fn mi_heap_contains(heap: *const Heap, p: ?*const anyopaque) bool;
+pub extern fn mi_check_owned(p: ?*const anyopaque) bool;

 pub const struct_mi_heap_area_s = extern struct {
     blocks: ?*anyopaque,
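
The new theap declarations imply usage along these lines. This is a speculative sketch based only on the signatures declared above; it assumes `mi_free` is declared elsewhere in these bindings, and whether Bun calls the theap wrappers directly is not shown in this diff.

```zig
fn theapSketch() void {
    // Grab the calling thread's default theap and allocate from it.
    const theap = mi_theap_get_default();
    const p = mi_theap_malloc(theap, 256) orelse return;
    // The global mi_free releases theap allocations as well.
    mi_free(p);
    // Optionally reclaim freed memory on this thread.
    mi_theap_collect(theap, false);
}
```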