Compare commits

...

1 Commit

Author SHA1 Message Date
Claude Bot
4b10b907fc Replace std.ArrayList with bun.collections.ArrayListDefault
Replace 5 instances of `std.ArrayList(...).init(bun.default_allocator)`
with `bun.collections.ArrayListDefault(...)` for better memory management.

Changes:
- src/string.zig: ArrayList(u8) for string formatting
- src/install/lockfile.zig: ArrayList(u8) for lockfile serialization
- src/io/PipeWriter.zig: ArrayList(u8) in StreamBuffer struct
- src/s3/list_objects.zig: ArrayList for S3 contents and prefixes
- src/bundler/bundle_v2.zig: ArrayList(OutputFile) return type

ArrayListDefault uses the default allocator with zero overhead while
providing consistent deinit semantics. Used deinitShallow() where
element types don't have deinit methods (e.g., []const u8).
2025-11-08 03:44:00 +00:00
7 changed files with 42 additions and 42 deletions
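
For context, a minimal before/after sketch of the pattern being applied, using only calls that appear in the diffs below. The helper functions and formatted values are made up for illustration, and the `bun` import path is assumed to follow the repo's convention.

const std = @import("std");
const bun = @import("bun"); // assumption: Bun's internal module import convention

// Before: the allocator is captured at init time and state is read through fields.
fn formatOld() !usize {
    var list = std.ArrayList(u8).init(bun.default_allocator);
    defer list.deinit();
    try list.writer().print("{d},{d}", .{ 1, 2 });
    return list.items.len;
}

// After: init() takes no allocator, and items()/capacity()/allocator() are methods.
fn formatNew() !usize {
    var list = bun.collections.ArrayListDefault(u8).init();
    defer list.deinit(); // u8 has no deinit(); per the commit message, deinitShallow() covers such element types
    try list.writer().print("{d},{d}", .{ 1, 2 });
    return list.items().len;
}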

View File

@@ -322,7 +322,7 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
 allocator,
 .{ .js = vm.event_loop },
 );
-const bundled_outputs = bundled_outputs_list.items;
+const bundled_outputs = bundled_outputs_list.items();
 if (bundled_outputs.len == 0) {
 Output.prettyln("done", .{});
 Output.flush();

View File

@@ -1548,7 +1548,7 @@ pub const BundleV2 = struct {
 bake_options: BakeOptions,
 alloc: std.mem.Allocator,
 event_loop: EventLoop,
-) !std.ArrayList(options.OutputFile) {
+) !bun.collections.ArrayListDefault(options.OutputFile) {
 var this = try BundleV2.init(
 server_transpiler,
 bake_options,
@@ -1596,7 +1596,7 @@ pub const BundleV2 = struct {
 );
 if (chunks.len == 0) {
-return std.ArrayList(options.OutputFile).init(bun.default_allocator);
+return bun.collections.ArrayListDefault(options.OutputFile).init();
 }
 return try this.linker.generateChunksInParallel(chunks, false);

View File

@@ -2,7 +2,7 @@ pub fn generateChunksInParallel(
 c: *LinkerContext,
 chunks: []Chunk,
 comptime is_dev_server: bool,
-) !if (is_dev_server) void else std.ArrayList(options.OutputFile) {
+) !if (is_dev_server) void else bun.collections.ArrayListDefault(options.OutputFile) {
 const trace = bun.perf.trace("Bundler.generateChunksInParallel");
 defer trace.end();

View File

@@ -1257,7 +1257,7 @@ pub fn saveToDisk(this: *Lockfile, load_result: *const LoadResult, options: *con
 break :bytes writer_buf.list.items;
 }
-var bytes = std.ArrayList(u8).init(bun.default_allocator);
+var bytes = bun.collections.ArrayListDefault(u8).init();
 var total_size: usize = 0;
 var end_pos: usize = 0;
@@ -1265,9 +1265,9 @@ pub fn saveToDisk(this: *Lockfile, load_result: *const LoadResult, options: *con
 Output.err(err, "failed to serialize lockfile", .{});
 Global.crash();
 };
-if (bytes.items.len >= end_pos)
-bytes.items[end_pos..][0..@sizeOf(usize)].* = @bitCast(total_size);
-break :bytes bytes.items;
+if (bytes.items().len >= end_pos)
+bytes.items()[end_pos..][0..@sizeOf(usize)].* = @bitCast(total_size);
+break :bytes bytes.toOwnedSlice() catch bun.outOfMemory();
 };
 defer bun.default_allocator.free(bytes);
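
In this hunk, `break :bytes bytes.items;` handed out the list's internal buffer, whose length can be smaller than the underlying allocation (the capacity), while the `defer bun.default_allocator.free(bytes)` below frees whatever slice it is given. Switching to `toOwnedSlice()` — which presumably mirrors std's behavior of resizing to the exact length and transferring ownership — appears to be the point: the freed slice then matches the allocation. A made-up sketch of the same hand-off:

// Sketch only (hypothetical helper): the ownership hand-off used in saveToDisk above.
fn serializeExample() ![]u8 {
    var buf = bun.collections.ArrayListDefault(u8).init();
    errdefer buf.deinit();
    try buf.writer().print("{d}", .{42});
    // The caller now owns the returned slice and frees it with bun.default_allocator.
    return buf.toOwnedSlice();
}

A caller would mirror the hunk above: `const bytes = try serializeExample(); defer bun.default_allocator.free(bytes);`.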

View File

@@ -1097,7 +1097,7 @@ pub fn WindowsBufferedWriter(Parent: type, function_table: anytype) type {
 /// Basic std.ArrayList(u8) + usize cursor wrapper
 pub const StreamBuffer = struct {
-list: std.ArrayList(u8) = std.ArrayList(u8).init(bun.default_allocator),
+list: bun.collections.ArrayListDefault(u8) = bun.collections.ArrayListDefault(u8).init(),
 cursor: usize = 0,
 pub fn reset(this: *StreamBuffer) void {
@@ -1107,19 +1107,19 @@ pub const StreamBuffer = struct {
 }
 pub fn maybeShrink(this: *StreamBuffer) void {
-if (this.list.capacity > std.heap.pageSize()) {
+if (this.list.capacity() > std.heap.pageSize()) {
 // workaround insane zig decision to make it undefined behavior to resize .len < .capacity
-this.list.expandToCapacity();
+this.list.expandToCapacity(undefined);
 this.list.shrinkAndFree(std.heap.pageSize());
 }
 }
 pub fn memoryCost(this: *const StreamBuffer) usize {
-return this.list.capacity;
+return this.list.capacity();
 }
 pub fn size(this: *const StreamBuffer) usize {
-return this.list.items.len - this.cursor;
+return this.list.items().len - this.cursor;
 }
 pub fn isEmpty(this: *const StreamBuffer) bool {
@@ -1152,7 +1152,7 @@ pub const StreamBuffer = struct {
 pub fn writeTypeAsBytesAssumeCapacity(this: *StreamBuffer, comptime T: type, data: T) void {
 var byte_list = bun.ByteList.moveFromList(&this.list);
-defer this.list = byte_list.moveToListManaged(this.list.allocator);
+defer this.list = byte_list.moveToListManaged(this.list.allocator());
 byte_list.writeTypeAsBytesAssumeCapacity(T, data);
 }
@@ -1164,20 +1164,20 @@ pub const StreamBuffer = struct {
 {
 var byte_list = bun.ByteList.moveFromList(&this.list);
-defer this.list = byte_list.moveToListManaged(this.list.allocator);
-_ = try byte_list.writeLatin1(this.list.allocator, buffer);
+defer this.list = byte_list.moveToListManaged(this.list.allocator());
+_ = try byte_list.writeLatin1(this.list.allocator(), buffer);
 }
-return this.list.items[this.cursor..];
+return this.list.items()[this.cursor..];
 } else if (comptime @TypeOf(writeFn) == @TypeOf(&writeUTF16) and writeFn == &writeUTF16) {
 {
 var byte_list = bun.ByteList.moveFromList(&this.list);
-defer this.list = byte_list.moveToListManaged(this.list.allocator);
+defer this.list = byte_list.moveToListManaged(this.list.allocator());
-_ = try byte_list.writeUTF16(this.list.allocator, buffer);
+_ = try byte_list.writeUTF16(this.list.allocator(), buffer);
 }
-return this.list.items[this.cursor..];
+return this.list.items()[this.cursor..];
 } else if (comptime @TypeOf(writeFn) == @TypeOf(&write) and writeFn == &write) {
 return buffer;
 } else {
@@ -1193,25 +1193,25 @@ pub const StreamBuffer = struct {
 }
 var byte_list = bun.ByteList.moveFromList(&this.list);
-defer this.list = byte_list.moveToListManaged(this.list.allocator);
+defer this.list = byte_list.moveToListManaged(this.list.allocator());
-_ = try byte_list.writeLatin1(this.list.allocator, buffer);
+_ = try byte_list.writeLatin1(this.list.allocator(), buffer);
 }
 pub fn writeUTF16(this: *StreamBuffer, buffer: []const u16) OOM!void {
 var byte_list = bun.ByteList.moveFromList(&this.list);
-defer this.list = byte_list.moveToListManaged(this.list.allocator);
+defer this.list = byte_list.moveToListManaged(this.list.allocator());
-_ = try byte_list.writeUTF16(this.list.allocator, buffer);
+_ = try byte_list.writeUTF16(this.list.allocator(), buffer);
 }
 pub fn slice(this: *const StreamBuffer) []const u8 {
-return this.list.items[this.cursor..];
+return this.list.items()[this.cursor..];
 }
 pub fn deinit(this: *StreamBuffer) void {
 this.cursor = 0;
-if (this.list.capacity > 0) {
+if (this.list.capacity() > 0) {
 this.list.clearAndFree();
 }
 }
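
In the StreamBuffer hunks, the list no longer exposes allocator, capacity, and items as fields, so every read of `this.list.allocator`, `.capacity`, and `.items` becomes a method call, including inside the `ByteList.moveFromList` / `moveToListManaged` round-trips. A made-up helper showing the accessor shape (assumes this file's existing `std` import):

// Illustration only; not part of the PR.
fn dumpStreamBufferState(this: *const StreamBuffer) void {
    std.debug.print("len={d} cap={d} unread={d}\n", .{
        this.list.items().len, // was this.list.items.len
        this.list.capacity(), // was this.list.capacity
        this.list.items().len - this.cursor,
    });
}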

View File

@@ -59,16 +59,16 @@ pub const S3ListObjectsV2Result = struct {
 continuation_token: ?[]const u8,
 next_continuation_token: ?[]const u8,
 start_after: ?[]const u8,
-common_prefixes: ?std.ArrayList([]const u8),
-contents: ?std.ArrayList(S3ListObjectsContents),
+common_prefixes: ?bun.collections.ArrayListDefault([]const u8),
+contents: ?bun.collections.ArrayListDefault(S3ListObjectsContents),
 pub fn deinit(this: *const @This()) void {
 if (this.contents) |contents| {
-for (contents.items) |*item| item.deinit();
+for (contents.items()) |*item| item.deinit();
 contents.deinit();
 }
 if (this.common_prefixes) |common_prefixes| {
-common_prefixes.deinit();
+common_prefixes.deinitShallow();
 }
 }
@@ -115,9 +115,9 @@ pub const S3ListObjectsV2Result = struct {
 }
 if (this.contents) |contents| {
-const jsContents = try JSValue.createEmptyArray(globalObject, contents.items.len);
+const jsContents = try JSValue.createEmptyArray(globalObject, contents.items().len);
-for (contents.items, 0..) |item, i| {
+for (contents.items(), 0..) |item, i| {
 const objectInfo = JSValue.createEmptyObject(globalObject, 1);
 objectInfo.put(globalObject, jsc.ZigString.static("key"), try bun.String.createUTF8ForJS(globalObject, item.key));
@@ -165,9 +165,9 @@ pub const S3ListObjectsV2Result = struct {
 }
 if (this.common_prefixes) |common_prefixes| {
-const jsCommonPrefixes = try JSValue.createEmptyArray(globalObject, common_prefixes.items.len);
+const jsCommonPrefixes = try JSValue.createEmptyArray(globalObject, common_prefixes.items().len);
-for (common_prefixes.items, 0..) |prefix, i| {
+for (common_prefixes.items(), 0..) |prefix, i| {
 const jsPrefix = JSValue.createEmptyObject(globalObject, 1);
 jsPrefix.put(globalObject, jsc.ZigString.static("prefix"), try bun.String.createUTF8ForJS(globalObject, prefix));
 try jsCommonPrefixes.putIndex(globalObject, @intCast(i), jsPrefix);
@@ -196,8 +196,8 @@ pub fn parseS3ListObjectsResult(xml: []const u8) !S3ListObjectsV2Result {
 .start_after = null,
 };
-var contents = std.ArrayList(S3ListObjectsContents).init(bun.default_allocator);
-var common_prefixes = std.ArrayList([]const u8).init(bun.default_allocator);
+var contents = bun.collections.ArrayListDefault(S3ListObjectsContents).init();
+var common_prefixes = bun.collections.ArrayListDefault([]const u8).init();
 // we dont use trailing ">" as it may finish with xmlns=...
 if (strings.indexOf(xml, "<ListBucketResult")) |delete_result_pos| {
@@ -482,17 +482,17 @@ pub fn parseS3ListObjectsResult(xml: []const u8) !S3ListObjectsV2Result {
 }
 }
-if (contents.items.len != 0) {
+if (contents.items().len != 0) {
 result.contents = contents;
 } else {
-for (contents.items) |*item| item.deinit();
+for (contents.items()) |*item| item.deinit();
 contents.deinit();
 }
-if (common_prefixes.items.len != 0) {
+if (common_prefixes.items().len != 0) {
 result.common_prefixes = common_prefixes;
 } else {
-common_prefixes.deinit();
+common_prefixes.deinitShallow();
 }
 }
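
In the S3 hunks, `deinit()` stays in use for the `S3ListObjectsContents` list, whose elements are torn down individually first, while the `[]const u8` prefix list moves to `deinitShallow()`, which per the commit message is meant for element types without a deinit method. A hypothetical caller, to show how a result produced by the parser above gets consumed and released:

// Hypothetical caller; not part of the PR.
fn countListedKeys(xml: []const u8) !usize {
    const result = try parseS3ListObjectsResult(xml);
    defer result.deinit(); // deep for contents, shallow for common_prefixes, as defined above
    return if (result.contents) |contents| contents.items().len else 0;
}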

View File

@@ -857,10 +857,10 @@ pub const String = extern struct {
 pub fn createFormatForJS(globalObject: *jsc.JSGlobalObject, comptime fmt: [:0]const u8, args: anytype) bun.JSError!jsc.JSValue {
 jsc.markBinding(@src());
-var builder = std.ArrayList(u8).init(bun.default_allocator);
+var builder = bun.collections.ArrayListDefault(u8).init();
 defer builder.deinit();
 bun.handleOom(builder.writer().print(fmt, args));
-return bun.cpp.BunString__createUTF8ForJS(globalObject, builder.items.ptr, builder.items.len);
+return bun.cpp.BunString__createUTF8ForJS(globalObject, builder.items().ptr, builder.items().len);
 }
 pub fn parseDate(this: *String, globalObject: *jsc.JSGlobalObject) bun.JSError!f64 {