mirror of https://github.com/oven-sh/bun (synced 2026-02-13 20:39:05 +00:00)
Fix bug
@@ -20,7 +20,7 @@ side: ?bun.bake.Side,
/// This is only set for the JS bundle, and not files associated with an
/// entrypoint like sourcemaps and bytecode
entry_point_index: ?u32,
-referenced_css_files: []const Index = &.{},
+referenced_css_chunks: []const Index = &.{},
source_index: Index.Optional = .none,

pub const Index = bun.GenericIndex(u32, OutputFile);
@@ -30,7 +30,7 @@ pub fn deinit(this: *OutputFile) void {

bun.default_allocator.free(this.src_path.text);
bun.default_allocator.free(this.dest_path);
-bun.default_allocator.free(this.referenced_css_files);
+bun.default_allocator.free(this.referenced_css_chunks);
}

// Depending on:
@@ -206,7 +206,7 @@ pub const Options = struct {
},
side: ?bun.bake.Side,
entry_point_index: ?u32,
-referenced_css_files: []const Index = &.{},
+referenced_css_chunks: []const Index = &.{},
};

pub fn init(options: Options) OutputFile {
@@ -240,7 +240,7 @@ pub fn init(options: Options) OutputFile {
},
.side = options.side,
.entry_point_index = options.entry_point_index,
-.referenced_css_files = options.referenced_css_files,
+.referenced_css_chunks = options.referenced_css_chunks,
};
}

@@ -458,9 +458,9 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
else => {},
}
var file_count: u32 = 1;
-var css_file_count: u32 = @intCast(main_file.referenced_css_files.len);
+var css_file_count: u32 = @intCast(main_file.referenced_css_chunks.len);
if (route.file_layout.unwrap()) |file| {
-css_file_count += @intCast(pt.outputFile(file).referenced_css_files.len);
+css_file_count += @intCast(pt.outputFile(file).referenced_css_chunks.len);
file_count += 1;
}
var next: ?FrameworkRouter.Route.Index = route.parent.unwrap();
@@ -480,7 +480,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
else => {},
}
if (parent.file_layout.unwrap()) |file| {
-css_file_count += @intCast(pt.outputFile(file).referenced_css_files.len);
+css_file_count += @intCast(pt.outputFile(file).referenced_css_chunks.len);
file_count += 1;
}
next = parent.parent.unwrap();
@@ -494,14 +494,15 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
file_count = 1;
css_file_count = 0;
file_list.putIndex(global, 0, pt.preloadBundledModule(main_file_route_index));
-for (main_file.referenced_css_files) |ref| {
+for (main_file.referenced_css_chunks) |ref| {
const file = bundled_outputs[ref.get()];
bun.assert(ref.get() >= css_chunks_first);
styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]);
css_file_count += 1;
}
if (route.file_layout.unwrap()) |file| {
file_list.putIndex(global, file_count, pt.preloadBundledModule(file));
-for (pt.outputFile(file).referenced_css_files) |ref| {
+for (pt.outputFile(file).referenced_css_chunks) |ref| {
styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]);
css_file_count += 1;
}
@@ -512,7 +513,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa
const parent = router.routePtr(parent_index);
if (parent.file_layout.unwrap()) |file| {
file_list.putIndex(global, file_count, pt.preloadBundledModule(file));
-for (pt.outputFile(file).referenced_css_files) |ref| {
+for (pt.outputFile(file).referenced_css_chunks) |ref| {
styles.putIndex(global, css_file_count, css_chunk_js_strings[ref.get() - css_chunks_first]);
css_file_count += 1;
}

@@ -2,6 +2,8 @@ pub const LinkerContext = struct {
pub const debug = Output.scoped(.LinkerCtx, false);
pub const CompileResult = bundler.CompileResult;

+pub const OutputFileListBuilder = @import("./linker_context/OutputFileListBuilder.zig");
+
parse_graph: *Graph = undefined,
graph: LinkerGraph = undefined,
allocator: std.mem.Allocator = undefined,

133  src/bundler/linker_context/OutputFileListBuilder.zig  Normal file
@@ -0,0 +1,133 @@
//! Q: What does this struct do?
//! A: This struct segments the `OutputFile` list into 3 separate spaces so
//! chunk indexing remains the same:
//!
//! 1. chunks
//! 2. sourcemaps and bytecode
//! 3. additional output files
//!
//! We can calculate the space ahead of time and avoid having to do something
//! more complicated or which requires extra work.
//!
//! Q: Why does it need to do that?
//! A: We would like it so if we have a chunk index, we can also index its
//! corresponding output file in the output file list.
//!
//! The DevServer uses the `referenced_css_chunks` (a list of chunk indices)
//! field on `OutputFile` to know which CSS files to hand to the rendering
//! function. For React this just adds <link> tags that point to each output CSS
//! file.
//!
//! However, we previously were pushing sourcemaps and bytecode output files
//! to the output file list directly after their corresponding chunk, meaning
//! the index of the chunk in the chunk list and its corresponding
//! `OutputFile` in the output file list got scrambled.
//!
//! If we maintain the property that `outputIndexForChunk(chunk[i]) == i`
//! then we don't need to do any allocations or extra work to get the output
//! file for a chunk.
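//!
//! As a concrete (hypothetical) illustration, not taken from a real build:
//! with 2 chunks, an external sourcemap plus a bytecode file for each, and
//! 1 additional output file, the list is laid out as
//!
//!   [0, 2)  chunks (chunk i sits at output index i)
//!   [2, 6)  sourcemaps and bytecode
//!   [6, 7)  additional output files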
pub const OutputFileList = @This();

output_files: std.ArrayList(options.OutputFile),
index_for_chunk: u32,
index_for_sourcemaps_and_bytecode: ?u32,
additional_output_files_start: u32,

pub fn init(
    allocator: std.mem.Allocator,
    c: *const bun.bundle_v2.LinkerContext,
    chunks: []const bun.bundle_v2.Chunk,
    _: usize,
) !@This() {
    const length, const source_map_and_bytecode_count = OutputFileList.calculateOutputFileListCapacity(c, chunks);
    var output_files = try std.ArrayList(options.OutputFile).initCapacity(
        allocator,
        length,
    );
    output_files.items.len = length;

    return .{
        .output_files = output_files,
        .index_for_chunk = 0,
        .index_for_sourcemaps_and_bytecode = if (source_map_and_bytecode_count == 0) null else @as(u32, @truncate(chunks.len)),
        .additional_output_files_start = @as(u32, @intCast(chunks.len)) + source_map_and_bytecode_count,
    };
}

pub fn take(this: *@This()) std.ArrayList(options.OutputFile) {
    const list = this.output_files;
    this.output_files = std.ArrayList(options.OutputFile).init(bun.default_allocator);
    return list;
}

pub fn calculateOutputFileListCapacity(c: *const bun.bundle_v2.LinkerContext, chunks: []const bun.bundle_v2.Chunk) struct { u32, u32 } {
    const source_map_count = if (c.options.source_maps.hasExternalFiles()) chunks.len else 0;
    const bytecode_count = if (c.options.generate_bytecode_cache) bytecode_count: {
        var bytecode_count: usize = 0;
        for (chunks) |*chunk| {
            // TODO: this was the original logic, but it seems like it is
            // incorrect / does unnecessary work? Leaving it here just in-case,
            // as it moved from a different file and is not git blame-able.
            //
            // const loader: Loader = if (chunk.entry_point.is_entry_point)
            // c.parse_graph.input_files.items(.loader)[
            // chunk.entry_point.source_index
            // ]
            // else
            // .js;
            // if (loader.isJavaScriptLike()) {
            // bytecode_count += 1;
            // }

            if (chunk.content == .javascript) {
                bytecode_count += 1;
            }
        }
        break :bytecode_count bytecode_count;
    } else 0;

    return .{ @intCast(chunks.len + source_map_count + bytecode_count + c.parse_graph.additional_output_files.items.len), @intCast(source_map_count + bytecode_count) };
}

pub fn insertForChunk(this: *OutputFileList, output_file: options.OutputFile) u32 {
    const index = this.indexForChunk();
    bun.assertf(index < this.index_for_sourcemaps_and_bytecode orelse std.math.maxInt(u32), "index ({d}) \\< index_for_sourcemaps_and_bytecode ({d})", .{ index, this.index_for_sourcemaps_and_bytecode orelse std.math.maxInt(u32) });
    this.output_files.items[index] = output_file;
    return index;
}

pub fn insertForSourcemapOrBytecode(this: *OutputFileList, output_file: options.OutputFile) !u32 {
    const index = this.indexForSourcemapOrBytecode() orelse return error.NoSourceMapsOrBytecode;
    bun.assertf(index < this.additional_output_files_start, "index ({d}) \\< additional_output_files_start ({d})", .{ index, this.additional_output_files_start });
    this.output_files.items[index] = output_file;
    return index;
}

pub fn insertAdditionalOutputFiles(this: *OutputFileList, additional_output_files: []const options.OutputFile) void {
    bun.assertf(this.index_for_sourcemaps_and_bytecode orelse std.math.maxInt(u32) < this.additional_output_files_start, "index_for_sourcemaps_and_bytecode ({d}) \\< additional_output_files_start ({d})", .{ this.index_for_sourcemaps_and_bytecode orelse std.math.maxInt(u32), this.additional_output_files_start });
    bun.copy(
        options.OutputFile,
        this.getMutableAdditionalOutputFiles(),
        additional_output_files,
    );
}

pub fn getMutableAdditionalOutputFiles(this: *OutputFileList) []options.OutputFile {
    return this.output_files.items[this.additional_output_files_start..];
}

fn indexForChunk(this: *@This()) u32 {
    const result = this.index_for_chunk;
    this.index_for_chunk += 1;
    return result;
}

fn indexForSourcemapOrBytecode(this: *@This()) ?u32 {
    const result = this.index_for_sourcemaps_and_bytecode orelse return null;
    this.index_for_sourcemaps_and_bytecode.? += 1;
    return result;
}

const std = @import("std");
const bun = @import("bun");
const options = bun.options;
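To make the segmentation concrete, the following is a small standalone Zig sketch of the same idea (illustrative only: it uses plain std types and made-up counts, none of bun's LinkerContext, Chunk, or OutputFile types). It pre-sizes one list into the three regions so that a chunk's index in the chunk list doubles as its index in the output file list, which is the invariant the bun.assertf calls in generateChunksInParallel and writeOutputFilesToDisk below check.

// Standalone sketch (not bun code): two chunks, each with an external
// sourcemap and a bytecode file, plus one additional output file.
const std = @import("std");

pub fn main() !void {
    const allocator = std.heap.page_allocator;

    const chunk_count: usize = 2;
    const sourcemap_and_bytecode_count: usize = 2 * chunk_count;
    const additional_count: usize = 1;
    const total = chunk_count + sourcemap_and_bytecode_count + additional_count;

    // Reserve the whole list up front, the way OutputFileListBuilder.init does.
    var output_files = try std.ArrayList(u32).initCapacity(allocator, total);
    defer output_files.deinit();
    output_files.items.len = total;

    // Region cursors, mirroring index_for_chunk, index_for_sourcemaps_and_bytecode,
    // and additional_output_files_start.
    var index_for_chunk: usize = 0;
    var index_for_sm_bc: usize = chunk_count;
    const additional_start = chunk_count + sourcemap_and_bytecode_count;

    for (0..chunk_count) |chunk_index| {
        // The chunk lands at its own index, so output index == chunk index.
        output_files.items[index_for_chunk] = @intCast(chunk_index);
        std.debug.assert(index_for_chunk == chunk_index);
        index_for_chunk += 1;

        // Its sourcemap and bytecode go into the middle region rather than
        // being appended directly after the chunk, which is what used to
        // scramble the indices.
        output_files.items[index_for_sm_bc] = @intCast(100 + chunk_index);
        index_for_sm_bc += 1;
        output_files.items[index_for_sm_bc] = @intCast(200 + chunk_index);
        index_for_sm_bc += 1;
    }

    // Additional output files fill the tail region.
    output_files.items[additional_start] = 999;
}

The real builder computes the same region sizes in calculateOutputFileListCapacity and enforces the boundaries with bun.assertf.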
@@ -326,11 +326,7 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
}
}

-var output_files = std.ArrayList(options.OutputFile).initCapacity(
-bun.default_allocator,
-(if (c.options.source_maps.hasExternalFiles()) chunks.len * 2 else chunks.len) +
-@as(usize, c.parse_graph.additional_output_files.items.len),
-) catch unreachable;
+var output_files = try OutputFileListBuilder.init(bun.default_allocator, c, chunks, c.parse_graph.additional_output_files.items.len);

const root_path = c.resolver.opts.output_dir;
const more_than_one_output = c.parse_graph.additional_output_files.items.len > 0 or c.options.generate_bytecode_cache or (has_css_chunk and has_js_chunk) or (has_html_chunk and (has_js_chunk or has_css_chunk));
@@ -346,7 +342,7 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
try c.writeOutputFilesToDisk(root_path, chunks, &output_files);
} else {
// In-memory build
-for (chunks) |*chunk| {
+for (chunks, 0..) |*chunk, chunk_index_in_chunks_list| {
var display_size: usize = 0;

const public_path = if (chunk.is_browser_chunk_from_server_build)
@@ -495,14 +491,12 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
};

const source_map_index: ?u32 = if (sourcemap_output_file != null)
-@as(u32, @truncate(output_files.items.len + 1))
+try output_files.insertForSourcemapOrBytecode(sourcemap_output_file.?)
else
null;

-const bytecode_index: ?u32 = if (bytecode_output_file != null and source_map_index != null)
-@as(u32, @truncate(output_files.items.len + 2))
-else if (bytecode_output_file != null)
-@as(u32, @truncate(output_files.items.len + 1))
+const bytecode_index: ?u32 = if (bytecode_output_file != null)
+try output_files.insertForSourcemapOrBytecode(bytecode_output_file.?)
else
null;

@@ -512,7 +506,8 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
c.graph.files.items(.entry_point_kind)[chunk.entry_point.source_index].outputKind()
else
.chunk;
-try output_files.append(options.OutputFile.init(.{
+
+const chunk_index = output_files.insertForChunk(options.OutputFile.init(.{
.data = .{
.buffer = .{
.data = code_result.buffer,
@@ -539,24 +534,21 @@ pub fn generateChunksInParallel(c: *LinkerContext, chunks: []Chunk, comptime is_
chunk.entry_point.source_index - @as(u32, (if (c.framework) |fw| if (fw.server_components != null) 3 else 1 else 1))
else
null,
-.referenced_css_files = switch (chunk.content) {
+.referenced_css_chunks = switch (chunk.content) {
.javascript => |js| @ptrCast(try bun.default_allocator.dupe(u32, js.css_chunks)),
.css => &.{},
.html => &.{},
},
}));
-if (sourcemap_output_file) |sourcemap_file| {
-try output_files.append(sourcemap_file);
-}
-if (bytecode_output_file) |bytecode_file| {
-try output_files.append(bytecode_file);
-}

+// We want the chunk index to remain the same in `output_files` so the indices in `OutputFile.referenced_css_chunks` work
+bun.assertf(chunk_index == chunk_index_in_chunks_list, "chunk_index ({d}) != chunk_index_in_chunks_list ({d})", .{ chunk_index, chunk_index_in_chunks_list });
}

-try output_files.appendSlice(c.parse_graph.additional_output_files.items);
+output_files.insertAdditionalOutputFiles(c.parse_graph.additional_output_files.items);
}

-return output_files;
+return output_files.take();
}

const bun = @import("bun");
@@ -600,3 +592,4 @@ const base64 = bun.base64;
const JSC = bun.JSC;

pub const ThreadPoolLib = bun.ThreadPool;
+const OutputFileListBuilder = bun.bundle_v2.LinkerContext.OutputFileListBuilder;

@@ -2,7 +2,7 @@ pub fn writeOutputFilesToDisk(
c: *LinkerContext,
root_path: string,
chunks: []Chunk,
-output_files: *std.ArrayList(options.OutputFile),
+output_files: *OutputFileListBuilder,
) !void {
const trace = bun.perf.trace("Bundler.writeOutputFilesToDisk");
defer trace.end();
@@ -41,7 +41,7 @@ pub fn writeOutputFilesToDisk(
var pathbuf: bun.PathBuffer = undefined;
const bv2: *bundler.BundleV2 = @fieldParentPtr("linker", c);

-for (chunks) |*chunk| {
+for (chunks, 0..) |*chunk, chunk_index_in_chunks_list| {
const trace2 = bun.perf.trace("Bundler.writeChunkToDisk");
defer trace2.end();
defer max_heap_allocator.reset();
@@ -292,14 +292,12 @@ pub fn writeOutputFilesToDisk(
}

const source_map_index: ?u32 = if (source_map_output_file != null)
-@as(u32, @truncate(output_files.items.len + 1))
+try output_files.insertForSourcemapOrBytecode(source_map_output_file.?)
else
null;

-const bytecode_index: ?u32 = if (bytecode_output_file != null and source_map_index != null)
-@as(u32, @truncate(output_files.items.len + 2))
-else if (bytecode_output_file != null)
-@as(u32, @truncate(output_files.items.len + 1))
+const bytecode_index: ?u32 = if (bytecode_output_file != null)
+try output_files.insertForSourcemapOrBytecode(bytecode_output_file.?)
else
null;

@@ -309,7 +307,8 @@ pub fn writeOutputFilesToDisk(
c.graph.files.items(.entry_point_kind)[chunk.entry_point.source_index].outputKind()
else
.chunk;
-try output_files.append(options.OutputFile.init(.{
+
+const chunk_index = output_files.insertForChunk(options.OutputFile.init(.{
.output_path = bun.default_allocator.dupe(u8, chunk.final_rel_path) catch unreachable,
.input_path = input_path,
.input_loader = if (chunk.entry_point.is_entry_point)
@@ -337,27 +336,19 @@ pub fn writeOutputFilesToDisk(
chunk.entry_point.source_index - @as(u32, (if (c.framework) |fw| if (fw.server_components != null) 3 else 1 else 1))
else
null,
-.referenced_css_files = switch (chunk.content) {
+.referenced_css_chunks = switch (chunk.content) {
.javascript => |js| @ptrCast(try bun.default_allocator.dupe(u32, js.css_chunks)),
.css => &.{},
.html => &.{},
},
}));

-if (source_map_output_file) |sourcemap_file| {
-try output_files.append(sourcemap_file);
-}
-
-if (bytecode_output_file) |bytecode_file| {
-try output_files.append(bytecode_file);
-}
+// We want the chunk index to remain the same in `output_files` so the indices in `OutputFile.referenced_css_chunks` work
+bun.assertf(chunk_index == chunk_index_in_chunks_list, "chunk_index ({d}) != chunk_index_in_chunks_list ({d})", .{ chunk_index, chunk_index_in_chunks_list });
}

{
-const offset = output_files.items.len;
-output_files.items.len += c.parse_graph.additional_output_files.items.len;
-
-for (c.parse_graph.additional_output_files.items, output_files.items[offset..][0..c.parse_graph.additional_output_files.items.len]) |*src, *dest| {
+for (c.parse_graph.additional_output_files.items, output_files.getMutableAdditionalOutputFiles()) |*src, *dest| {
const bytes = src.value.buffer.bytes;
src.value.buffer.bytes.len = 0;

@@ -442,3 +433,4 @@ pub const ParseTask = bun.bundle_v2.ParseTask;
const Chunk = bundler.Chunk;
const cheapPrefixNormalizer = bundler.cheapPrefixNormalizer;
const debug = LinkerContext.debug;
+const OutputFileListBuilder = bun.bundle_v2.LinkerContext.OutputFileListBuilder;