Compare commits

...

14 Commits

Author SHA1 Message Date
Cursor Agent
104c181d03 Fix HTML import manifest generation to include all assets - Updated writeHTMLImportManifest to include all chunks and assets from HTML file - Manifest now includes HTML, JS, CSS, and static files (images, etc.) - Fixed JSON escaping to use bun.js_printer.writeJSONString() - Updated PathTemplate to automatically use fileWithTarget when HTML imports present - Updated tests to match new manifest format 2025-06-08 17:29:15 +00:00
Cursor Agent
0bc88f8a60 Checkpoint before follow-up message 2025-06-08 17:10:32 +00:00
Cursor Agent
305030bb62 Add comprehensive tests for HTML import manifest feature 2025-06-08 17:09:33 +00:00
Cursor Agent
04999f80ad Checkpoint before follow-up message 2025-06-08 17:07:56 +00:00
Cursor Agent
9bb06609bc Remove HTML import tests that don't match the actual implementation 2025-06-08 16:10:15 +00:00
Cursor Agent
33963f4e3d Fix html_imports reference to use parse_graph instead of graph 2025-06-08 15:55:22 +00:00
Cursor Agent
38af765cf2 Fix JSON manifest generation to use proper JSON string escaping 2025-06-08 15:54:03 +00:00
Cursor Agent
ff4bc1e058 Fix HTML import TODOs: proper JSON manifest, target-aware naming, and add tests 2025-06-08 15:36:21 +00:00
Jarred Sumner
bdf793a042 Merge branch 'main' into cursor/enhance-bun-build-for-full-stack-support-e69d 2025-06-08 07:47:33 -07:00
Jarred Sumner
ae3190b78f wip 2025-06-08 07:43:42 -07:00
Jarred Sumner
ceb72cb050 wip 2025-06-08 03:31:05 -07:00
Jarred Sumner
3b07c92738 Remove a memcpy 2025-06-08 03:30:29 -07:00
Jarred Sumner
28981d4b4c more 2025-06-08 02:09:24 -07:00
Jarred Sumner
32ea63ad57 current 2025-06-08 01:35:47 -07:00
16 changed files with 740 additions and 162 deletions

View File

@@ -4858,11 +4858,7 @@ pub fn IncrementalGraph(side: bake.Side) type {
// Additionally, clear the cached entry of the file from the path to
// source index map.
const hash = bun.hash(abs_path);
for ([_]*bun.bundle_v2.PathToSourceIndexMap{
&bv2.graph.path_to_source_index_map,
&bv2.graph.client_path_to_source_index_map,
&bv2.graph.ssr_path_to_source_index_map,
}) |map| {
for (&bv2.graph.build_graphs.values) |*map| {
_ = map.remove(hash);
}
}

View File

@@ -45,6 +45,7 @@ pub const ResolveMessage = struct {
else
break :brk "ERR_MODULE_NOT_FOUND",
.html_manifest,
.entry_point_run,
.entry_point_build,
.at,

View File

@@ -180,7 +180,7 @@ pub const Chunk = struct {
count += piece.data_len;
switch (piece.query.kind) {
.chunk, .asset, .scb => {
.chunk, .asset, .scb, .html_import => {
const index = piece.query.index;
const file_path = switch (piece.query.kind) {
.asset => brk: {
@@ -195,6 +195,15 @@ pub const Chunk = struct {
},
.chunk => chunks[index].final_rel_path,
.scb => chunks[entry_point_chunks_for_scb[index]].final_rel_path,
.html_import => {
count += std.fmt.count("{}", .{HTMLImportManifest{
.index = index,
.graph = graph,
.chunks = chunks,
.linker_graph = linker_graph,
}});
continue;
},
.none => unreachable,
};
@@ -239,7 +248,7 @@ pub const Chunk = struct {
remain = remain[data.len..];
switch (piece.query.kind) {
.asset, .chunk, .scb => {
.asset, .chunk, .scb, .html_import => {
const index = piece.query.index;
const file_path = switch (piece.query.kind) {
.asset => brk: {
@@ -272,6 +281,19 @@ pub const Chunk = struct {
break :brk piece_chunk.final_rel_path;
},
.html_import => {
var fixed_buffer_stream = std.io.fixedBufferStream(remain);
const writer = fixed_buffer_stream.writer();
try HTMLImportManifest.write(index, graph, linker_graph, chunks, writer);
remain = remain[fixed_buffer_stream.pos..];
if (enable_source_map_shifts) {
shift.before.advance(chunk.unique_key);
shift.after.advance(fixed_buffer_stream.buffer[0..fixed_buffer_stream.pos]);
shifts.appendAssumeCapacity(shift);
}
continue;
},
else => unreachable,
};
@@ -359,6 +381,80 @@ pub const Chunk = struct {
},
}
}
pub const HTMLImportManifest = struct {
index: u32,
graph: *const Graph,
chunks: []Chunk,
linker_graph: *const LinkerGraph,
pub fn format(this: HTMLImportManifest, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
try writeHTMLImportManifest(this.index, this.graph, this.linker_graph, this.chunks, writer);
}
fn writeHTMLImportManifest(index: u32, graph: *const Graph, _: *const LinkerGraph, chunks: []Chunk, writer: anytype) !void {
const browser_source_index = graph.html_imports.html_source_indices.slice()[index];
// Start the manifest object
_ = writer.write("{\"files\":[") catch unreachable;
var first = true;
// Find all chunks that came from this HTML file
for (chunks) |*ch| {
if (ch.entry_point.source_index == browser_source_index) {
if (!first) _ = writer.write(",") catch unreachable;
first = false;
_ = writer.write("{\"path\":") catch unreachable;
bun.js_printer.writeJSONString(ch.final_rel_path, @TypeOf(writer), writer, .utf8) catch unreachable;
_ = writer.write(",\"loader\":\"") catch unreachable;
_ = writer.write(ch.content.ext()) catch unreachable;
_ = writer.write("\",\"hash\":\"") catch unreachable;
_ = writer.print("{}", .{bun.fmt.truncatedHash32(ch.isolated_hash)}) catch unreachable;
_ = writer.write("\"}") catch unreachable;
}
}
// Add additional files (images, fonts, etc.) referenced by the HTML
const import_records = graph.ast.items(.import_records)[browser_source_index];
for (import_records.slice()) |import_record| {
if (import_record.source_index.isValid() and !import_record.is_external_without_side_effects) {
const source_index = import_record.source_index.get();
const loader = graph.input_files.items(.loader)[source_index];
// Skip JS and CSS files as they're handled as chunks above
if (!loader.isJavaScriptLike() and !loader.isCSS()) {
const additional_files = graph.input_files.items(.additional_files)[source_index];
if (additional_files.len > 0) {
switch (additional_files.slice()[0]) {
.output_file => |output_file_id| {
const dest_path = graph.additional_output_files.items[output_file_id].dest_path;
if (!first) _ = writer.write(",") catch unreachable;
first = false;
_ = writer.write("{\"path\":") catch unreachable;
bun.js_printer.writeJSONString(dest_path, @TypeOf(writer), writer, .utf8) catch unreachable;
_ = writer.write(",\"loader\":\"") catch unreachable;
_ = writer.write(@tagName(loader)) catch unreachable;
_ = writer.write("\",\"hash\":\"") catch unreachable;
const content_hash = graph.input_files.items(.content_hash_for_additional_file)[source_index];
_ = writer.print("{}", .{bun.fmt.truncatedHash32(content_hash)}) catch unreachable;
_ = writer.write("\"}") catch unreachable;
},
else => {},
}
}
}
}
}
_ = writer.write("]}") catch unreachable;
}
pub fn write(index: u32, graph: *const Graph, linker_graph: *const LinkerGraph, chunks: []Chunk, writer: anytype) !void {
try writeHTMLImportManifest(index, graph, linker_graph, chunks, writer);
}
};
};
/// An issue with asset files and server component boundaries is they
@@ -385,10 +481,10 @@ pub const Chunk = struct {
}
pub const Query = packed struct(u32) {
index: u30,
index: u29,
kind: Kind,
pub const Kind = enum(u2) {
pub const Kind = enum(u3) {
/// The last piece in an array uses this to indicate it is just data
none,
/// Given a source index, print the asset's output
@@ -397,6 +493,8 @@ pub const Chunk = struct {
chunk,
/// Given a server component boundary index, print the chunk's output path
scb,
/// Given an HTML import index, print the manifest
html_import,
};
pub const none: Query = .{ .index = 0, .kind = .none };

View File

@@ -1,4 +1,4 @@
pub const Graph = @This();
const Graph = @This();
pool: *ThreadPool,
heap: ThreadlocalArena = .{},
@@ -34,32 +34,22 @@ pending_items: u32 = 0,
/// tasks will be run, and the count is "moved" back to `pending_items`
deferred_pending: u32 = 0,
/// Maps a hashed path string to a source index, if it exists in the compilation.
/// Instead of accessing this directly, consider using BundleV2.pathToSourceIndexMap
path_to_source_index_map: PathToSourceIndexMap = .{},
/// When using server components, a completely separate file listing is
/// required to avoid incorrect inlining of defines and dependencies on
/// other files. This is relevant for files shared between server and client
/// and have no "use <side>" directive, and must be duplicated.
///
/// To make linking easier, this second graph contains indices into the
/// same `.ast` and `.input_files` arrays.
client_path_to_source_index_map: PathToSourceIndexMap = .{},
/// When using server components with React, there is an additional module
/// graph which is used to contain SSR-versions of all client components;
/// the SSR graph. The difference between the SSR graph and the server
/// graph is that this one does not apply '--conditions react-server'
///
/// In Bun's React Framework, it includes SSR versions of 'react' and
/// 'react-dom' (an export condition is used to provide a different
/// implementation for RSC, which is potentially how they implement
/// server-only features such as async components).
ssr_path_to_source_index_map: PathToSourceIndexMap = .{},
/// A map of build targets to their corresponding module graphs.
build_graphs: std.EnumArray(options.Target, PathToSourceIndexMap) = .initFill(.{}),
/// When Server Components is enabled, this holds a list of all boundary
/// files. This happens for all files with a "use <side>" directive.
server_component_boundaries: ServerComponentBoundary.List = .{},
/// Track HTML imports from server-side code
/// Each entry represents a server file importing an HTML file that needs a client build
html_imports: struct {
/// Source index of the server file doing the import
server_source_indices: BabyList(Index.Int) = .{},
/// Source index of the HTML file being imported
html_source_indices: BabyList(Index.Int) = .{},
} = .{},
estimated_file_loader_count: usize = 0,
/// For Bake, a count of the CSS asts is used to make precise
@@ -82,11 +72,15 @@ pub const InputFile = struct {
is_plugin_file: bool = false,
};
pub inline fn pathToSourceIndexMap(this: *Graph, target: options.Target) *PathToSourceIndexMap {
return this.build_graphs.getPtr(target);
}
/// Schedule a task to be run on the JS thread which resolves the promise of
/// each `.defer()` called in an onLoad plugin.
///
/// Returns true if there were more tasks queued.
pub fn drainDeferredTasks(this: *@This(), transpiler: *BundleV2) bool {
pub fn drainDeferredTasks(this: *Graph, transpiler: *BundleV2) bool {
transpiler.thread_lock.assertLocked();
if (this.deferred_pending > 0) {

View File

@@ -13,6 +13,7 @@ pub const LinkerContext = struct {
/// We may need to refer to the "__esm" and/or "__commonJS" runtime symbols
cjs_runtime_ref: Ref = Ref.None,
esm_runtime_ref: Ref = Ref.None,
json_parse_ref: Ref = Ref.None,
/// We may need to refer to the CommonJS "module" symbol for exports
unbound_module_ref: Ref = Ref.None,
@@ -214,6 +215,7 @@ pub const LinkerContext = struct {
this.esm_runtime_ref = runtime_named_exports.get("__esm").?.ref;
this.cjs_runtime_ref = runtime_named_exports.get("__commonJS").?.ref;
this.json_parse_ref = runtime_named_exports.get("__jsonParse").?.ref;
if (this.options.output_format == .cjs) {
this.unbound_module_ref = this.graph.generateNewSymbol(Index.runtime.get(), .unbound, "module");
@@ -2355,6 +2357,7 @@ pub const LinkerContext = struct {
'A' => .asset,
'C' => .chunk,
'S' => .scb,
'H' => .html_import,
else => {
if (bun.Environment.isDebug)
bun.Output.debugWarn("Invalid output piece boundary", .{});
@@ -2385,6 +2388,11 @@ pub const LinkerContext = struct {
bun.Output.debugWarn("Invalid output piece boundary", .{});
break;
},
.html_import => if (index >= c.parse_graph.html_imports.server_source_indices.len) {
if (bun.Environment.isDebug)
bun.Output.debugWarn("Invalid output piece boundary", .{});
break;
},
else => unreachable,
}

View File

@@ -1047,22 +1047,12 @@ fn getSourceCode(
const allocator = this.allocator;
var data = this.data;
var transpiler = &data.transpiler;
const transpiler = &data.transpiler;
errdefer transpiler.resetStore();
const resolver: *Resolver = &transpiler.resolver;
var file_path = task.path;
var loader = task.loader orelse file_path.loader(&transpiler.options.loaders) orelse options.Loader.file;
// Do not process files as HTML if any of the following are true:
// - building for node or bun.js
//
// We allow non-entrypoints to import HTML so that people could
// potentially use an onLoad plugin that returns HTML.
if (task.known_target != .browser) {
loader = loader.disableHTML();
task.loader = loader;
}
var contents_came_from_plugin: bool = false;
return try getCodeForParseTask(task, log, transpiler, resolver, allocator, &file_path, &loader, &contents_came_from_plugin);
}
@@ -1076,22 +1066,11 @@ fn runWithSourceCode(
) anyerror!Result.Success {
const allocator = this.allocator;
var data = this.data;
var transpiler = &data.transpiler;
var transpiler = this.transpilerForTarget(task.known_target);
errdefer transpiler.resetStore();
var resolver: *Resolver = &transpiler.resolver;
const file_path = &task.path;
var loader = task.loader orelse file_path.loader(&transpiler.options.loaders) orelse options.Loader.file;
// Do not process files as HTML if any of the following are true:
// - building for node or bun.js
//
// We allow non-entrypoints to import HTML so that people could
// potentially use an onLoad plugin that returns HTML.
if (task.known_target != .browser) {
loader = loader.disableHTML();
task.loader = loader;
}
const loader = task.loader orelse file_path.loader(&transpiler.options.loaders) orelse options.Loader.file;
// WARNING: Do not change the variant of `task.contents_or_fd` from
// `.fd` to `.contents` (or back) after this point!
@@ -1154,7 +1133,7 @@ fn runWithSourceCode(
((transpiler.options.server_components or transpiler.options.dev_server != null) and
task.known_target == .browser))
{
transpiler = this.ctx.client_transpiler;
transpiler = this.ctx.client_transpiler.?;
resolver = &transpiler.resolver;
bun.assert(transpiler.options.target == .browser);
}

View File

@@ -218,6 +218,8 @@ pub const ThreadPool = struct {
estimated_input_lines_of_code: usize = 0,
macro_context: js_ast.Macro.MacroContext,
transpiler: Transpiler = undefined,
other_transpiler: Transpiler = undefined,
has_loaded_other_transpiler: bool = false,
};
pub fn init(worker: *Worker, v2: *BundleV2) void {
@@ -233,7 +235,7 @@ pub const ThreadPool = struct {
this.heap = ThreadlocalArena.init() catch unreachable;
this.allocator = this.heap.allocator();
var allocator = this.allocator;
const allocator = this.allocator;
this.ast_memory_allocator = .{ .allocator = this.allocator };
this.ast_memory_allocator.reset();
@@ -245,21 +247,38 @@ pub const ThreadPool = struct {
};
this.data.log.* = Logger.Log.init(allocator);
this.ctx = ctx;
this.data.transpiler = ctx.transpiler.*;
this.data.transpiler.setLog(this.data.log);
this.data.transpiler.setAllocator(allocator);
this.data.transpiler.linker.resolver = &this.data.transpiler.resolver;
this.data.transpiler.macro_context = js_ast.Macro.MacroContext.init(&this.data.transpiler);
this.data.macro_context = this.data.transpiler.macro_context.?;
this.temporary_arena = bun.ArenaAllocator.init(this.allocator);
this.stmt_list = LinkerContext.StmtList.init(this.allocator);
this.initializeTranspiler(&this.data.transpiler, ctx.transpiler, allocator);
const CacheSet = @import("../cache.zig");
this.data.transpiler.resolver.caches = CacheSet.Set.init(this.allocator);
debug("Worker.create()", .{});
}
fn initializeTranspiler(this: *Worker, transpiler: *Transpiler, from: *Transpiler, allocator: std.mem.Allocator) void {
transpiler.* = from.*;
transpiler.setLog(this.data.log);
transpiler.setAllocator(allocator);
transpiler.linker.resolver = &transpiler.resolver;
transpiler.macro_context = js_ast.Macro.MacroContext.init(transpiler);
this.data.macro_context = transpiler.macro_context.?;
const CacheSet = @import("../cache.zig");
transpiler.resolver.caches = CacheSet.Set.init(allocator);
}
pub fn transpilerForTarget(this: *Worker, target: bun.options.Target) *Transpiler {
if (target == .browser and this.data.transpiler.options.target != target) {
if (!this.data.has_loaded_other_transpiler) {
this.data.has_loaded_other_transpiler = true;
this.initializeTranspiler(&this.data.other_transpiler, this.ctx.client_transpiler.?, this.allocator);
}
bun.debugAssert(this.data.other_transpiler.options.target == target);
return &this.data.other_transpiler;
}
return &this.data.transpiler;
}
pub fn run(this: *Worker, ctx: *BundleV2) void {
if (!this.has_created) {
this.create(ctx);

View File

@@ -106,7 +106,7 @@ pub const BundleV2 = struct {
transpiler: *Transpiler,
/// When Server Component is enabled, this is used for the client bundles
/// and `transpiler` is used for the server bundles.
client_transpiler: *Transpiler,
client_transpiler: ?*Transpiler,
/// See bake.Framework.ServerComponents.separate_ssr_graph
ssr_transpiler: *Transpiler,
/// When Bun Bake is used, the resolved framework is passed here
@@ -167,17 +167,69 @@ pub const BundleV2 = struct {
}
}
fn ensureClientTranspiler(this: *BundleV2) void {
if (this.client_transpiler == null) {
_ = this.initializeClientTranspiler() catch bun.outOfMemory();
}
}
fn initializeClientTranspiler(this: *BundleV2) !*Transpiler {
@branchHint(.cold);
const allocator = this.graph.allocator;
const this_transpiler = this.transpiler;
const client_transpiler = try allocator.create(Transpiler);
const defines = this_transpiler.options.transform_options.define;
client_transpiler.* = this_transpiler.*;
client_transpiler.options = this_transpiler.options;
client_transpiler.options.target = .browser;
client_transpiler.options.main_fields = options.Target.DefaultMainFields.get(options.Target.browser);
client_transpiler.options.conditions = try options.ESMConditions.init(allocator, options.Target.browser.defaultConditions());
client_transpiler.options.define = try options.Define.init(
allocator,
if (defines) |user_defines|
try options.Define.Data.fromInput(try options.stringHashMapFromArrays(
options.defines.RawDefines,
allocator,
user_defines.keys,
user_defines.values,
), this_transpiler.options.transform_options.drop, this_transpiler.log, allocator)
else
null,
null,
this_transpiler.options.define.drop_debugger,
);
client_transpiler.setLog(this_transpiler.log);
client_transpiler.setAllocator(allocator);
client_transpiler.linker.resolver = &client_transpiler.resolver;
client_transpiler.macro_context = js_ast.Macro.MacroContext.init(client_transpiler);
const CacheSet = @import("../cache.zig");
client_transpiler.resolver.caches = CacheSet.Set.init(allocator);
client_transpiler.resolver.opts = client_transpiler.options;
this.client_transpiler = client_transpiler;
return client_transpiler;
}
/// Most of the time, accessing .transpiler directly is OK. This is only
/// needed when it is important to distinct between client and server
///
/// Note that .log, .allocator, and other things are shared
/// between the three transpiler configurations
pub inline fn transpilerForTarget(this: *BundleV2, target: options.Target) *Transpiler {
return if (!this.transpiler.options.server_components and this.linker.dev_server == null)
this.transpiler
else switch (target) {
pub inline fn transpilerForTarget(noalias this: *BundleV2, target: options.Target) *Transpiler {
if (!this.transpiler.options.server_components and this.linker.dev_server == null) {
if (target == .browser and this.transpiler.options.target.isServerSide()) {
return this.client_transpiler orelse this.initializeClientTranspiler() catch bun.outOfMemory();
}
return this.transpiler;
}
return switch (target) {
else => this.transpiler,
.browser => this.client_transpiler,
.browser => this.client_transpiler.?,
.bake_server_components_ssr => this.ssr_transpiler,
};
}
@@ -192,15 +244,8 @@ pub const BundleV2 = struct {
return this.transpiler.log;
}
/// Same semantics as bundlerForTarget for `path_to_source_index_map`
pub inline fn pathToSourceIndexMap(this: *BundleV2, target: options.Target) *PathToSourceIndexMap {
return if (!this.transpiler.options.server_components)
&this.graph.path_to_source_index_map
else switch (target) {
else => &this.graph.path_to_source_index_map,
.browser => &this.graph.client_path_to_source_index_map,
.bake_server_components_ssr => &this.graph.ssr_path_to_source_index_map,
};
return this.graph.pathToSourceIndexMap(target);
}
const ReachableFileVisitor = struct {
@@ -340,7 +385,7 @@ pub const BundleV2 = struct {
.all_import_records = this.graph.ast.items(.import_records),
.all_loaders = this.graph.input_files.items(.loader),
.all_urls_for_css = all_urls_for_css,
.redirect_map = this.graph.path_to_source_index_map,
.redirect_map = this.pathToSourceIndexMap(this.transpiler.options.target).*,
.dynamic_import_entry_points = &this.dynamic_import_entry_points,
.scb_bitset = scb_bitset,
.scb_list = if (scb_bitset != null)
@@ -556,14 +601,14 @@ pub const BundleV2 = struct {
const entry = this.pathToSourceIndexMap(target).getOrPut(this.graph.allocator, path.hashKey()) catch bun.outOfMemory();
if (!entry.found_existing) {
path.* = this.pathWithPrettyInitialized(path.*, target) catch bun.outOfMemory();
const loader: Loader = (brk: {
const loader: Loader = brk: {
const record: *ImportRecord = &this.graph.ast.items(.import_records)[import_record.importer_source_index].slice()[import_record.import_record_index];
if (record.loader) |out_loader| {
break :brk out_loader;
}
break :brk path.loader(&transpiler.options.loaders) orelse options.Loader.file;
// HTML is only allowed at the entry point.
}).disableHTML();
};
const idx = this.enqueueParseTask(
&resolve_result,
&.{
@@ -581,9 +626,9 @@ pub const BundleV2 = struct {
// It makes sense to separate these for JS because the target affects DCE
if (this.transpiler.options.server_components and !loader.isJavaScriptLike()) {
const a, const b = switch (target) {
else => .{ &this.graph.client_path_to_source_index_map, &this.graph.ssr_path_to_source_index_map },
.browser => .{ &this.graph.path_to_source_index_map, &this.graph.ssr_path_to_source_index_map },
.bake_server_components_ssr => .{ &this.graph.path_to_source_index_map, &this.graph.client_path_to_source_index_map },
else => .{ this.pathToSourceIndexMap(.browser), this.pathToSourceIndexMap(.bake_server_components_ssr) },
.browser => .{ this.pathToSourceIndexMap(this.transpiler.options.target), this.pathToSourceIndexMap(.bake_server_components_ssr) },
.bake_server_components_ssr => .{ this.pathToSourceIndexMap(this.transpiler.options.target), this.pathToSourceIndexMap(.browser) },
};
a.put(this.graph.allocator, entry.key_ptr.*, entry.value_ptr.*) catch bun.outOfMemory();
if (this.framework.?.server_components.?.separate_ssr_graph)
@@ -675,9 +720,9 @@ pub const BundleV2 = struct {
}
this.incrementScanCounter();
const source_index = Index.source(this.graph.input_files.len);
const loader = brk: {
const loader = path.loader(&this.transpiler.options.loaders) orelse .file;
if (target != .browser) break :brk loader.disableHTML();
break :brk loader;
};
@@ -746,7 +791,7 @@ pub const BundleV2 = struct {
this.* = .{
.transpiler = transpiler,
.client_transpiler = transpiler,
.client_transpiler = null,
.ssr_transpiler = transpiler,
.framework = null,
.graph = .{
@@ -775,7 +820,7 @@ pub const BundleV2 = struct {
this.linker.framework = &this.framework.?;
this.plugins = bo.plugins;
if (transpiler.options.server_components) {
bun.assert(this.client_transpiler.options.server_components);
bun.assert(this.client_transpiler.?.options.server_components);
if (bo.framework.server_components.?.separate_ssr_graph)
bun.assert(this.ssr_transpiler.options.server_components);
}
@@ -882,7 +927,7 @@ pub const BundleV2 = struct {
// try this.graph.entry_points.append(allocator, Index.runtime);
try this.graph.ast.append(bun.default_allocator, JSAst.empty);
try this.graph.path_to_source_index_map.put(this.graph.allocator, bun.hash("bun:wrap"), Index.runtime.get());
try this.pathToSourceIndexMap(this.transpiler.options.target).put(this.graph.allocator, bun.hash("bun:wrap"), Index.runtime.get());
var runtime_parse_task = try this.graph.allocator.create(ParseTask);
runtime_parse_task.* = rt.parse_task;
runtime_parse_task.ctx = this;
@@ -912,7 +957,6 @@ pub const BundleV2 = struct {
try this.graph.entry_points.ensureUnusedCapacity(this.graph.allocator, num_entry_points);
try this.graph.input_files.ensureUnusedCapacity(this.graph.allocator, num_entry_points);
try this.graph.path_to_source_index_map.ensureUnusedCapacity(this.graph.allocator, @intCast(num_entry_points));
switch (variant) {
.normal => {
@@ -920,7 +964,27 @@ pub const BundleV2 = struct {
const resolved = this.transpiler.resolveEntryPoint(entry_point) catch
continue;
_ = try this.enqueueEntryItem(null, resolved, true, this.transpiler.options.target);
_ = try this.enqueueEntryItem(
null,
resolved,
true,
brk: {
const main_target = this.transpiler.options.target;
if (main_target.isServerSide()) {
if (resolved.pathConst()) |path| {
if (path.loader(&this.transpiler.options.loaders)) |loader| {
if (loader == .html) {
this.ensureClientTranspiler();
break :brk .browser;
}
}
}
}
break :brk main_target;
},
);
}
},
.dev_server => {
@@ -991,6 +1055,20 @@ pub const BundleV2 = struct {
}
}
pub fn processHtmlImportFiles(this: *BundleV2) OOM!void {
if (this.graph.html_imports.server_source_indices.len == 0) return;
const input_files: []const Logger.Source = this.graph.input_files.items(.source);
for (this.graph.html_imports.server_source_indices.slice()) |html_import| {
const source = &input_files[html_import];
const source_index = this.pathToSourceIndexMap(.browser).get(source.path.hashKey()) orelse {
@panic("Assertion failed: HTML import file not found in pathToSourceIndexMap");
};
this.graph.html_imports.html_source_indices.push(this.graph.allocator, source_index) catch unreachable;
}
}
/// This generates the two asts for 'bun:bake/client' and 'bun:bake/server'. Both are generated
/// at the same time in one pass over the SCB list.
pub fn processServerComponentManifestFiles(this: *BundleV2) OOM!void {
@@ -1127,15 +1205,13 @@ pub const BundleV2 = struct {
pub fn enqueueParseTask(
this: *BundleV2,
resolve_result: *const _resolver.Result,
noalias resolve_result: *const _resolver.Result,
source: *const Logger.Source,
loader_: Loader,
loader: Loader,
known_target: options.Target,
) OOM!Index.Int {
const source_index = Index.init(@as(u32, @intCast(this.graph.ast.len)));
this.graph.ast.append(bun.default_allocator, JSAst.empty) catch unreachable;
// Only enable HTML loader when it's an entry point.
const loader = loader_.disableHTML();
this.graph.input_files.append(bun.default_allocator, .{
.source = source.*,
@@ -1344,6 +1420,7 @@ pub const BundleV2 = struct {
}
try this.processServerComponentManifestFiles();
try this.processHtmlImportFiles();
const reachable_files = try this.findReachableFiles();
reachable_files_count.* = reachable_files.len -| 1; // - 1 for the runtime
@@ -1405,6 +1482,7 @@ pub const BundleV2 = struct {
}
try this.processServerComponentManifestFiles();
try this.processHtmlImportFiles();
const reachable_files = try this.findReachableFiles();
@@ -2066,7 +2144,6 @@ pub const BundleV2 = struct {
task.io_task.node.next = null;
this.incrementScanCounter();
// Handle onLoad plugins
if (!this.enqueueOnLoadPluginIfNeeded(task)) {
if (loader.shouldCopyForBundling()) {
var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()];
@@ -2177,6 +2254,7 @@ pub const BundleV2 = struct {
}
try this.processServerComponentManifestFiles();
try this.processHtmlImportFiles();
this.graph.heap.helpCatchMemoryIssues();
@@ -2728,7 +2806,7 @@ pub const BundleV2 = struct {
continue;
}
const transpiler, const bake_graph: bake.Graph, const target =
const transpiler: *Transpiler, const bake_graph: bake.Graph, const target: options.Target =
if (import_record.tag == .bake_resolve_to_ssr_graph) brk: {
if (this.framework == null) {
this.logForResolutionFailures(source.path.text, .ssr).addErrorFmt(
@@ -2766,7 +2844,7 @@ pub const BundleV2 = struct {
};
var had_busted_dir_cache = false;
var resolve_result = inner: while (true) break transpiler.resolver.resolveWithFramework(
var resolve_result: _resolver.Result = inner: while (true) break transpiler.resolver.resolveWithFramework(
source_dir,
import_record.path.text,
import_record.kind,
@@ -2972,15 +3050,24 @@ pub const BundleV2 = struct {
const hash_key = path.hashKey();
const import_record_loader = import_record.loader orelse path.loader(&transpiler.options.loaders) orelse .file;
import_record.loader = import_record_loader;
const is_html_entrypoint = import_record_loader == .html and target.isServerSide() and this.transpiler.options.dev_server == null;
if (this.pathToSourceIndexMap(target).get(hash_key)) |id| {
if (this.transpiler.options.dev_server != null and loader != .html) {
import_record.path = this.graph.input_files.items(.source)[id].path;
} else {
import_record.source_index = Index.init(id);
import_record.source_index = .init(id);
}
continue;
}
if (is_html_entrypoint) {
import_record.kind = .html_manifest;
}
const resolve_entry = resolve_queue.getOrPut(hash_key) catch bun.outOfMemory();
if (resolve_entry.found_existing) {
import_record.path = resolve_entry.value_ptr.*.path;
@@ -3001,11 +3088,15 @@ pub const BundleV2 = struct {
import_record.path = path.*;
debug("created ParseTask: {s}", .{path.text});
const resolve_task = bun.default_allocator.create(ParseTask) catch bun.outOfMemory();
resolve_task.* = ParseTask.init(&resolve_result, Index.invalid, this);
resolve_task.secondary_path_for_commonjs_interop = secondary_path_to_copy;
resolve_task.known_target = target;
resolve_task.known_target = if (import_record.kind == .html_manifest)
.browser
else
target;
resolve_task.jsx = resolve_result.jsx;
resolve_task.jsx.development = switch (transpiler.options.force_node_env) {
.development => true,
@@ -3013,24 +3104,56 @@ pub const BundleV2 = struct {
.unspecified => transpiler.options.jsx.development,
};
// Figure out the loader.
{
if (import_record.loader) |l| {
resolve_task.loader = l;
}
if (resolve_task.loader == null) {
resolve_task.loader = path.loader(&this.transpiler.options.loaders);
resolve_task.tree_shaking = this.transpiler.options.tree_shaking;
}
// HTML must be an entry point.
if (resolve_task.loader) |*l| {
l.* = l.disableHTML();
}
}
resolve_task.loader = import_record_loader;
resolve_task.tree_shaking = transpiler.options.tree_shaking;
resolve_entry.value_ptr.* = resolve_task;
if (is_html_entrypoint) {
// 1. Create the ast right here
// 2. Assign the fake source index
// 3. Add it to the graph
const empty_html_file_source: Logger.Source = .{
.path = path.*,
.index = Index.source(this.graph.input_files.len),
.contents = "",
};
var js_parser_options = bun.js_parser.Parser.Options.init(this.transpilerForTarget(target).options.jsx, .html);
js_parser_options.bundle = true;
const unique_key = std.fmt.allocPrint(this.graph.allocator, "{any}H{d:0>8}", .{
bun.fmt.hexIntLower(this.unique_key),
this.graph.html_imports.server_source_indices.len,
}) catch unreachable;
const ast_for_html_entrypoint = JSAst.init((bun.js_parser.newLazyExportAST(
this.graph.allocator,
this.transpilerForTarget(target).options.define,
js_parser_options,
this.transpilerForTarget(target).log,
Expr.init(
E.String,
E.String{
.data = unique_key,
},
Logger.Loc.Empty,
),
&empty_html_file_source,
"__jsonParse",
) catch unreachable).?);
var fake_input_file = Graph.InputFile{
.source = empty_html_file_source,
.side_effects = .no_side_effects__empty_ast,
};
this.graph.input_files.append(this.graph.allocator, fake_input_file) catch unreachable;
this.graph.ast.append(this.graph.allocator, ast_for_html_entrypoint) catch unreachable;
import_record.source_index = fake_input_file.source.index;
this.pathToSourceIndexMap(target).put(this.graph.allocator, hash_key, fake_input_file.source.index.get()) catch unreachable;
this.graph.html_imports.server_source_indices.push(this.graph.allocator, fake_input_file.source.index.get()) catch unreachable;
this.ensureClientTranspiler();
}
}
if (last_error) |err| {
@@ -3064,24 +3187,23 @@ pub const BundleV2 = struct {
pub fn onParseTaskComplete(parse_result: *ParseTask.Result, this: *BundleV2) void {
const trace = bun.perf.trace("Bundler.onParseTaskComplete");
const graph = &this.graph;
defer trace.end();
if (parse_result.external.function != null) {
const source = switch (parse_result.value) {
inline .empty, .err => |data| data.source_index.get(),
.success => |val| val.source.index.get(),
};
const loader: Loader = this.graph.input_files.items(.loader)[source];
const loader: Loader = graph.input_files.items(.loader)[source];
if (!loader.shouldCopyForBundling()) {
this.finalizers.append(bun.default_allocator, parse_result.external) catch bun.outOfMemory();
} else {
this.graph.input_files.items(.allocator)[source] = ExternalFreeFunctionAllocator.create(parse_result.external.function.?, parse_result.external.ctx.?);
graph.input_files.items(.allocator)[source] = ExternalFreeFunctionAllocator.create(parse_result.external.function.?, parse_result.external.ctx.?);
}
}
defer bun.default_allocator.destroy(parse_result);
const graph = &this.graph;
var diff: i32 = -1;
defer {
logScanCounter("in parse task .pending_items += {d} = {d}\n", .{ diff, @as(i32, @intCast(graph.pending_items)) + diff });
@@ -3090,7 +3212,7 @@ pub const BundleV2 = struct {
this.onAfterDecrementScanCounter();
}
var resolve_queue = ResolveQueue.init(this.graph.allocator);
var resolve_queue = ResolveQueue.init(graph.allocator);
defer resolve_queue.deinit();
var process_log = true;
@@ -3172,17 +3294,23 @@ pub const BundleV2 = struct {
var iter = resolve_queue.iterator();
const path_to_source_index_map = this.pathToSourceIndexMap(result.ast.target);
const original_target = result.ast.target;
while (iter.next()) |entry| {
const hash = entry.key_ptr.*;
const value = entry.value_ptr.*;
const value: *ParseTask = entry.value_ptr.*;
var existing = path_to_source_index_map.getOrPut(graph.allocator, hash) catch unreachable;
const loader = value.loader orelse value.path.loader(&this.transpiler.options.loaders) orelse options.Loader.file;
const is_html_entrypoint = loader == .html and original_target.isServerSide() and this.transpiler.options.dev_server == null;
const map = if (is_html_entrypoint) this.pathToSourceIndexMap(.browser) else path_to_source_index_map;
var existing = map.getOrPut(graph.allocator, hash) catch unreachable;
// If the same file is imported and required, and those point to different files
// Automatically rewrite it to the secondary one
if (value.secondary_path_for_commonjs_interop) |secondary_path| {
const secondary_hash = secondary_path.hashKey();
if (path_to_source_index_map.get(secondary_hash)) |secondary| {
if (map.get(secondary_hash)) |secondary| {
existing.found_existing = true;
existing.value_ptr.* = secondary;
}
@@ -3195,21 +3323,24 @@ pub const BundleV2 = struct {
.side_effects = value.side_effects,
};
const loader = new_task.loader orelse new_input_file.source.path.loader(&this.transpiler.options.loaders) orelse options.Loader.file;
new_input_file.source.index = Index.source(graph.input_files.len);
new_input_file.source.path = new_task.path;
// We need to ensure the loader is set or else importstar_ts/ReExportTypeOnlyFileES6 will fail.
new_input_file.loader = loader;
existing.value_ptr.* = new_input_file.source.index.get();
new_task.source_index = new_input_file.source.index;
new_task.ctx = this;
existing.value_ptr.* = new_task.source_index.get();
diff += 1;
graph.input_files.append(bun.default_allocator, new_input_file) catch unreachable;
graph.ast.append(bun.default_allocator, JSAst.empty) catch unreachable;
diff += 1;
if (is_html_entrypoint) {
this.ensureClientTranspiler();
this.graph.entry_points.append(this.graph.allocator, new_input_file.source.index) catch unreachable;
}
if (this.enqueueOnLoadPluginIfNeeded(new_task)) {
continue;
@@ -3217,20 +3348,16 @@ pub const BundleV2 = struct {
if (loader.shouldCopyForBundling()) {
var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[result.source.index.get()];
additional_files.push(this.graph.allocator, .{ .source_index = new_task.source_index.get() }) catch unreachable;
additional_files.push(graph.allocator, .{ .source_index = new_task.source_index.get() }) catch unreachable;
new_input_file.side_effects = _resolver.SideEffects.no_side_effects__pure_data;
graph.estimated_file_loader_count += 1;
}
graph.pool.schedule(new_task);
} else {
const loader = value.loader orelse
graph.input_files.items(.source)[existing.value_ptr.*].path.loader(&this.transpiler.options.loaders) orelse
options.Loader.file;
if (loader.shouldCopyForBundling()) {
var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[result.source.index.get()];
additional_files.push(this.graph.allocator, .{ .source_index = existing.value_ptr.* }) catch unreachable;
additional_files.push(graph.allocator, .{ .source_index = existing.value_ptr.* }) catch unreachable;
graph.estimated_file_loader_count += 1;
}
@@ -3238,9 +3365,9 @@ pub const BundleV2 = struct {
}
}
var import_records = result.ast.import_records.clone(this.graph.allocator) catch unreachable;
var import_records = result.ast.import_records.clone(graph.allocator) catch unreachable;
const input_file_loaders = this.graph.input_files.items(.loader);
const input_file_loaders = graph.input_files.items(.loader);
const save_import_record_source_index = this.transpiler.options.dev_server == null or
result.loader == .html or
result.loader.isCSS();
@@ -3255,11 +3382,11 @@ pub const BundleV2 = struct {
}
var list = pending_entry.value.list();
list.deinit(this.graph.allocator);
list.deinit(graph.allocator);
}
if (result.ast.css != null) {
this.graph.css_file_count += 1;
graph.css_file_count += 1;
}
for (import_records.slice(), 0..) |*record, i| {
@@ -3325,7 +3452,7 @@ pub const BundleV2 = struct {
break :brk .{ server_index, Index.invalid.get() };
};
this.graph.path_to_source_index_map.put(
graph.pathToSourceIndexMap(result.ast.target).put(
graph.allocator,
result.source.path.hashKey(),
reference_source_index,
@@ -3350,7 +3477,7 @@ pub const BundleV2 = struct {
dev_server.handleParseTaskFailure(
err.err,
err.target.bakeGraph(),
this.graph.input_files.items(.source)[err.source_index.get()].path.text,
graph.input_files.items(.source)[err.source_index.get()].path.text,
&err.log,
this,
) catch bun.outOfMemory();
@@ -3368,7 +3495,7 @@ pub const BundleV2 = struct {
}
if (Environment.allow_assert and this.transpiler.options.dev_server != null) {
bun.assert(this.graph.ast.items(.parts)[err.source_index.get()].len == 0);
bun.assert(graph.ast.items(.parts)[err.source_index.get()].len == 0);
}
},
}
@@ -4051,4 +4178,4 @@ pub const ThreadPool = @import("ThreadPool.zig").ThreadPool;
pub const ParseTask = @import("ParseTask.zig").ParseTask;
pub const LinkerContext = @import("LinkerContext.zig").LinkerContext;
pub const LinkerGraph = @import("LinkerGraph.zig").LinkerGraph;
pub const Graph = @import("Graph.zig").Graph;
pub const Graph = @import("Graph.zig");

View File

@@ -316,9 +316,14 @@ pub noinline fn computeChunks(
if (chunk.entry_point.is_entry_point and
(chunk.content == .html or (kinds[chunk.entry_point.source_index] == .user_specified and !chunk.has_html_chunk)))
{
chunk.template = PathTemplate.file;
if (this.resolver.opts.entry_naming.len > 0)
chunk.template.data = this.resolver.opts.entry_naming;
// Use fileWithTarget template if there are HTML imports and user hasn't manually set naming
if (this.parse_graph.html_imports.server_source_indices.len > 0 and this.resolver.opts.entry_naming.len == 0) {
chunk.template = PathTemplate.fileWithTarget;
} else {
chunk.template = PathTemplate.file;
if (this.resolver.opts.entry_naming.len > 0)
chunk.template.data = this.resolver.opts.entry_naming;
}
} else {
chunk.template = PathTemplate.chunk;
if (this.resolver.opts.chunk_naming.len > 0)
@@ -329,6 +334,17 @@ pub noinline fn computeChunks(
chunk.template.placeholder.name = pathname.base;
chunk.template.placeholder.ext = chunk.content.ext();
// Determine the target from the AST of the entry point source
const ast_targets = this.graph.ast.items(.target);
const chunk_target = ast_targets[chunk.entry_point.source_index];
chunk.template.placeholder.target = switch (chunk_target) {
.browser => "browser",
.bun => "bun",
.node => "node",
.bun_macro => "macro",
.bake_server_components_ssr => "ssr",
};
// this if check is a specific fix for `bun build hi.ts --external '*'`, without leading `./`
const dir_path = if (pathname.dir.len > 0) pathname.dir else ".";

View File

@@ -31,7 +31,7 @@ pub fn BodyReaderMixin(
};
}
fn onAborted(mixin: *Mixin, _: Response) void {
mixin.body.deinit();
mixin.body.clearAndFree();
onError(@fieldParentPtr(field, mixin));
}
};
@@ -41,32 +41,47 @@ pub fn BodyReaderMixin(
fn onData(ctx: *@This(), resp: uws.AnyResponse, chunk: []const u8, last: bool) !void {
if (last) {
var body = ctx.body; // stack copy so onBody can free everything
resp.clearAborted();
// Free everything after
var body = ctx.body;
ctx.body = .init(ctx.body.allocator);
resp.clearOnData();
if (body.items.len > 0) {
try body.appendSlice(chunk);
try onBody(@fieldParentPtr(field, ctx), ctx.body.items, resp);
try onBody(@fieldParentPtr(field, ctx), body.items, resp);
} else {
try onBody(@fieldParentPtr(field, ctx), chunk, resp);
}
body.deinit();
} else {
try ctx.body.appendSlice(chunk);
}
}
fn onOOM(ctx: *@This(), r: uws.AnyResponse) void {
var body = ctx.body;
ctx.body = .init(ctx.body.allocator);
body.deinit();
r.clearAborted();
r.clearOnData();
r.clearOnWritable();
r.writeStatus("500 Internal Server Error");
r.endWithoutBody(false);
ctx.body.deinit();
onError(@fieldParentPtr(field, ctx));
}
fn onInvalid(ctx: *@This(), r: uws.AnyResponse) void {
var body = ctx.body;
ctx.body = .init(body.allocator);
body.deinit();
r.clearAborted();
r.clearOnData();
r.clearOnWritable();
r.writeStatus("400 Bad Request");
r.endWithoutBody(false);
ctx.body.deinit();
onError(@fieldParentPtr(field, ctx));
}
};

View File

@@ -27,7 +27,9 @@ pub const ImportKind = enum(u8) {
/// A CSS "composes" property
composes = 9,
internal = 10,
html_manifest = 10,
internal = 11,
pub const Label = std.EnumArray(ImportKind, []const u8);
pub const all_labels: Label = brk: {
@@ -45,6 +47,7 @@ pub const ImportKind = enum(u8) {
labels.set(ImportKind.url, "url-token");
labels.set(ImportKind.composes, "composes");
labels.set(ImportKind.internal, "internal");
labels.set(ImportKind.html_manifest, "html_manifest");
break :brk labels;
};
@@ -60,6 +63,7 @@ pub const ImportKind = enum(u8) {
labels.set(ImportKind.url, "url()");
labels.set(ImportKind.internal, "<bun internal>");
labels.set(ImportKind.composes, "composes");
labels.set(ImportKind.html_manifest, "HTML import");
break :brk labels;
};

View File

@@ -3080,7 +3080,7 @@ pub const Parser = struct {
if (runtime_api_call.len > 0) {
var args = try p.allocator.alloc(Expr, 1);
args[0] = expr;
final_expr = try p.callRuntime(expr.loc, runtime_api_call, args);
final_expr = p.callRuntime(expr.loc, runtime_api_call, args);
}
const ns_export_part = js_ast.Part{
@@ -3092,7 +3092,7 @@ pub const Parser = struct {
.data = .{
.s_lazy_export = brk: {
const data = try p.allocator.create(Expr.Data);
data.* = expr.data;
data.* = final_expr.data;
break :brk data;
},
},

View File

@@ -2541,6 +2541,7 @@ pub const PathTemplate = struct {
try writer.print("{any}", .{bun.fmt.truncatedHash32(hash)});
}
},
.target => try writeReplacingSlashesOnWindows(writer, self.placeholder.target),
}
remain = remain[end_len + 1 ..];
}
@@ -2553,17 +2554,19 @@ pub const PathTemplate = struct {
name: []const u8 = "",
ext: []const u8 = "",
hash: ?u64 = null,
target: []const u8 = "",
pub const map = bun.ComptimeStringMap(std.meta.FieldEnum(Placeholder), .{
.{ "dir", .dir },
.{ "name", .name },
.{ "ext", .ext },
.{ "hash", .hash },
.{ "target", .target },
});
};
pub const chunk = PathTemplate{
.data = "./chunk-[hash].[ext]",
.data = "./chunk-[hash].[target].[ext]",
.placeholder = .{
.name = "chunk",
.ext = "js",
@@ -2576,6 +2579,11 @@ pub const PathTemplate = struct {
.placeholder = .{},
};
pub const fileWithTarget = PathTemplate{
.data = "[dir]/[name].[target].[ext]",
.placeholder = .{},
};
pub const asset = PathTemplate{
.data = "./[name]-[hash].[ext]",
.placeholder = .{},

View File

@@ -173,3 +173,5 @@ export var __esm = (fn, res) => () => (fn && (res = fn((fn = 0))), res);
// This is used for JSX inlining with React.
export var $$typeof = /* @__PURE__ */ Symbol.for("react.element");
export var __jsonParse = JSON.parse;

View File

@@ -315,6 +315,7 @@ pub const Runtime = struct {
@"$$typeof": ?Ref = null,
__using: ?Ref = null,
__callDispose: ?Ref = null,
__jsonParse: ?Ref = null,
pub const all = [_][]const u8{
"__name",
@@ -330,6 +331,7 @@ pub const Runtime = struct {
"$$typeof",
"__using",
"__callDispose",
"__jsonParse",
};
const all_sorted: [all.len]string = brk: {
@setEvalBranchQuota(1000000);

View File

@@ -843,4 +843,313 @@ body {
api.expectFile("out/" + jsFile).toContain("sourceMappingURL");
},
});
// Test server-side HTML imports with manifest generation
itBundled("html/server-import-basic", {
outdir: "out/",
target: "bun",
files: {
"/server.js": `
import htmlManifest from './template.html';
console.log('HTML manifest:', htmlManifest);
export function getManifest() {
return htmlManifest;
}`,
"/template.html": `
<!DOCTYPE html>
<html>
<head>
<title>Server Template</title>
<link rel="stylesheet" href="./styles.css">
<script src="./client.js"></script>
</head>
<body>
<h1>Server-Side Template</h1>
<img src="./logo.png" alt="Logo">
</body>
</html>`,
"/styles.css": `
body {
background-color: #f0f0f0;
font-family: sans-serif;
}`,
"/client.js": `
console.log('Client-side JavaScript loaded');
document.addEventListener('DOMContentLoaded', () => {
console.log('DOM ready');
});`,
"/logo.png": "fake image content",
},
entryPoints: ["/server.js"],
onAfterBundle(api) {
const serverBundle = api.readFile("out/server.js");
// Server bundle should be fully bundled and contain the manifest
expect(serverBundle).toMatchInlineSnapshot(`
"// @bun
// template.html
var template_default = __jsonParse("{"files":[{"path":"./template-9xtxtpnm.js","loader":"js","hash":"5pvhc24r"},{"path":"./template.html","loader":"html","hash":"tavkx41f"},{"path":"./template-39ryshgf.css","loader":"css","hash":"tm7n96dd"},{"path":"./logo-t6g14bq9.png","loader":"file","hash":"t6g14bq9"}]}");
// server.js
console.log("HTML manifest:", template_default);
function getManifest() {
return template_default;
}
export {
getManifest
};
"
`);
},
});
// Test multiple HTML imports
itBundled("html/server-import-multiple", {
outdir: "out/",
target: "bun",
files: {
"/server.js": `
import homeManifest from './home.html';
import aboutManifest from './about.html';
export function getHome() {
return homeManifest;
}
export function getAbout() {
return aboutManifest;
}`,
"/home.html": `
<!DOCTYPE html>
<html>
<head>
<title>Home Page</title>
<link rel="stylesheet" href="./shared.css">
<link rel="stylesheet" href="./home.css">
<script src="./shared.js"></script>
<script src="./home.js"></script>
</head>
<body>
<h1>Welcome Home</h1>
</body>
</html>`,
"/about.html": `
<!DOCTYPE html>
<html>
<head>
<title>About Page</title>
<link rel="stylesheet" href="./shared.css">
<link rel="stylesheet" href="./about.css">
<script src="./shared.js"></script>
<script src="./about.js"></script>
</head>
<body>
<h1>About Us</h1>
</body>
</html>`,
"/shared.css": `.shared { font-family: Arial; }`,
"/home.css": `.home { color: blue; }`,
"/about.css": `.about { color: green; }`,
"/shared.js": `console.log('Shared JS loaded');`,
"/home.js": `console.log('Home page JS');`,
"/about.js": `console.log('About page JS');`,
},
entryPoints: ["/server.js"],
onAfterBundle(api) {
const serverBundle = api.readFile("out/server.js");
// Should contain two separate manifests
expect(serverBundle).toMatchInlineSnapshot(`
"// @bun
// home.html
var home_default = __jsonParse("{"files":[{"path":"./home-xknw72bg.js","loader":"js","hash":"7wcgtj8w"},{"path":"./home.html","loader":"html","hash":"0x59yy6q"},{"path":"./home-mrjs00yz.css","loader":"css","hash":"jnx9amw6"}]}");
// about.html
var about_default = __jsonParse("{"files":[{"path":"./about-4wt6nb97.js","loader":"js","hash":"e4p5eywy"},{"path":"./about.html","loader":"html","hash":"c8fb09sb"},{"path":"./about-ddd3danw.css","loader":"css","hash":"kfkd1fdt"}]}");
// server.js
function getHome() {
return home_default;
}
function getAbout() {
return about_default;
}
export {
getHome,
getAbout
};
"
`);
},
});
// Test with nested dependencies
itBundled("html/server-import-nested", {
outdir: "out/",
target: "bun",
files: {
"/server.js": `
import pageManifest from './page.html';
export default pageManifest;`,
"/page.html": `
<!DOCTYPE html>
<html>
<head>
<link rel="stylesheet" href="./main.css">
<script type="module" src="./main.js"></script>
</head>
<body>
<h1>Page with nested deps</h1>
</body>
</html>`,
"/main.css": `
@import './reset.css';
@import './theme.css';
.main { padding: 20px; }`,
"/reset.css": `
* { margin: 0; padding: 0; }`,
"/theme.css": `
@import './colors.css';
body { font-size: 16px; }`,
"/colors.css": `
:root { --primary: blue; }`,
"/main.js": `
import { utils } from './utils.js';
import { api } from './api.js';
utils.init();
api.setup();`,
"/utils.js": `
import { config } from './config.js';
export const utils = {
init() { console.log('Utils init', config); }
};`,
"/api.js": `
import { config } from './config.js';
export const api = {
setup() { console.log('API setup', config); }
};`,
"/config.js": `
export const config = { version: '1.0' };`,
},
entryPoints: ["/server.js"],
onAfterBundle(api) {
const serverBundle = api.readFile("out/server.js");
// Should include all nested dependencies in the manifest
expect(serverBundle).toMatchInlineSnapshot(`
"// @bun
// page.html
var page_default = __jsonParse("{"files":[{"path":"./page-256naknx.js","loader":"js","hash":"nj9w30ca"},{"path":"./page.html","loader":"html","hash":"f5ddyd05"},{"path":"./page-4btw6d53.css","loader":"css","hash":"1pngv86f"}]}");
// server.js
var server_default = page_default;
export {
server_default as default
};
"
`);
},
});
// Test CLI usage with HTML imports
itBundled("html/server-import-cli", {
outdir: "out/",
target: "bun",
files: {
"/app.js": `
import dashboardHTML from './dashboard.html';
Bun.serve({
port: 3000,
fetch(req) {
return new Response(JSON.stringify(dashboardHTML), {
headers: { 'Content-Type': 'application/json' }
});
}
});`,
"/dashboard.html": `
<!DOCTYPE html>
<html>
<head>
<title>Dashboard</title>
<link rel="stylesheet" href="./dashboard.css">
<script src="./dashboard.js" defer></script>
<link rel="icon" href="./favicon.ico">
</head>
<body>
<h1>Dashboard</h1>
<img src="./chart.svg" alt="Chart">
</body>
</html>`,
"/dashboard.css": `
body { background: #f5f5f5; }
h1 { color: #333; }`,
"/dashboard.js": `
console.log('Dashboard loaded');
fetch('/api/data').then(r => r.json()).then(console.log);`,
"/favicon.ico": "fake favicon",
"/chart.svg": "<svg>fake chart</svg>",
},
entryPoints: ["/app.js"],
onAfterBundle(api) {
const appBundle = api.readFile("out/app.js");
// Should be a complete bundle with all assets in manifest
expect(appBundle).toMatchInlineSnapshot(`
"// @bun
// dashboard.html
var dashboard_default = __jsonParse("{"files":[{"path":"./dashboard-b3qf2b7g.js","loader":"js","hash":"tye2hkpb"},{"path":"./dashboard.html","loader":"html","hash":"gyx24kdp"},{"path":"./dashboard-3fxnqp19.css","loader":"css","hash":"520tsev6"},{"path":"./favicon-pytz1nfb.ico","loader":"file","hash":"pytz1nfb"},{"path":"./chart-9q6p3em7.svg","loader":"file","hash":"9q6p3em7"}]}");
// app.js
Bun.serve({
port: 3000,
fetch(req) {
return new Response(JSON.stringify(dashboard_default), {
headers: { "Content-Type": "application/json" }
});
}
});
"
`);
},
});
// Test that the manifest contains proper metadata
itBundled("html/server-import-metadata", {
outdir: "out/",
target: "bun",
files: {
"/server.js": `
import htmlData from './index.html';
export { htmlData };`,
"/index.html": `
<!DOCTYPE html>
<html>
<head>
<script src="./app.js"></script>
<link rel="stylesheet" href="./app.css">
</head>
<body>
<h1>Test</h1>
</body>
</html>`,
"/app.js": `console.log('App');`,
"/app.css": `body { margin: 0; }`,
},
entryPoints: ["/server.js"],
onAfterBundle(api) {
const serverBundle = api.readFile("out/server.js");
// Each file in manifest should have complete metadata
expect(serverBundle).toMatchInlineSnapshot(`
"// @bun
// index.html
var server_import_metadata_default = __jsonParse("{"files":[{"path":"./index-1zbb1gjs.js","loader":"js","hash":"akfrwy27"},{"path":"./index.html","loader":"html","hash":"pmc0g65g"},{"path":"./index-skz1pf92.css","loader":"css","hash":"fqspnz4x"}]}");
export {
server_import_metadata_default as htmlData
};
"
`);
},
});
});