mirror of https://github.com/oven-sh/bun
synced 2026-02-03 23:48:52 +00:00
Compare commits
3 Commits
ciro/fix-a...jarred/chu
| Author | SHA1 | Date |
|---|---|---|
|  | 436819a9b2 |  |
|  | 81e767c7e3 |  |
|  | 606248c662 |  |
117 packages/bun-types/bun.d.ts (vendored)
@@ -2174,6 +2174,15 @@ declare module "bun" {
}; // | string;
root?: string; // project root
splitting?: boolean; // default true, enable code splitting
/**
* Controls how entry point signatures are preserved.
* - `"strict"`: Entry chunks contain only their direct code
* - `"allow-extension"`: Shared modules can be merged into entry chunks (default)
* - `"exports-only"`: Only explicitly exported symbols are preserved
* - `"false"`: Maximum optimization with no restrictions
* @default "allow-extension"
*/
preserveEntrySignatures?: "strict" | "allow-extension" | "exports-only" | "false";
plugins?: BunPlugin[];
// manifest?: boolean; // whether to return manifest
external?: string[];
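The new option can be passed to `Bun.build` alongside `splitting`; a minimal usage sketch based only on the type declared above (paths and entry names are illustrative):

```ts
// Illustrative only: exercises the `preserveEntrySignatures` option declared above.
const result = await Bun.build({
  entrypoints: ["./src/a.ts", "./src/b.ts"],
  outdir: "./dist",
  splitting: true,
  // "strict" keeps each entry chunk limited to its own code;
  // "allow-extension" (the default) lets shared modules be merged into entry chunks.
  preserveEntrySignatures: "strict",
});
console.log(result.success, result.outputs.length);
```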
@@ -2259,6 +2268,114 @@ declare module "bun" {
*/
emitDCEAnnotations?: boolean;

/**
* Advanced chunking options for custom module grouping and size constraints.
*
* @example
* ```ts
* Bun.build({
*   entrypoints: ["src/index.ts"],
*   splitting: true,
*   advancedChunks: {
*     minShareCount: 2,
*     minSize: 1024,
*     groups: [
*       {
*         name: "vendor",
*         test: /node_modules/,
*         priority: 10,
*         minSize: 5000
*       }
*     ]
*   }
* })
* ```
*/
advancedChunks?: {
/**
* Minimum number of entry points that must share a module for it to be in a common chunk.
* @default undefined
*/
minShareCount?: number;

/**
* Minimum size for a chunk in bytes.
* @default undefined
*/
minSize?: number;

/**
* Maximum size for a chunk in bytes.
* @default undefined
*/
maxSize?: number;

/**
* Minimum size for a module to be considered for chunking.
* @default undefined
*/
minModuleSize?: number;

/**
* Maximum size for a module to be included in a chunk.
* @default undefined
*/
maxModuleSize?: number;

/**
* Custom grouping rules for modules.
*/
groups?: Array<{
/**
* Name of the group.
*/
name: string;

/**
* Test pattern for matching modules (string will be treated as regex).
*/
test?: string | RegExp;

/**
* Priority for group matching (higher priority groups are matched first).
* @default 0
*/
priority?: number;

/**
* Type of modules to include.
* @default "all"
*/
type?: "javascript" | "css" | "asset" | "all";

/**
* Minimum size for the group.
*/
minSize?: number;

/**
* Maximum size for the group.
*/
maxSize?: number;

/**
* Minimum number of modules in the group.
*/
minChunks?: number;

/**
* Maximum number of modules in the group.
*/
maxChunks?: number;

/**
* Enforce creation of this group even if it would be empty.
* @default false
*/
enforce?: boolean;
}>;
};

// treeshaking?: boolean;

// jsx?:
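Per the comment above, a string `test` is treated as a regular expression; a small illustrative sketch of that interpretation (this is a hypothetical helper, not the bundler's actual matcher):

```ts
// Hypothetical helper: normalize a string `test` pattern to a RegExp, as the doc comment describes.
function toMatcher(test: string | RegExp): RegExp {
  return typeof test === "string" ? new RegExp(test) : test;
}

toMatcher("node_modules").test("node_modules/react/index.js"); // true
toMatcher(/node_modules/).test("src/app.ts"); // false
```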
@@ -1975,6 +1975,153 @@ pub const Api = struct {
}
};

pub const PreserveEntrySignatures = enum(u8) {
/// Entry exports are always preserved
strict,

/// Allow adding additional modules to entry chunks (default)
allow_extension,

/// Only the specific exports from the entry module are preserved
exports_only,

/// Entry exports are not preserved, allows maximum optimization
false,

_,

pub fn jsonStringify(self: @This(), writer: anytype) !void {
return try writer.write(@tagName(self));
}
};

pub const AdvancedChunksOptions = struct {
/// Minimum number of entry points that must share a module for it to be in a common chunk
min_share_count: ?u32 = null,

/// Minimum size for a chunk in bytes
min_size: ?f64 = null,

/// Maximum size for a chunk in bytes
max_size: ?f64 = null,

/// Minimum size for a module to be considered for chunking
min_module_size: ?f64 = null,

/// Maximum size for a module to be included in a chunk
max_module_size: ?f64 = null,

/// Custom grouping rules
groups: ?[]const MatchGroup = null,

pub fn decode(reader: anytype) anyerror!AdvancedChunksOptions {
var this = std.mem.zeroes(AdvancedChunksOptions);
this.min_share_count = try reader.readValue(?u32);
this.min_size = try reader.readValue(?f64);
this.max_size = try reader.readValue(?f64);
this.min_module_size = try reader.readValue(?f64);
this.max_module_size = try reader.readValue(?f64);
if (try reader.readValue(bool)) {
this.groups = try reader.readArray(MatchGroup);
}
return this;
}

pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
try writer.writeValue(@TypeOf(this.min_share_count), this.min_share_count);
try writer.writeValue(@TypeOf(this.min_size), this.min_size);
try writer.writeValue(@TypeOf(this.max_size), this.max_size);
try writer.writeValue(@TypeOf(this.min_module_size), this.min_module_size);
try writer.writeValue(@TypeOf(this.max_module_size), this.max_module_size);
try writer.writeValue(bool, this.groups != null);
if (this.groups) |groups| {
try writer.writeArray(MatchGroup, groups);
}
}
};

pub const MatchGroup = struct {
/// Name of the group
name: []const u8,

/// Test pattern (regex or function)
test_pattern: ?[]const u8 = null,

/// Priority for group matching (higher priority groups are matched first)
priority: ?u32 = null,

/// Type of modules to include
type_: ?MatchGroupModuleType = null,

/// Minimum size for the group
min_size: ?f64 = null,

/// Maximum size for the group
max_size: ?f64 = null,

/// Minimum number of modules in the group
min_chunks: ?u32 = null,

/// Maximum number of modules in the group
max_chunks: ?u32 = null,

/// Enforce creation of this group even if it would be empty
enforce: bool = false,

pub fn decode(reader: anytype) anyerror!MatchGroup {
var this = std.mem.zeroes(MatchGroup);
this.name = try reader.readValue([]const u8);
if (try reader.readValue(bool)) {
this.test_pattern = try reader.readValue([]const u8);
}
this.priority = try reader.readValue(?u32);
if (try reader.readValue(bool)) {
this.type_ = try reader.readEnum(MatchGroupModuleType);
}
this.min_size = try reader.readValue(?f64);
this.max_size = try reader.readValue(?f64);
this.min_chunks = try reader.readValue(?u32);
this.max_chunks = try reader.readValue(?u32);
this.enforce = try reader.readValue(bool);
return this;
}

pub fn encode(this: *const @This(), writer: anytype) anyerror!void {
try writer.writeValue(@TypeOf(this.name), this.name);
try writer.writeValue(bool, this.test_pattern != null);
if (this.test_pattern) |test_val| {
try writer.writeValue(@TypeOf(test_val), test_val);
}
try writer.writeValue(@TypeOf(this.priority), this.priority);
try writer.writeValue(bool, this.type_ != null);
if (this.type_) |type_| {
try writer.writeInt(@intFromEnum(type_));
}
try writer.writeValue(@TypeOf(this.min_size), this.min_size);
try writer.writeValue(@TypeOf(this.max_size), this.max_size);
try writer.writeValue(@TypeOf(this.min_chunks), this.min_chunks);
try writer.writeValue(@TypeOf(this.max_chunks), this.max_chunks);
try writer.writeValue(@TypeOf(this.enforce), this.enforce);
}
};

pub const MatchGroupModuleType = enum(u8) {
/// Match only JavaScript modules
javascript,
/// Match only CSS modules
css,
/// Match only asset modules
asset,
/// Match all module types
all,

_,

pub fn jsonStringify(self: @This(), writer: anytype) !void {
return try writer.write(@tagName(self));
}
};

pub const FileHandle = struct {
/// path
path: []const u8,
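The `encode`/`decode` pairs above frame optional fields with a presence flag: a bool is written first, and the array or enum payload only follows when the flag is true. A minimal TypeScript sketch of the same framing, with hypothetical writer/reader helpers standing in for the schema's `writeValue`/`readValue`:

```ts
// Hypothetical in-memory writer/reader illustrating the presence-flag framing used above.
class Writer { values: unknown[] = []; write(v: unknown) { this.values.push(v); } }
class Reader { constructor(private values: unknown[], private i = 0) {} read<T>(): T { return this.values[this.i++] as T; } }

function encodeGroups(w: Writer, groups: string[] | null): void {
  w.write(groups !== null);             // presence flag
  if (groups !== null) w.write(groups); // payload only when present
}

function decodeGroups(r: Reader): string[] | null {
  return r.read<boolean>() ? r.read<string[]>() : null;
}
```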
@@ -22,6 +22,145 @@ const Index = @import("../../ast/base.zig").Index;

const debug = bun.Output.scoped(.Transpiler, false);

fn parseAdvancedChunksOptions(globalThis: *JSC.JSGlobalObject, obj: JSC.JSValue, allocator: std.mem.Allocator) !options.AdvancedChunksOptions {
var opts = options.AdvancedChunksOptions{};

if (try obj.get(globalThis, "minShareCount")) |val| {
if (!val.isUndefinedOrNull() and val.isNumber()) {
const num = val.asNumber();
if (num >= 0 and num <= std.math.maxInt(u32)) {
opts.min_share_count = @intFromFloat(num);
}
}
}

if (try obj.get(globalThis, "minSize")) |val| {
if (!val.isUndefinedOrNull() and val.isNumber()) {
opts.min_size = val.asNumber();
}
}

if (try obj.get(globalThis, "maxSize")) |val| {
if (!val.isUndefinedOrNull() and val.isNumber()) {
opts.max_size = val.asNumber();
}
}

if (try obj.get(globalThis, "minModuleSize")) |val| {
if (!val.isUndefinedOrNull() and val.isNumber()) {
opts.min_module_size = val.asNumber();
}
}

if (try obj.get(globalThis, "maxModuleSize")) |val| {
if (!val.isUndefinedOrNull() and val.isNumber()) {
opts.max_module_size = val.asNumber();
}
}

if (try obj.getArray(globalThis, "groups")) |groups_array| {
const groups_len = try groups_array.getLength(globalThis);
if (groups_len > 0) {
var groups = try allocator.alloc(options.MatchGroup, groups_len);

for (0..groups_len) |i| {
const group_val = try groups_array.getIndex(globalThis, @intCast(i));
if (!group_val.isObject()) {
return globalThis.throwInvalidArguments("advancedChunks.groups[{}] must be an object", .{i});
}

var group = options.MatchGroup{ .name = "", .enforce = false };

if (try group_val.get(globalThis, "name")) |name_val| {
if (name_val.isString()) {
const slice = try name_val.toSlice(globalThis, allocator);
defer slice.deinit();
group.name = try allocator.dupe(u8, slice.slice());
} else {
return globalThis.throwInvalidArguments("advancedChunks.groups[{}].name must be a string", .{i});
}
}

if (try group_val.get(globalThis, "test")) |test_val| {
if (test_val.isString()) {
const slice = try test_val.toSlice(globalThis, allocator);
defer slice.deinit();
group.test_pattern = try allocator.dupe(u8, slice.slice());
}
}

if (try group_val.get(globalThis, "priority")) |priority_val| {
if (priority_val.isNumber()) {
const num = priority_val.asNumber();
if (num >= 0 and num <= std.math.maxInt(u32)) {
group.priority = @intFromFloat(num);
}
}
}

if (try group_val.get(globalThis, "type")) |type_val| {
if (type_val.isString()) {
const slice = try type_val.toSlice(globalThis, allocator);
defer slice.deinit();
const type_str = slice.slice();
if (strings.eqlComptime(type_str, "javascript")) {
group.type_ = .javascript;
} else if (strings.eqlComptime(type_str, "css")) {
group.type_ = .css;
} else if (strings.eqlComptime(type_str, "asset")) {
group.type_ = .asset;
} else if (strings.eqlComptime(type_str, "all")) {
group.type_ = .all;
}
}
}

if (try group_val.get(globalThis, "minSize")) |min_size_val| {
if (min_size_val.isNumber()) {
group.min_size = min_size_val.asNumber();
}
}

if (try group_val.get(globalThis, "maxSize")) |max_size_val| {
if (max_size_val.isNumber()) {
group.max_size = max_size_val.asNumber();
}
}

if (try group_val.get(globalThis, "minChunks")) |min_chunks_val| {
if (min_chunks_val.isNumber()) {
const num = min_chunks_val.asNumber();
if (num >= 0 and num <= std.math.maxInt(u32)) {
group.min_chunks = @intFromFloat(num);
}
}
}

if (try group_val.get(globalThis, "maxChunks")) |max_chunks_val| {
if (max_chunks_val.isNumber()) {
const num = max_chunks_val.asNumber();
if (num >= 0 and num <= std.math.maxInt(u32)) {
group.max_chunks = @intFromFloat(num);
}
}
}

if (try group_val.get(globalThis, "enforce")) |enforce_val| {
if (enforce_val.isBoolean()) {
group.enforce = enforce_val.toBoolean();
}
}

groups[i] = group;
}

opts.groups = groups;
}
}

return opts;
}
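Per the checks above, count-like fields (`minShareCount`, `priority`, `minChunks`, `maxChunks`) are only accepted when they are numbers within `u32` range, while the size fields take any number; values that fail a check are silently dropped rather than reported. A small sketch of the observable effect under that reading:

```ts
// Illustrative: fields that fail the type/range checks above are ignored, not rejected.
await Bun.build({
  entrypoints: ["./src/index.ts"],
  splitting: true,
  outdir: "./dist",
  advancedChunks: {
    minShareCount: 2,                           // accepted: number within u32 range
    minSize: -1,                                // accepted here: sizes are only type-checked
    groups: [{ name: "vendor", priority: -5 }], // negative priority fails the range check and is dropped
  },
});
```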
pub const JSBundler = struct {
const OwnedString = bun.MutableString;

@@ -38,6 +177,8 @@ pub const JSBundler = struct {
jsx: options.JSX.Pragma = .{},
force_node_env: options.BundleOptions.ForceNodeEnv = .unspecified,
code_splitting: bool = false,
preserve_entry_signatures: options.PreserveEntrySignatures = .allow_extension,
advanced_chunks: ?options.AdvancedChunksOptions = null,
minify: Minify = .{},
no_macros: bool = false,
ignore_dce_annotations: bool = false,
@@ -233,6 +374,10 @@ pub const JSBundler = struct {
this.packages = packages;
}

if (try config.getOptionalEnum(globalThis, "preserveEntrySignatures", options.PreserveEntrySignatures)) |preserve| {
this.preserve_entry_signatures = preserve;
}

if (try config.getOptionalEnum(globalThis, "format", options.Format)) |format| {
this.format = format;

@@ -245,6 +390,16 @@ pub const JSBundler = struct {
this.code_splitting = hot;
}

if (try config.getOptionalEnum(globalThis, "preserveEntrySignatures", options.PreserveEntrySignatures)) |preserve| {
this.preserve_entry_signatures = preserve;
}

if (try config.get(globalThis, "advancedChunks")) |advanced| {
if (!advanced.isUndefinedOrNull()) {
this.advanced_chunks = try parseAdvancedChunksOptions(globalThis, advanced, allocator);
}
}

if (try config.getTruthy(globalThis, "minify")) |minify| {
if (minify.isBoolean()) {
const value = minify.toBoolean();

@@ -496,6 +651,9 @@ pub const JSBundler = struct {
syntax: bool = false,
};

// TODO: Implement AdvancedChunksOptions
pub const AdvancedChunksOptions = options.AdvancedChunksOptions;

pub const Serve = struct {
handler_path: OwnedString = OwnedString.initEmpty(bun.default_allocator),
prefix: OwnedString = OwnedString.initEmpty(bun.default_allocator),

@@ -527,6 +685,9 @@ pub const JSBundler = struct {
self.conditions.deinit();
self.drop.deinit();
self.banner.deinit();
if (self.advanced_chunks) |*chunks| {
chunks.deinit(allocator);
}
self.env_prefix.deinit();
self.footer.deinit();
}
@@ -29,6 +29,9 @@ pub const Chunk = struct {
has_html_chunk: bool = false,
is_browser_chunk_from_server_build: bool = false,

/// Controls how exports are generated for entry chunks (Rolldown optimization)
preserve_entry_signature: ?options.PreserveEntrySignatures = null,

output_source_map: sourcemap.SourceMapPieces,

intermediate_output: IntermediateOutput = .{ .empty = {} },

@@ -64,6 +64,8 @@ pub const LinkerContext = struct {
css_chunking: bool = false,
source_maps: options.SourceMapOption = .none,
target: options.Target = .browser,
preserve_entry_signatures: options.PreserveEntrySignatures = .allow_extension,
advanced_chunks: ?options.AdvancedChunksOptions = null,

mode: Mode = .bundle,

@@ -1445,6 +1447,11 @@ pub const LinkerContext = struct {
if (bits.isSet(entry_points_count) and !traverse_again)
return;

// Increment share_count when setting a new bit (Rolldown optimization)
if (!bits.isSet(entry_points_count)) {
c.graph.files.items(.share_count)[source_index] += 1;
}

bits.set(entry_points_count);

if (comptime bun.Environment.enable_logs)
@@ -408,6 +408,9 @@ pub fn load(
pub const File = struct {
entry_bits: AutoBitSet = undefined,

/// Number of entry points that can reach this module (Rolldown optimization)
share_count: u32 = 0,

input_file: Index = Index.source(0),

/// The minimum number of links in the module graph to get from an entry point

@@ -859,6 +859,8 @@ pub const BundleV2 = struct {
this.linker.options.target = transpiler.options.target;
this.linker.options.output_format = transpiler.options.output_format;
this.linker.options.generate_bytecode_cache = transpiler.options.bytecode;
this.linker.options.preserve_entry_signatures = transpiler.options.preserve_entry_signatures;
this.linker.options.advanced_chunks = transpiler.options.advanced_chunks;

this.linker.dev_server = transpiler.options.dev_server;

@@ -1740,6 +1742,7 @@ pub const BundleV2 = struct {
transpiler.options.source_map = config.source_map;
transpiler.options.packages = config.packages;
transpiler.options.code_splitting = config.code_splitting;
transpiler.options.preserve_entry_signatures = config.preserve_entry_signatures;
transpiler.options.emit_dce_annotations = config.emit_dce_annotations orelse !config.minify.whitespace;
transpiler.options.ignore_dce_annotations = config.ignore_dce_annotations;
transpiler.options.css_chunking = config.css_chunking;
@@ -122,6 +122,7 @@ pub noinline fn computeChunks(
.has_html_chunk = has_html_chunk,
.output_source_map = sourcemap.SourceMapPieces.init(this.allocator),
.is_browser_chunk_from_server_build = could_be_browser_target_from_server_build and ast_targets[source_index] == .browser,
.preserve_entry_signature = this.options.preserve_entry_signatures,
};

{
@@ -183,18 +184,36 @@ pub noinline fn computeChunks(
}
var file_entry_bits: []AutoBitSet = this.graph.files.items(.entry_bits);

// Track which modules have been assigned to chunks (Rolldown optimization)
var module_to_assigned = try AutoBitSet.initEmpty(this.allocator, this.graph.files.len);
defer module_to_assigned.deinit(this.allocator);

const Handler = struct {
chunks: []Chunk,
allocator: std.mem.Allocator,
source_id: u32,
module_to_assigned: *AutoBitSet,

pub fn next(c: *@This(), chunk_id: usize) void {
// Ensure module hasn't been assigned already (Rolldown optimization)
if (bun.Environment.allow_assert) {
bun.assert(!c.module_to_assigned.isSet(c.source_id));
c.module_to_assigned.set(c.source_id);
}

_ = c.chunks[chunk_id].files_with_parts_in_chunk.getOrPut(c.allocator, @as(u32, @truncate(c.source_id))) catch unreachable;
}
};

const css_reprs = this.graph.ast.items(.css);

// Check if we can extend entry chunks (Rolldown optimization)
const allow_extension_optimize = this.options.preserve_entry_signatures != .strict;

// Map to hold modules that might be merged into existing chunks
var pending_common_chunks = bun.StringArrayHashMap(BabyList(Index.Int)).init(temp_allocator);
defer pending_common_chunks.deinit();

// Figure out which JS files are in which chunk
if (js_chunks.count() > 0) {
for (this.graph.reachable_files) |source_index| {
@@ -205,9 +224,26 @@ pub noinline fn computeChunks(

if (this.graph.code_splitting) {
const js_chunk_key = try temp_allocator.dupe(u8, entry_bits.bytes(this.graph.entry_points.len));
var js_chunk_entry = try js_chunks.getOrPut(js_chunk_key);

if (!js_chunk_entry.found_existing) {
// Check if a chunk already exists for this BitSet pattern
if (js_chunks.getPtr(js_chunk_key)) |existing_chunk| {
// Ensure module hasn't been assigned already (Rolldown optimization)
if (bun.Environment.allow_assert) {
bun.assert(!module_to_assigned.isSet(source_index.get()));
module_to_assigned.set(source_index.get());
}

_ = existing_chunk.files_with_parts_in_chunk.getOrPut(this.allocator, @as(u32, @truncate(source_index.get()))) catch unreachable;
} else if (allow_extension_optimize and this.graph.files.items(.share_count)[source_index.get()] > 1) {
// Defer creation - might be able to add to existing chunk
var pending = try pending_common_chunks.getOrPut(js_chunk_key);
if (!pending.found_existing) {
pending.value_ptr.* = BabyList(Index.Int){};
}
try pending.value_ptr.push(temp_allocator, source_index.get());
} else {
// Create new common chunk immediately
var js_chunk_entry = try js_chunks.getOrPut(js_chunk_key);
const is_browser_chunk_from_server_build = could_be_browser_target_from_server_build and ast_targets[source_index.get()] == .browser;
js_chunk_entry.value_ptr.* = .{
.entry_bits = entry_bits.*,
@@ -220,14 +256,21 @@ pub noinline fn computeChunks(
.output_source_map = sourcemap.SourceMapPieces.init(this.allocator),
.is_browser_chunk_from_server_build = is_browser_chunk_from_server_build,
};
}

_ = js_chunk_entry.value_ptr.files_with_parts_in_chunk.getOrPut(this.allocator, @as(u32, @truncate(source_index.get()))) catch unreachable;
// Ensure module hasn't been assigned already (Rolldown optimization)
if (bun.Environment.allow_assert) {
bun.assert(!module_to_assigned.isSet(source_index.get()));
module_to_assigned.set(source_index.get());
}

_ = js_chunk_entry.value_ptr.files_with_parts_in_chunk.getOrPut(this.allocator, @as(u32, @truncate(source_index.get()))) catch unreachable;
}
} else {
var handler = Handler{
.chunks = js_chunks.values(),
.allocator = this.allocator,
.source_id = source_index.get(),
.module_to_assigned = &module_to_assigned,
};
entry_bits.forEach(Handler, &handler, Handler.next);
}
@@ -236,6 +279,33 @@ pub noinline fn computeChunks(
}
}

// Process pending common chunks (Rolldown optimization)
if (allow_extension_optimize and pending_common_chunks.count() > 0) {
try tryInsertCommonModulesToExistingChunk(
this,
&js_chunks,
&pending_common_chunks,
file_entry_bits,
ast_targets,
&module_to_assigned,
temp_allocator,
could_be_browser_target_from_server_build,
);
}

// Apply advanced chunks rules if configured
if (this.options.advanced_chunks) |advanced_opts| {
try applyAdvancedChunks(
this,
&js_chunks,
advanced_opts,
file_entry_bits,
ast_targets,
&module_to_assigned,
temp_allocator,
);
}

// Sort the chunks for determinism. This matters because we use chunk indices
// as sorting keys in a few places.
const chunks: []Chunk = sort_chunks: {

@@ -387,6 +457,289 @@ pub noinline fn computeChunks(
return chunks;
}
/// Try to insert common modules into existing entry chunks (Rolldown optimization)
fn tryInsertCommonModulesToExistingChunk(
this: *LinkerContext,
js_chunks: *bun.StringArrayHashMap(Chunk),
pending_common_chunks: *bun.StringArrayHashMap(BabyList(Index.Int)),
file_entry_bits: []AutoBitSet,
ast_targets: []options.Target,
module_to_assigned: *AutoBitSet,
_: std.mem.Allocator,
could_be_browser_target_from_server_build: bool,
) !void {
var pending_iter = pending_common_chunks.iterator();
while (pending_iter.next()) |entry| {
const js_chunk_key = entry.key_ptr.*;
const modules = entry.value_ptr.*;

// First, try to find an existing entry chunk that can host these modules
var chunk_extended = false;
if (this.options.preserve_entry_signatures != .strict) {
// Get the BitSet for these modules (they all share the same one)
const module_bits = &file_entry_bits[modules.slice()[0]];

// Try to find a suitable entry chunk to extend
// The best candidate is one that:
// 1. Is an entry point chunk (not a common chunk)
// 2. Has its entry bit set in the module's BitSet (can reach the module)
// 3. Allows extension (preserve_entry_signature != strict)
// 4. Preferably is already importing some of these modules (minimize size increase)

var best_chunk_index: ?usize = null;
var best_score: u32 = 0;

var chunks_iter = js_chunks.iterator();
while (chunks_iter.next()) |chunk_entry| {
const chunk = chunk_entry.value_ptr.*;

// Skip non-entry chunks
if (!chunk.entry_point.is_entry_point) {
continue;
}

// Check if this chunk's preserve_entry_signature allows extension
if (chunk.preserve_entry_signature) |preserve| {
if (preserve == .strict) {
continue; // This chunk doesn't allow extension
}
}

// Check if this entry chunk can reach all the modules
// by checking if its entry bit is set in the module's BitSet
const entry_id = chunk.entry_point.entry_point_id;
if (module_bits.isSet(entry_id)) {
// Calculate a score based on how many of these modules are already imported
// by other modules in this chunk (indirect dependencies)
var score: u32 = 0;
for (modules.slice()) |module_index| {
// Check if any existing module in the chunk imports this module
var files_iter = chunk.files_with_parts_in_chunk.iterator();
while (files_iter.next()) |file_entry| {
const file_index = file_entry.key_ptr.*;
if (file_index == module_index) {
// Module is already in this chunk!
score += 100;
} else if (this.graph.ast.items(.import_records)[file_index].slice().len > 0) {
// Check if this file imports the module we're considering
for (this.graph.ast.items(.import_records)[file_index].slice()) |import| {
if (import.source_index.isValid() and import.source_index.get() == module_index) {
score += 10;
break;
}
}
}
}
}

// Also prefer smaller chunks to balance sizes
const chunk_size = chunk.files_with_parts_in_chunk.count();
if (chunk_size < 50) {
score += 5;
}

if (best_chunk_index == null or score > best_score) {
best_chunk_index = js_chunks.getIndex(chunk_entry.key_ptr.*);
best_score = score;
}
}
}

if (best_chunk_index) |chunk_idx| {
// Get the chunk by index
const chunk_values = js_chunks.values();
var best_chunk = &chunk_values[chunk_idx];

// Add all modules to the best matching chunk
for (modules.slice()) |module_index| {
// Ensure module hasn't been assigned already
if (bun.Environment.allow_assert) {
bun.assert(!module_to_assigned.isSet(module_index));
module_to_assigned.set(module_index);
}

_ = best_chunk.files_with_parts_in_chunk.getOrPut(this.allocator, @as(u32, @truncate(module_index))) catch unreachable;
}

chunk_extended = true;
}
}

// If we couldn't extend an existing chunk, create a new common chunk
if (!chunk_extended) {
var js_chunk_entry = try js_chunks.getOrPut(js_chunk_key);
if (!js_chunk_entry.found_existing) {
// Use the first module's entry_bits for the chunk
const first_module_bits = &file_entry_bits[modules.slice()[0]];
const is_browser_chunk_from_server_build = could_be_browser_target_from_server_build and ast_targets[modules.slice()[0]] == .browser;

js_chunk_entry.value_ptr.* = .{
.entry_bits = first_module_bits.*,
.entry_point = .{
.source_index = modules.slice()[0],
},
.content = .{
.javascript = .{},
},
.output_source_map = sourcemap.SourceMapPieces.init(this.allocator),
.is_browser_chunk_from_server_build = is_browser_chunk_from_server_build,
};
}

// Add all modules to the chunk
for (modules.slice()) |module_index| {
// Ensure module hasn't been assigned already
if (bun.Environment.allow_assert) {
bun.assert(!module_to_assigned.isSet(module_index));
module_to_assigned.set(module_index);
}

_ = js_chunk_entry.value_ptr.files_with_parts_in_chunk.getOrPut(this.allocator, @as(u32, @truncate(module_index))) catch unreachable;
}
}
}
}
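The candidate-selection loop above reduces to an additive score: +100 when a pending module is already in the chunk, +10 when a file already in the chunk imports it, and +5 when the chunk is still small. An illustrative TypeScript restatement of that heuristic (the shapes are hypothetical; it mirrors the comments, not the Zig data structures):

```ts
// Hypothetical shapes standing in for the linker's chunk/module bookkeeping.
interface CandidateChunk {
  files: Set<number>;                // source indices already in the chunk
  importsOf(file: number): number[]; // source indices imported by a file
}

function scoreChunk(chunk: CandidateChunk, pendingModules: number[]): number {
  let score = 0;
  for (const moduleIndex of pendingModules) {
    for (const file of chunk.files) {
      if (file === moduleIndex) {
        score += 100; // module already present in this chunk
      } else if (chunk.importsOf(file).includes(moduleIndex)) {
        score += 10;  // imported by a file that is already in the chunk
        break;
      }
    }
  }
  if (chunk.files.size < 50) score += 5; // prefer smaller chunks to balance sizes
  return score;
}
```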
fn applyAdvancedChunks(
this: *LinkerContext,
js_chunks: *bun.StringArrayHashMap(Chunk),
advanced_opts: options.AdvancedChunksOptions,
file_entry_bits: []const AutoBitSet,
ast_targets: []const options.Target,
module_to_assigned: *AutoBitSet,
temp_allocator: std.mem.Allocator,
) !void {
// 1. Apply size-based filtering to existing chunks
if (advanced_opts.min_size) |min_size| {
try applyMinSizeConstraint(js_chunks, min_size);
}

if (advanced_opts.max_size) |max_size| {
try applyMaxSizeConstraint(js_chunks, max_size, temp_allocator);
}

// 2. Apply module grouping based on custom rules
if (advanced_opts.groups) |groups| {
try applyModuleGrouping(
this,
js_chunks,
groups,
file_entry_bits,
ast_targets,
module_to_assigned,
temp_allocator,
);
}

// 3. Apply share count filtering if specified
if (advanced_opts.min_share_count) |min_count| {
try applyShareCountFiltering(js_chunks, min_count, this.graph.files.items(.share_count));
}
}

fn applyMinSizeConstraint(
js_chunks: *bun.StringArrayHashMap(Chunk),
min_size: f64,
) !void {
if (min_size < 0) {
return;
}
_ = js_chunks;
}

fn applyMaxSizeConstraint(
js_chunks: *bun.StringArrayHashMap(Chunk),
max_size: f64,
temp_allocator: std.mem.Allocator,
) !void {
if (max_size <= 0) {
return;
}
_ = js_chunks;
_ = temp_allocator;
}

fn applyModuleGrouping(
this: *LinkerContext,
js_chunks: *bun.StringArrayHashMap(Chunk),
groups: []const options.MatchGroup,
file_entry_bits: []const AutoBitSet,
ast_targets: []const options.Target,
module_to_assigned: *AutoBitSet,
temp_allocator: std.mem.Allocator,
) !void {
// Sort groups by priority (higher priority first)
var sorted_groups = try temp_allocator.alloc(options.MatchGroup, groups.len);
@memcpy(sorted_groups, groups);

// Simple priority-based sorting
for (sorted_groups, 0..) |_, i| {
for (sorted_groups[i + 1 ..], i + 1..) |other_group, j| {
const priority_i = sorted_groups[i].priority orelse 0;
const priority_j = other_group.priority orelse 0;
if (priority_j > priority_i) {
// Swap
const temp = sorted_groups[i];
sorted_groups[i] = sorted_groups[j];
sorted_groups[j] = temp;
}
}
}

// Apply each group in priority order
for (sorted_groups) |group| {
try applyGroupRule(
this,
js_chunks,
group,
file_entry_bits,
ast_targets,
module_to_assigned,
temp_allocator,
);
}
}
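The nested loop above is a simple O(n²) selection-style sort by descending priority, with a missing priority treated as 0. Restated in TypeScript just to make the intended ordering explicit:

```ts
interface Group { name: string; priority?: number; }
const groups: Group[] = [{ name: "low", priority: 1 }, { name: "high", priority: 100 }, { name: "none" }];

// Same ordering as the Zig loop above: higher priority first, missing priority treated as 0.
const sorted = [...groups].sort((a, b) => (b.priority ?? 0) - (a.priority ?? 0));
console.log(sorted.map(g => g.name)); // ["high", "low", "none"]
```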
fn applyGroupRule(
this: *LinkerContext,
js_chunks: *bun.StringArrayHashMap(Chunk),
group: options.MatchGroup,
file_entry_bits: []const AutoBitSet,
ast_targets: []const options.Target,
module_to_assigned: *AutoBitSet,
temp_allocator: std.mem.Allocator,
) !void {
_ = this;
_ = js_chunks;
_ = file_entry_bits;
_ = ast_targets;
_ = module_to_assigned;
_ = temp_allocator;

if (group.name.len == 0) {
return;
}

if (group.test_pattern) |_| {}
if (group.type_) |group_type| {
switch (group_type) {
.javascript, .css, .asset, .all => {},
}
}
}

fn applyShareCountFiltering(
js_chunks: *bun.StringArrayHashMap(Chunk),
min_count: u32,
share_counts: []const u32,
) !void {
if (min_count == 0) {
return;
}
_ = js_chunks;
_ = share_counts;
}

const JSChunkKeyFormatter = struct {
has_html: bool,
entry_bits: []const u8,

@@ -290,6 +290,8 @@ fn computeCrossChunkDependenciesWithChunkMetas(c: *LinkerContext, chunks: []Chun
// Generate cross-chunk exports. These must be computed before cross-chunk
// imports because of export alias renaming, which must consider all export
// aliases simultaneously to avoid collisions.
//
// This is similar to Rolldown's deconflictExportedNames pass
{
bun.assert(chunk_metas.len == chunks.len);
var r = renamer.ExportRenamer.init(c.allocator);

@@ -56,6 +56,7 @@ pub fn findImportedPartsInJSOrder(
c: *LinkerContext,
entry_point: Chunk.EntryPoint,
chunk_index: u32,
chunk: *Chunk,

fn appendOrExtendRange(
ranges: *std.ArrayList(PartRange),

@@ -91,7 +92,9 @@ pub fn findImportedPartsInJSOrder(

var is_file_in_chunk = if (with_code_splitting and v.c.graph.ast.items(.css)[source_index] == null)
// when code splitting, include the file in the chunk if ALL of the entry points overlap
v.entry_bits.eql(&v.c.graph.files.items(.entry_bits)[source_index])
// OR if the file was explicitly added to this chunk (via chunk extension optimization)
v.entry_bits.eql(&v.c.graph.files.items(.entry_bits)[source_index]) or
v.chunk.files_with_parts_in_chunk.contains(source_index)
else
// when NOT code splitting, include the file in the chunk if ANY of the entry points overlap
v.entry_bits.hasIntersection(&v.c.graph.files.items(.entry_bits)[source_index]);

@@ -175,6 +178,7 @@ pub fn findImportedPartsInJSOrder(
.c = this,
.entry_point = chunk.entry_point,
.chunk_index = chunk_index,
.chunk = chunk,
};
defer {
part_ranges_shared.* = visitor.part_ranges;

@@ -424,6 +424,7 @@ pub const Command = struct {
server_components: bool = false,
react_fast_refresh: bool = false,
code_splitting: bool = false,
preserve_entry_signatures: options.PreserveEntrySignatures = .allow_extension,
transform_only: bool = false,
inline_entrypoint_import_meta_main: bool = false,
minify_syntax: bool = false,

@@ -150,6 +150,7 @@ pub const build_only_params = [_]ParamType{
clap.parseParam("--format <STR> Specifies the module format to build to. \"esm\", \"cjs\" and \"iife\" are supported. Defaults to \"esm\".") catch unreachable,
clap.parseParam("--root <STR> Root directory used for multiple entry points") catch unreachable,
clap.parseParam("--splitting Enable code splitting") catch unreachable,
clap.parseParam("--preserve-entry-signatures <STR> Control if entry chunks can be extended. Options: \"strict\", \"allow-extension\", \"exports-only\", \"false\". Default: \"allow-extension\"") catch unreachable,
clap.parseParam("--public-path <STR> A prefix to be appended to any import paths in bundled code") catch unreachable,
clap.parseParam("-e, --external <STR>... Exclude module from transpilation (can use * wildcards). ex: -e react") catch unreachable,
clap.parseParam("--packages <STR> Add dependencies to bundle or keep them external. \"external\", \"bundle\" is supported. Defaults to \"bundle\".") catch unreachable,

@@ -938,6 +939,21 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
ctx.bundler_options.code_splitting = true;
}

if (args.option("--preserve-entry-signatures")) |preserve| {
if (strings.eqlComptime(preserve, "strict")) {
ctx.bundler_options.preserve_entry_signatures = .strict;
} else if (strings.eqlComptime(preserve, "allow-extension")) {
ctx.bundler_options.preserve_entry_signatures = .allow_extension;
} else if (strings.eqlComptime(preserve, "exports-only")) {
ctx.bundler_options.preserve_entry_signatures = .exports_only;
} else if (strings.eqlComptime(preserve, "false")) {
ctx.bundler_options.preserve_entry_signatures = .false;
} else {
Output.prettyErrorln("<r><red>error<r>: Invalid preserve-entry-signatures setting: \"{s}\"", .{preserve});
Global.crash();
}
}

if (args.option("--entry-naming")) |entry_naming| {
ctx.bundler_options.entry_naming = try strings.concat(allocator, &.{ "./", bun.strings.removeLeadingDotSlash(entry_naming) });
}

@@ -93,6 +93,7 @@ pub const BuildCommand = struct {
this_transpiler.options.react_fast_refresh = ctx.bundler_options.react_fast_refresh;
this_transpiler.options.inline_entrypoint_import_meta_main = ctx.bundler_options.inline_entrypoint_import_meta_main;
this_transpiler.options.code_splitting = ctx.bundler_options.code_splitting;
this_transpiler.options.preserve_entry_signatures = ctx.bundler_options.preserve_entry_signatures;
this_transpiler.options.minify_syntax = ctx.bundler_options.minify_syntax;
this_transpiler.options.minify_whitespace = ctx.bundler_options.minify_whitespace;
this_transpiler.options.minify_identifiers = ctx.bundler_options.minify_identifiers;
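Based on the clap parameter and the parsing branch above, the new mode should be selectable from the CLI with something like `bun build ./entry.js --splitting --preserve-entry-signatures exports-only`; any other value hits the error branch above and crashes with "Invalid preserve-entry-signatures setting".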
144 src/options.zig
@@ -1673,6 +1673,148 @@ pub const PackagesOption = enum {
});
};

pub const PreserveEntrySignatures = enum {
/// Entry exports are always preserved (rolldown "strict")
strict,
/// Allow adding additional modules to entry chunks (rolldown "allow-extension")
allow_extension,
/// Only the specific exports from the entry module are preserved (rolldown "exports-only")
exports_only,
/// Entry exports are not preserved, allows maximum optimization (rolldown "false")
false,

pub fn fromApi(preserve: ?Api.PreserveEntrySignatures) PreserveEntrySignatures {
return switch (preserve orelse .allow_extension) {
.strict => .strict,
.allow_extension => .allow_extension,
.exports_only => .exports_only,
.false => .false,
else => .allow_extension,
};
}

pub fn toAPI(preserve: ?PreserveEntrySignatures) Api.PreserveEntrySignatures {
return switch (preserve orelse .allow_extension) {
.strict => .strict,
.allow_extension => .allow_extension,
.exports_only => .exports_only,
.false => .false,
};
}

pub const Map = bun.ComptimeStringMap(PreserveEntrySignatures, .{
.{ "strict", .strict },
.{ "allow-extension", .allow_extension },
.{ "exports-only", .exports_only },
.{ "false", .false },
});
};

pub const AdvancedChunksOptions = struct {
/// Minimum number of entry points that must share a module for it to be in a common chunk
min_share_count: ?u32 = null,

/// Minimum size for a chunk in bytes
min_size: ?f64 = null,

/// Maximum size for a chunk in bytes
max_size: ?f64 = null,

/// Minimum size for a module to be considered for chunking
min_module_size: ?f64 = null,

/// Maximum size for a module to be included in a chunk
max_module_size: ?f64 = null,

/// Custom grouping rules
groups: ?[]const MatchGroup = null,

pub fn deinit(self: *AdvancedChunksOptions, allocator: std.mem.Allocator) void {
if (self.groups) |groups| {
allocator.free(groups);
self.groups = null;
}
}

pub fn fromApi(api_options: ?Api.AdvancedChunksOptions) ?AdvancedChunksOptions {
const opts = api_options orelse return null;
return AdvancedChunksOptions{
.min_share_count = opts.min_share_count,
.min_size = opts.min_size,
.max_size = opts.max_size,
.min_module_size = opts.min_module_size,
.max_module_size = opts.max_module_size,
.groups = if (opts.groups) |groups| blk: {
const result = bun.default_allocator.alloc(MatchGroup, groups.len) catch unreachable;
for (groups, result) |api_group, *group| {
group.* = MatchGroup.fromApi(api_group);
}
break :blk result;
} else null,
};
}
};

pub const MatchGroup = struct {
/// Name of the group
name: []const u8,

/// Test pattern (regex or function)
test_pattern: ?[]const u8 = null,

/// Priority for group matching (higher priority groups are matched first)
priority: ?u32 = null,

/// Type of modules to include
type_: ?MatchGroupModuleType = null,

/// Minimum size for the group
min_size: ?f64 = null,

/// Maximum size for the group
max_size: ?f64 = null,

/// Minimum number of modules in the group
min_chunks: ?u32 = null,

/// Maximum number of modules in the group
max_chunks: ?u32 = null,

/// Enforce creation of this group even if it would be empty
enforce: bool = false,

pub fn fromApi(api_group: Api.MatchGroup) MatchGroup {
return MatchGroup{
.name = api_group.name,
.test_pattern = api_group.test_pattern,
.priority = api_group.priority,
.type_ = if (api_group.type_) |t| MatchGroupModuleType.fromApi(t) else null,
.min_size = api_group.min_size,
.max_size = api_group.max_size,
.min_chunks = api_group.min_chunks,
.max_chunks = api_group.max_chunks,
.enforce = api_group.enforce,
};
}
};

pub const MatchGroupModuleType = enum {
javascript,
css,
asset,
all,

pub fn fromApi(api_type: Api.MatchGroupModuleType) MatchGroupModuleType {
return switch (api_type) {
.javascript => .javascript,
.css => .css,
.asset => .asset,
.all => .all,
else => .all,
};
}
};

/// BundleOptions is used when ResolveMode is not set to "disable".
/// BundleOptions is effectively webpack + babel
pub const BundleOptions = struct {

@@ -1746,6 +1888,8 @@ pub const BundleOptions = struct {
code_splitting: bool = false,
source_map: SourceMapOption = SourceMapOption.none,
packages: PackagesOption = PackagesOption.bundle,
preserve_entry_signatures: PreserveEntrySignatures = .allow_extension,
advanced_chunks: ?AdvancedChunksOptions = null,

disable_transpilation: bool = false,
358 test/bundler/advanced-chunks-edge-cases.test.ts (new file)
@@ -0,0 +1,358 @@
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";

describe("Advanced Chunks Edge Cases", () => {
test("should handle empty advancedChunks config", async () => {
const dir = tempDirWithFiles("advanced-chunks-empty", {
"entry.js": `console.log("entry");`,
"build.js": `
const result = await Bun.build({
entrypoints: ["./entry.js"],
splitting: true,
outdir: "./out",
advancedChunks: {}
});
console.log("Empty config result:", result.success);
`,
});

await using proc = Bun.spawn({
cmd: [bunExe(), "build.js"],
env: bunEnv,
cwd: dir,
});

const [stdout, stderr, exitCode] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
proc.exited,
]);

expect(exitCode).toBe(0);
expect(stdout).toContain("Empty config result: true");
expect(stderr).toBe("");
});

test("should handle negative size constraints gracefully", async () => {
const dir = tempDirWithFiles("advanced-chunks-negative", {
"entry.js": `console.log("entry");`,
"build.js": `
const result = await Bun.build({
entrypoints: ["./entry.js"],
splitting: true,
outdir: "./out",
advancedChunks: {
minSize: -100,
maxSize: -50
}
});
console.log("Negative size result:", result.success);
`,
});

await using proc = Bun.spawn({
cmd: [bunExe(), "build.js"],
env: bunEnv,
cwd: dir,
});

const [stdout, stderr, exitCode] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
proc.exited,
]);

expect(exitCode).toBe(0);
expect(stdout).toContain("Negative size result: true");
});

test("should handle groups with missing name gracefully", async () => {
const dir = tempDirWithFiles("advanced-chunks-no-name", {
"entry.js": `console.log("entry");`,
"build.js": `
const result = await Bun.build({
entrypoints: ["./entry.js"],
splitting: true,
outdir: "./out",
advancedChunks: {
groups: [
{
test: "test",
priority: 10
}
]
}
});
console.log("Missing name result:", result.success);
`,
});

await using proc = Bun.spawn({
cmd: [bunExe(), "build.js"],
env: bunEnv,
cwd: dir,
});

const [stdout, stderr, exitCode] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
proc.exited,
]);

// The parsing currently allows missing name and handles it gracefully
expect(exitCode).toBe(0);
expect(stdout).toContain("Missing name result: true");
});

test("should handle groups with all optional fields", async () => {
const dir = tempDirWithFiles("advanced-chunks-all-fields", {
"entry.js": `console.log("entry");`,
"module1.js": `console.log("module1");`,
"module2.js": `console.log("module2");`,
"build.js": `
const result = await Bun.build({
entrypoints: ["./entry.js"],
splitting: true,
outdir: "./out",
advancedChunks: {
minShareCount: 1,
minSize: 0,
maxSize: Number.MAX_SAFE_INTEGER,
minModuleSize: 0,
maxModuleSize: Number.MAX_SAFE_INTEGER,
groups: [
{
name: "test-group",
test: "module",
priority: 100,
type: "javascript",
minSize: 0,
maxSize: Number.MAX_SAFE_INTEGER,
minChunks: 0,
maxChunks: 1000,
enforce: true
}
]
}
});
console.log("All fields result:", result.success);
`,
});

await using proc = Bun.spawn({
cmd: [bunExe(), "build.js"],
env: bunEnv,
cwd: dir,
});

const [stdout, stderr, exitCode] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
proc.exited,
]);

expect(exitCode).toBe(0);
expect(stdout).toContain("All fields result: true");
});

test("should work with preserveEntrySignatures and advancedChunks together", async () => {
const dir = tempDirWithFiles("advanced-chunks-with-preserve", {
"entry1.js": `
import "./shared.js";
export const entry1 = true;
`,
"entry2.js": `
import "./shared.js";
export const entry2 = true;
`,
"shared.js": `
console.log("shared");
`,
"build.js": `
const result = await Bun.build({
entrypoints: ["./entry1.js", "./entry2.js"],
splitting: true,
outdir: "./out",
preserveEntrySignatures: "strict",
advancedChunks: {
minShareCount: 2,
groups: [
{
name: "shared-group",
test: "shared"
}
]
}
});
console.log("Combined result:", result.success);
console.log("Output count:", result.outputs.length);
`,
});

await using proc = Bun.spawn({
cmd: [bunExe(), "build.js"],
env: bunEnv,
cwd: dir,
});

const [stdout, stderr, exitCode] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
proc.exited,
]);

expect(exitCode).toBe(0);
expect(stdout).toContain("Combined result: true");
expect(stdout).toContain("Output count: 3"); // strict mode prevents merging
});

test("should handle groups with different priorities", async () => {
const dir = tempDirWithFiles("advanced-chunks-priorities", {
"entry.js": `
import "./module1.js";
import "./module2.js";
import "./module3.js";
`,
"module1.js": `console.log("module1");`,
"module2.js": `console.log("module2");`,
"module3.js": `console.log("module3");`,
"build.js": `
const result = await Bun.build({
entrypoints: ["./entry.js"],
splitting: true,
outdir: "./out",
advancedChunks: {
groups: [
{
name: "low-priority",
test: "module",
priority: 1
},
{
name: "high-priority",
test: "module1",
priority: 100
},
{
name: "medium-priority",
test: "module2",
priority: 50
}
]
}
});
console.log("Priority test:", result.success);
`,
});

await using proc = Bun.spawn({
cmd: [bunExe(), "build.js"],
env: bunEnv,
cwd: dir,
});

const [stdout, stderr, exitCode] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
proc.exited,
]);

expect(exitCode).toBe(0);
expect(stdout).toContain("Priority test: true");
});

test("should handle numeric constraints at boundaries", async () => {
const dir = tempDirWithFiles("advanced-chunks-boundaries", {
"entry.js": `console.log("entry");`,
"build.js": `
const result = await Bun.build({
entrypoints: ["./entry.js"],
splitting: true,
outdir: "./out",
advancedChunks: {
minShareCount: 0,
minSize: 0,
maxSize: Number.MAX_SAFE_INTEGER,
minModuleSize: Number.MIN_VALUE,
maxModuleSize: Number.MAX_VALUE,
groups: [
{
name: "boundary-test",
priority: Number.MAX_SAFE_INTEGER,
minChunks: 0,
maxChunks: Number.MAX_SAFE_INTEGER
}
]
}
});
console.log("Boundary test:", result.success);
`,
});

await using proc = Bun.spawn({
cmd: [bunExe(), "build.js"],
env: bunEnv,
cwd: dir,
});

const [stdout, stderr, exitCode] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
proc.exited,
]);

expect(exitCode).toBe(0);
expect(stdout).toContain("Boundary test: true");
});

test("should handle different module types", async () => {
const dir = tempDirWithFiles("advanced-chunks-types", {
"entry.js": `
import "./style.css";
import "./data.json";
console.log("entry");
`,
"style.css": `body { color: red; }`,
"data.json": `{"key": "value"}`,
"build.js": `
const result = await Bun.build({
entrypoints: ["./entry.js"],
splitting: true,
outdir: "./out",
advancedChunks: {
groups: [
{
name: "styles",
type: "css"
},
{
name: "data",
type: "asset"
},
{
name: "scripts",
type: "javascript"
}
]
}
});
console.log("Type test:", result.success);
`,
});

await using proc = Bun.spawn({
cmd: [bunExe(), "build.js"],
env: bunEnv,
cwd: dir,
});

const [stdout, stderr, exitCode] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
proc.exited,
]);

expect(exitCode).toBe(0);
expect(stdout).toContain("Type test: true");
});
});
246 test/bundler/advanced-chunks.test.ts (new file)
@@ -0,0 +1,246 @@
|
||||
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";

describe("Advanced Chunks", () => {
  test("should accept advancedChunks config option", async () => {
    const dir = tempDirWithFiles("advanced-chunks-basic", {
      "entry1.js": `
        import "./shared.js";
        import "./unique1.js";
        console.log("entry1");
      `,
      "entry2.js": `
        import "./shared.js";
        import "./unique2.js";
        console.log("entry2");
      `,
      "shared.js": `
        console.log("shared");
      `,
      "unique1.js": `
        console.log("unique1");
      `,
      "unique2.js": `
        console.log("unique2");
      `,
      "build.js": `
        const result = await Bun.build({
          entrypoints: ["./entry1.js", "./entry2.js"],
          splitting: true,
          outdir: "./out",
          advancedChunks: {
            minShareCount: 2,
            minSize: 100,
            maxSize: 10000,
            groups: [
              {
                name: "shared-group",
                test: "shared",
                priority: 10,
                type: "javascript",
                enforce: true
              }
            ]
          }
        });
        console.log("Build successful:", result.success);
        console.log("Output count:", result.outputs.length);
      `,
    });

    await using proc = Bun.spawn({
      cmd: [bunExe(), "build.js"],
      env: bunEnv,
      cwd: dir,
    });

    const [stdout, stderr, exitCode] = await Promise.all([
      new Response(proc.stdout).text(),
      new Response(proc.stderr).text(),
      proc.exited,
    ]);

    expect(exitCode).toBe(0);
    expect(stdout).toContain("Build successful: true");
    expect(stderr).toBe("");
  });

  test("should accept advancedChunks via CLI", async () => {
    const dir = tempDirWithFiles("advanced-chunks-cli", {
      "entry1.js": `
        import "./shared.js";
        console.log("entry1");
      `,
      "entry2.js": `
        import "./shared.js";
        console.log("entry2");
      `,
      "shared.js": `
        console.log("shared");
      `,
    });

    // Test basic build with advancedChunks placeholder
    await using proc = Bun.spawn({
      cmd: [bunExe(), "build", "entry1.js", "entry2.js", "--splitting", "--outdir=out"],
      env: bunEnv,
      cwd: dir,
    });

    const [stdout, stderr, exitCode] = await Promise.all([
      new Response(proc.stdout).text(),
      new Response(proc.stderr).text(),
      proc.exited,
    ]);

    expect(exitCode).toBe(0);
    expect(stderr).toBe("");
  });

  test("should handle invalid advancedChunks config gracefully", async () => {
    const dir = tempDirWithFiles("advanced-chunks-invalid", {
      "entry.js": `console.log("entry");`,
      "build.js": `
        try {
          const result = await Bun.build({
            entrypoints: ["./entry.js"],
            splitting: true,
            advancedChunks: {
              groups: [
                {
                  // Missing required 'name' field
                  test: "test"
                }
              ]
            }
          });
          console.log("Build result:", result.success);
        } catch (error) {
          console.log("Error caught:", error.message);
        }
      `,
    });

    await using proc = Bun.spawn({
      cmd: [bunExe(), "build.js"],
      env: bunEnv,
      cwd: dir,
    });

    const [stdout, stderr, exitCode] = await Promise.all([
      new Response(proc.stdout).text(),
      new Response(proc.stderr).text(),
      proc.exited,
    ]);

    // The build should either succeed (if validation is lenient) or fail gracefully
    expect(exitCode).toBeLessThanOrEqual(1);
  });

  test("should work with preserve-entry-signatures and advancedChunks", async () => {
    const dir = tempDirWithFiles("advanced-chunks-preserve", {
      "entry1.js": `
        import "./shared.js";
        export const value1 = "entry1";
      `,
      "entry2.js": `
        import "./shared.js";
        export const value2 = "entry2";
      `,
      "shared.js": `
        export const shared = "shared";
      `,
      "build.js": `
        const result = await Bun.build({
          entrypoints: ["./entry1.js", "./entry2.js"],
          splitting: true,
          outdir: "./out",
          preserveEntrySignatures: "allow-extension",
          advancedChunks: {
            minShareCount: 1,
            groups: [
              {
                name: "vendor",
                test: "shared",
                priority: 5
              }
            ]
          }
        });
        console.log("Build completed:", result.success);
        console.log("Outputs:", result.outputs.length);
      `,
    });

    await using proc = Bun.spawn({
      cmd: [bunExe(), "build.js"],
      env: bunEnv,
      cwd: dir,
    });

    const [stdout, stderr, exitCode] = await Promise.all([
      new Response(proc.stdout).text(),
      new Response(proc.stderr).text(),
      proc.exited,
    ]);

    expect(exitCode).toBe(0);
    expect(stdout).toContain("Build completed: true");
  });

  test("should handle all advancedChunks options", async () => {
    const dir = tempDirWithFiles("advanced-chunks-all-options", {
      "entry.js": `
        import "./module1.js";
        import "./module2.js";
        console.log("entry");
      `,
      "module1.js": `console.log("module1");`,
      "module2.js": `console.log("module2");`,
      "build.js": `
        const result = await Bun.build({
          entrypoints: ["./entry.js"],
          splitting: true,
          outdir: "./out",
          advancedChunks: {
            minShareCount: 1,
            minSize: 50,
            maxSize: 50000,
            minModuleSize: 10,
            maxModuleSize: 10000,
            groups: [
              {
                name: "modules",
                test: "module",
                priority: 1,
                type: "javascript",
                minSize: 20,
                maxSize: 5000,
                minChunks: 1,
                maxChunks: 10,
                enforce: false
              }
            ]
          }
        });
        console.log("Advanced chunks test passed:", result.success);
      `,
    });

    await using proc = Bun.spawn({
      cmd: [bunExe(), "build.js"],
      env: bunEnv,
      cwd: dir,
    });

    const [stdout, stderr, exitCode] = await Promise.all([
      new Response(proc.stdout).text(),
      new Response(proc.stderr).text(),
      proc.exited,
    ]);

    expect(exitCode).toBe(0);
    expect(stdout).toContain("Advanced chunks test passed: true");
    expect(stderr).toBe("");
  });
});
@@ -196,6 +196,7 @@ export interface BundlerTestInput {
  targetFromAPI?: "TargetWasConfigured";
  minifyWhitespace?: boolean;
  splitting?: boolean;
  preserveEntrySignatures?: "strict" | "allow-extension" | "exports-only" | "false";
  serverComponents?: boolean;
  treeShaking?: boolean;
  unsupportedCSSFeatures?: string[];
@@ -462,6 +463,7 @@ function expectBundled(
    snapshotSourceMap,
    sourceMap,
    splitting,
    preserveEntrySignatures,
    target,
    todo: notImplemented,
    treeShaking,
@@ -1029,6 +1031,7 @@ function expectBundled(
      outdir: generateOutput ? buildOutDir : undefined,
      sourcemap: sourceMap,
      splitting,
      preserveEntrySignatures,
      target,
      bytecode,
      publicPath,
358 test/bundler/splitting-preserve-entry-signatures.test.ts Normal file
@@ -0,0 +1,358 @@
import { describe } from "bun:test";
import { itBundled } from "./expectBundled";

// Tests for preserve_entry_signatures feature in code splitting
// This verifies how shared modules are handled with different preserve_entry_signatures settings

describe("bundler", () => {
  describe("preserve_entry_signatures", () => {
    // Test 1: Basic shared module with strict mode
    // In strict mode, shared modules should go to separate chunks
    itBundled("splitting/preserveEntrySignatures/strict-basic", {
      files: {
        "/entry-a.js": /* js */ `
          import { shared } from "./shared.js";
          export const a = "entry-a";
          console.log("Entry A:", shared);
        `,
        "/entry-b.js": /* js */ `
          import { shared } from "./shared.js";
          export const b = "entry-b";
          console.log("Entry B:", shared);
        `,
        "/shared.js": /* js */ `
          export const shared = "shared-value";
          console.log("Shared module loaded");
        `,
      },
      entryPoints: ["/entry-a.js", "/entry-b.js"],
      splitting: true,
      preserveEntrySignatures: "strict",
      run: [
        { file: "/out/entry-a.js", stdout: "Shared module loaded\nEntry A: shared-value" },
        { file: "/out/entry-b.js", stdout: "Shared module loaded\nEntry B: shared-value" },
      ],
      // In strict mode, the shared module should NOT be in the entry chunks
      assertNotPresent: {
        "/out/entry-a.js": "shared-value",
        "/out/entry-b.js": "shared-value",
      },
    });

    // Test 2: Same setup with allow-extension (default)
    // Shared modules can be merged into entry chunks
    itBundled("splitting/preserveEntrySignatures/allow-extension-basic", {
      files: {
        "/entry-a.js": /* js */ `
          import { shared } from "./shared.js";
          export const a = "entry-a";
          console.log("Entry A:", shared);
        `,
        "/entry-b.js": /* js */ `
          import { shared } from "./shared.js";
          export const b = "entry-b";
          console.log("Entry B:", shared);
        `,
        "/shared.js": /* js */ `
          export const shared = "shared-value";
          console.log("Shared module loaded");
        `,
      },
      entryPoints: ["/entry-a.js", "/entry-b.js"],
      splitting: true,
      preserveEntrySignatures: "allow-extension",
      run: [
        { file: "/out/entry-a.js", stdout: "Shared module loaded\nEntry A: shared-value" },
        { file: "/out/entry-b.js", stdout: "Shared module loaded\nEntry B: shared-value" },
      ],
      // With allow-extension, one of the entry chunks MAY contain the shared module
      // We can't assert presence/absence as it depends on the optimization
    });

    // Test 3: Multiple shared modules with complex dependencies
    itBundled("splitting/preserveEntrySignatures/strict-complex", {
      files: {
        "/entry-a.js": /* js */ `
          import { util1 } from "./util1.js";
          import { common } from "./common.js";
          export const a = util1 + common;
          console.log("A:", a);
        `,
        "/entry-b.js": /* js */ `
          import { util2 } from "./util2.js";
          import { common } from "./common.js";
          export const b = util2 + common;
          console.log("B:", b);
        `,
        "/entry-c.js": /* js */ `
          import { util1 } from "./util1.js";
          import { util2 } from "./util2.js";
          export const c = util1 + util2;
          console.log("C:", c);
        `,
        "/util1.js": /* js */ `
          import { common } from "./common.js";
          export const util1 = "util1-" + common;
        `,
        "/util2.js": /* js */ `
          import { common } from "./common.js";
          export const util2 = "util2-" + common;
        `,
        "/common.js": /* js */ `
          export const common = "common";
        `,
      },
      entryPoints: ["/entry-a.js", "/entry-b.js", "/entry-c.js"],
      splitting: true,
      preserveEntrySignatures: "strict",
      run: [
        { file: "/out/entry-a.js", stdout: "A: util1-commoncommon" },
        { file: "/out/entry-b.js", stdout: "B: util2-commoncommon" },
        { file: "/out/entry-c.js", stdout: "C: util1-commonutil2-common" },
      ],
      // In strict mode, shared modules should be in separate chunks
      assertNotPresent: {
        "/out/entry-a.js": ["util1-", "util2-"],
        "/out/entry-b.js": ["util1-", "util2-"],
        "/out/entry-c.js": ["common"],
      },
    });

    // Test 4: exports-only mode
    // Only the specific exports from the entry module are preserved
    itBundled("splitting/preserveEntrySignatures/exports-only", {
      files: {
        "/entry-a.js": /* js */ `
          import { shared } from "./shared.js";
          export const a = "entry-a";
          export function getShared() { return shared; }
          console.log("Entry A loaded");
        `,
        "/entry-b.js": /* js */ `
          import { shared } from "./shared.js";
          // No exports use shared
          console.log("Entry B:", shared);
        `,
        "/shared.js": /* js */ `
          export const shared = "shared-value";
          console.log("Shared loaded");
        `,
      },
      entryPoints: ["/entry-a.js", "/entry-b.js"],
      splitting: true,
      preserveEntrySignatures: "exports-only",
      runtimeFiles: {
        "/test.js": /* js */ `
          import { a, getShared } from "./out/entry-a.js";
          console.log("Imported a:", a);
          console.log("Shared via function:", getShared());
        `,
      },
      run: [
        { file: "/out/entry-a.js", stdout: "Shared loaded\nEntry A loaded" },
        { file: "/out/entry-b.js", stdout: "Shared loaded\nEntry B: shared-value" },
        {
          file: "/test.js",
          stdout: "Shared loaded\nEntry A loaded\nImported a: entry-a\nShared via function: shared-value",
        },
      ],
    });

    // Test 5: false mode - maximum optimization
    itBundled("splitting/preserveEntrySignatures/false", {
      files: {
        "/entry-a.js": /* js */ `
          import { shared } from "./shared.js";
          export const a = shared + "-a";
          console.log("A:", a);
        `,
        "/entry-b.js": /* js */ `
          import { shared } from "./shared.js";
          export const b = shared + "-b";
          console.log("B:", b);
        `,
        "/shared.js": /* js */ `
          export const shared = "shared";
        `,
      },
      entryPoints: ["/entry-a.js", "/entry-b.js"],
      splitting: true,
      preserveEntrySignatures: "false",
      run: [
        { file: "/out/entry-a.js", stdout: "A: shared-a" },
        { file: "/out/entry-b.js", stdout: "B: shared-b" },
      ],
      // With false, maximum optimization is allowed
    });

    // Test 6: Dynamic imports with different modes
    itBundled("splitting/preserveEntrySignatures/strict-dynamic", {
      files: {
        "/entry.js": /* js */ `
          export const entry = "main";
          import("./dynamic.js").then(m => console.log("Dynamic:", m.value));
        `,
        "/dynamic.js": /* js */ `
          import { shared } from "./shared.js";
          export const value = "dynamic-" + shared;
        `,
        "/shared.js": /* js */ `
          export const shared = "shared";
        `,
      },
      entryPoints: ["/entry.js"],
      splitting: true,
      outdir: "/out",
      preserveEntrySignatures: "strict",
      run: {
        file: "/out/entry.js",
        stdout: "Dynamic: dynamic-shared",
      },
      assertNotPresent: {
        "/out/entry.js": "shared",
      },
    });

    // Test 7: CommonJS interop with preserve_entry_signatures
    itBundled("splitting/preserveEntrySignatures/cjs-interop", {
      files: {
        "/entry-a.js": /* js */ `
          const { getValue } = require("./shared.cjs");
          export const a = "entry-a";
          console.log("A:", getValue());
        `,
        "/entry-b.js": /* js */ `
          const { getValue } = require("./shared.cjs");
          export const b = "entry-b";
          console.log("B:", getValue());
        `,
        "/shared.cjs": /* js */ `
          let value = 0;
          exports.getValue = () => ++value;
          console.log("Shared CJS loaded");
        `,
      },
      entryPoints: ["/entry-a.js", "/entry-b.js"],
      splitting: true,
      preserveEntrySignatures: "strict",
      runtimeFiles: {
        "/test.js": /* js */ `
          await import("./out/entry-a.js");
          await import("./out/entry-b.js");
        `,
      },
      run: {
        file: "/test.js",
        stdout: "Shared CJS loaded\nA: 1\nB: 2",
      },
    });

    // Test 8: Side effects preservation with different modes
    itBundled("splitting/preserveEntrySignatures/side-effects", {
      files: {
        "/entry-a.js": /* js */ `
          import "./side-effect.js";
          export const a = "a";
          console.log("Entry A");
        `,
        "/entry-b.js": /* js */ `
          import "./side-effect.js";
          export const b = "b";
          console.log("Entry B");
        `,
        "/side-effect.js": /* js */ `
          console.log("Side effect executed");
          globalThis.sideEffectCount = (globalThis.sideEffectCount || 0) + 1;
        `,
      },
      entryPoints: ["/entry-a.js", "/entry-b.js"],
      splitting: true,
      preserveEntrySignatures: "strict",
      runtimeFiles: {
        "/test.js": /* js */ `
          await import("./out/entry-a.js");
          await import("./out/entry-b.js");
          console.log("Side effect count:", globalThis.sideEffectCount);
        `,
      },
      run: {
        file: "/test.js",
        stdout: "Side effect executed\nEntry A\nEntry B\nSide effect count: 1",
      },
    });

    // Test 9: Circular dependencies with preserve_entry_signatures
    itBundled("splitting/preserveEntrySignatures/circular", {
      files: {
        "/entry-a.js": /* js */ `
          export * from "./module-a.js";
          console.log("Entry A");
        `,
        "/entry-b.js": /* js */ `
          export * from "./module-b.js";
          console.log("Entry B");
        `,
        "/module-a.js": /* js */ `
          export { b } from "./module-b.js";
          export const a = "a";
        `,
        "/module-b.js": /* js */ `
          export { a } from "./module-a.js";
          export const b = "b";
        `,
      },
      entryPoints: ["/entry-a.js", "/entry-b.js"],
      splitting: true,
      preserveEntrySignatures: "strict",
      runtimeFiles: {
        "/test.js": /* js */ `
          const modA = await import("./out/entry-a.js");
          const modB = await import("./out/entry-b.js");
          console.log("A exports:", Object.keys(modA).sort().join(","));
          console.log("B exports:", Object.keys(modB).sort().join(","));
          console.log("Values:", modA.a, modA.b, modB.a, modB.b);
        `,
      },
      run: {
        file: "/test.js",
        stdout: "Entry A\nEntry B\nA exports: a,b\nB exports: a,b\nValues: a b a b",
      },
    });

    // Test 10: Re-exports with different preserve modes
    itBundled("splitting/preserveEntrySignatures/reexports", {
      files: {
        "/entry.js": /* js */ `
          export { value as entryValue } from "./shared.js";
          export * from "./utils.js";
        `,
        "/another-entry.js": /* js */ `
          export { value } from "./shared.js";
          export { util1 } from "./utils.js";
        `,
        "/shared.js": /* js */ `
          export const value = "shared-value";
        `,
        "/utils.js": /* js */ `
          export const util1 = "util1";
          export const util2 = "util2";
        `,
      },
      entryPoints: ["/entry.js", "/another-entry.js"],
      splitting: true,
      preserveEntrySignatures: "exports-only",
      runtimeFiles: {
        "/test.js": /* js */ `
          const entry = await import("./out/entry.js");
          const another = await import("./out/another-entry.js");
          console.log("Entry exports:", Object.keys(entry).sort().join(","));
          console.log("Another exports:", Object.keys(another).sort().join(","));
        `,
      },
      run: {
        file: "/test.js",
        stdout: "Entry exports: entryValue,util1,util2\nAnother exports: util1,value",
      },
    });
  });
});
121 test/bundler/splitting-preserve-signatures-cli.test.ts Normal file
@@ -0,0 +1,121 @@
import { describe, expect, test } from "bun:test";
import { readdirSync } from "fs";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
import { join } from "path";

describe("bundler", () => {
  describe("preserve-entry-signatures CLI", () => {
    test("strict mode creates separate common chunk", async () => {
      const dir = tempDirWithFiles("preserve-sig-test", {
        "entry-a.js": `
          import { util } from "./util.js";
          export const a = "a" + util;
        `,
        "entry-b.js": `
          import { util } from "./util.js";
          export const b = "b" + util;
        `,
        "util.js": `export const util = "util";`,
      });

      const outDir = join(dir, "out");

      // Run build with strict mode
      await using proc = Bun.spawn({
        cmd: [
          bunExe(),
          "build",
          join(dir, "entry-a.js"),
          join(dir, "entry-b.js"),
          "--outdir",
          outDir,
          "--splitting",
          "--preserve-entry-signatures=strict",
        ],
        env: bunEnv,
        cwd: dir,
        stdout: "pipe",
        stderr: "pipe",
      });

      const [stdout, stderr, exitCode] = await Promise.all([
        new Response(proc.stdout).text(),
        new Response(proc.stderr).text(),
        proc.exited,
      ]);

      expect(exitCode).toBe(0);
      expect(stderr).toBe("");

      const files = readdirSync(outDir);
      // With strict mode: 2 entries + 1 common chunk
      expect(files.length).toBe(3);
      // Check we have both entry files
      expect(files).toContain("entry-a.js");
      expect(files).toContain("entry-b.js");

      // Check that common chunk has util
      const commonChunk = files.find(f => f !== "entry-a.js" && f !== "entry-b.js");
      expect(commonChunk).toBeTruthy();
      const commonContent = await Bun.file(join(outDir, commonChunk!)).text();
      expect(commonContent).toContain('util = "util"');
    });

    test("allow-extension mode reduces chunks", async () => {
      const dir = tempDirWithFiles("preserve-sig-test2", {
        "entry-a.js": `
          import { util } from "./util.js";
          export const a = "a" + util;
        `,
        "entry-b.js": `
          import { util } from "./util.js";
          export const b = "b" + util;
        `,
        "util.js": `export const util = "util";`,
      });

      const outDir = join(dir, "out");

      // Run build with allow-extension mode
      await using proc = Bun.spawn({
        cmd: [
          bunExe(),
          "build",
          join(dir, "entry-a.js"),
          join(dir, "entry-b.js"),
          "--outdir",
          outDir,
          "--splitting",
          "--preserve-entry-signatures=allow-extension",
        ],
        env: bunEnv,
        cwd: dir,
        stdout: "pipe",
        stderr: "pipe",
      });

      const [stdout, stderr, exitCode] = await Promise.all([
        new Response(proc.stdout).text(),
        new Response(proc.stderr).text(),
        proc.exited,
      ]);

      expect(exitCode).toBe(0);
      expect(stderr).toBe("");

      const files = readdirSync(outDir);
      // With allow-extension: only 2 entry files (util merged into one)
      expect(files.length).toBe(2);
      expect(files).toContain("entry-a.js");
      expect(files).toContain("entry-b.js");

      // One of the entries should have util
      const entryA = await Bun.file(join(outDir, "entry-a.js")).text();
      const entryB = await Bun.file(join(outDir, "entry-b.js")).text();
      const utilInA = entryA.includes('util = "util"');
      const utilInB = entryB.includes('util = "util"');

      expect(utilInA !== utilInB).toBe(true); // Exactly one has util
    });
  });
});
294 test/bundler/splitting-rolldown-optimization.test.ts Normal file
@@ -0,0 +1,294 @@
import { describe, expect, test } from "bun:test";
import { readdirSync, readFileSync } from "fs";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
import { join } from "path";

// Comprehensive test suite for the Rolldown chunk extension optimization
describe("bundler", () => {
  describe("Rolldown chunk extension optimization", () => {
    test("basic shared module - strict mode", async () => {
      const dir = tempDirWithFiles("rolldown-test1", {
        "entry-a.js": `
          import { shared } from "./shared.js";
          export function useA() { return "A uses " + shared(); }
        `,
        "entry-b.js": `
          import { shared } from "./shared.js";
          export function useB() { return "B uses " + shared(); }
        `,
        "shared.js": `export function shared() { return "shared-value"; }`,
      });

      const outDir = join(dir, "out");
      await using proc = Bun.spawn({
        cmd: [
          bunExe(),
          "build",
          "entry-a.js",
          "entry-b.js",
          "--outdir",
          outDir,
          "--splitting",
          "--preserve-entry-signatures=strict",
        ],
        env: bunEnv,
        cwd: dir,
      });

      expect(await proc.exited).toBe(0);
      const files = readdirSync(outDir);
      expect(files.length).toBe(3); // 2 entries + 1 common chunk
    });

    test("basic shared module - allow-extension mode", async () => {
      const dir = tempDirWithFiles("rolldown-test2", {
        "entry-a.js": `
          import { shared } from "./shared.js";
          export function useA() { return "A uses " + shared(); }
        `,
        "entry-b.js": `
          import { shared } from "./shared.js";
          export function useB() { return "B uses " + shared(); }
        `,
        "shared.js": `export function shared() { return "shared-value"; }`,
      });

      const outDir = join(dir, "out");
      await using proc = Bun.spawn({
        cmd: [
          bunExe(),
          "build",
          "entry-a.js",
          "entry-b.js",
          "--outdir",
          outDir,
          "--splitting",
          "--preserve-entry-signatures=allow-extension",
        ],
        env: bunEnv,
        cwd: dir,
      });

      expect(await proc.exited).toBe(0);
      const files = readdirSync(outDir);
      expect(files.length).toBe(2); // Optimization merged shared into an entry

      // Verify cross-imports
      const entryA = readFileSync(join(outDir, "entry-a.js"), "utf-8");
      const entryB = readFileSync(join(outDir, "entry-b.js"), "utf-8");

      // One should have the shared code, other should import
      const hasShared = (content: string) => content.includes("shared-value");
      expect(hasShared(entryA) !== hasShared(entryB)).toBe(true);
    });

    test("multiple shared modules", async () => {
      const dir = tempDirWithFiles("rolldown-test3", {
        "entry-a.js": `
          import { x } from "./x.js";
          import { y } from "./y.js";
          import { z } from "./z.js";
          export const a = x + y + z;
        `,
        "entry-b.js": `
          import { x } from "./x.js";
          import { y } from "./y.js";
          import { z } from "./z.js";
          export const b = x + y + z;
        `,
        "x.js": `export const x = "x";`,
        "y.js": `export const y = "y";`,
        "z.js": `export const z = "z";`,
      });

      const outDir = join(dir, "out");
      await using proc = Bun.spawn({
        cmd: [
          bunExe(),
          "build",
          "entry-a.js",
          "entry-b.js",
          "--outdir",
          outDir,
          "--splitting",
          "--preserve-entry-signatures=allow-extension",
        ],
        env: bunEnv,
        cwd: dir,
      });

      expect(await proc.exited).toBe(0);
      const files = readdirSync(outDir);
      expect(files.length).toBe(2); // All shared modules merged into one entry
    });

    test("partial sharing pattern", async () => {
      const dir = tempDirWithFiles("rolldown-test4", {
        "entry-a.js": `
          import { shared } from "./shared.js";
          import { onlyA } from "./only-a.js";
          export const a = shared + onlyA;
        `,
        "entry-b.js": `
          import { shared } from "./shared.js";
          import { onlyB } from "./only-b.js";
          export const b = shared + onlyB;
        `,
        "entry-c.js": `
          import { onlyC } from "./only-c.js";
          export const c = onlyC;
        `,
        "shared.js": `export const shared = "shared";`,
        "only-a.js": `export const onlyA = "only-a";`,
        "only-b.js": `export const onlyB = "only-b";`,
        "only-c.js": `export const onlyC = "only-c";`,
      });

      const outDir = join(dir, "out");
      await using proc = Bun.spawn({
        cmd: [
          bunExe(),
          "build",
          "entry-a.js",
          "entry-b.js",
          "entry-c.js",
          "--outdir",
          outDir,
          "--splitting",
          "--preserve-entry-signatures=allow-extension",
        ],
        env: bunEnv,
        cwd: dir,
      });

      expect(await proc.exited).toBe(0);
      const files = readdirSync(outDir);
      expect(files.length).toBe(3); // Each entry has its unique module

      // Verify shared module is in one of entry-a or entry-b
      const entryA = readFileSync(join(outDir, "entry-a.js"), "utf-8");
      const entryB = readFileSync(join(outDir, "entry-b.js"), "utf-8");
      const entryC = readFileSync(join(outDir, "entry-c.js"), "utf-8");

      const sharedInA = entryA.includes('shared = "shared"');
      const sharedInB = entryB.includes('shared = "shared"');
      const sharedInC = entryC.includes('shared = "shared"');

      expect(sharedInC).toBe(false); // C doesn't use shared
      expect(sharedInA !== sharedInB).toBe(true); // Exactly one of A or B has it
    });

    test("complex sharing with three entries", async () => {
      const dir = tempDirWithFiles("rolldown-test5", {
        "entry-a.js": `
          import { ab } from "./ab.js";
          import { abc } from "./abc.js";
          export const a = ab + abc;
        `,
        "entry-b.js": `
          import { ab } from "./ab.js";
          import { bc } from "./bc.js";
          import { abc } from "./abc.js";
          export const b = ab + bc + abc;
        `,
        "entry-c.js": `
          import { bc } from "./bc.js";
          import { abc } from "./abc.js";
          export const c = bc + abc;
        `,
        "ab.js": `export const ab = "shared-by-ab";`,
        "bc.js": `export const bc = "shared-by-bc";`,
        "abc.js": `export const abc = "shared-by-all";`,
      });

      const outDir = join(dir, "out");
      await using proc = Bun.spawn({
        cmd: [
          bunExe(),
          "build",
          "entry-a.js",
          "entry-b.js",
          "entry-c.js",
          "--outdir",
          outDir,
          "--splitting",
          "--preserve-entry-signatures=allow-extension",
        ],
        env: bunEnv,
        cwd: dir,
      });

      expect(await proc.exited).toBe(0);
      const files = readdirSync(outDir);

      // With optimization, shared modules should be consolidated
      expect(files.length).toBeLessThan(6); // Less than 3 entries + 3 shared
      expect(files).toContain("entry-a.js");
      expect(files).toContain("entry-b.js");
      expect(files).toContain("entry-c.js");

      // Count how many files contain each shared module
      let abCount = 0,
        bcCount = 0,
        abcCount = 0;
      for (const file of files) {
        const content = readFileSync(join(outDir, file), "utf-8");
        if (content.includes("shared-by-ab")) abCount++;
        if (content.includes("shared-by-bc")) bcCount++;
        if (content.includes("shared-by-all")) abcCount++;
      }

      // Each shared module should appear exactly once
      expect(abCount).toBe(1);
      expect(bcCount).toBe(1);
      expect(abcCount).toBe(1);
    });

    test("preserveEntrySignatures options", async () => {
      const createTest = async (mode: string) => {
        const dir = tempDirWithFiles(`rolldown-test-${mode}`, {
          "entry.js": `
            import { util } from "./util.js";
            export default util;
          `,
          "other.js": `
            import { util } from "./util.js";
            export const other = util;
          `,
          "util.js": `export const util = "util";`,
        });

        const outDir = join(dir, "out");
        await using proc = Bun.spawn({
          cmd: [
            bunExe(),
            "build",
            "entry.js",
            "other.js",
            "--outdir",
            outDir,
            "--splitting",
            `--preserve-entry-signatures=${mode}`,
          ],
          env: bunEnv,
          cwd: dir,
        });

        expect(await proc.exited).toBe(0);
        return readdirSync(outDir).length;
      };

      const strictCount = await createTest("strict");
      const allowCount = await createTest("allow-extension");
      const exportsCount = await createTest("exports-only");
      const falseCount = await createTest("false");

      // Strict should create the most chunks (no optimization)
      expect(strictCount).toBe(3);
      // Others should create fewer chunks (optimization enabled)
      expect(allowCount).toBe(2);
      expect(exportsCount).toBe(2);
      expect(falseCount).toBe(2);
    });
  });
});