mirror of https://github.com/oven-sh/bun
synced 2026-02-19 07:12:24 +00:00

Compare commits: dylan/fix-... → jarred/bar — 7 commits

- 474e66462e
- 01fadc07e0
- 2f3c9e86a3
- ce07c4dedf
- c515758c8c
- 6698f60e79
- c1f89b3e8d
@@ -1291,6 +1291,28 @@ declare module "bun:bundle" {

Ensure the file is included in your `tsconfig.json` (e.g., `"include": ["src", "env.d.ts"]`). Now `feature()` only accepts those flags, and invalid strings like `feature("TYPO")` become type errors.

### optimizeImports

Skip parsing unused submodules of barrel files (re-export index files). When you import only a few named exports from a large library, the bundler normally parses every file the barrel re-exports. With `optimizeImports`, only the submodules you actually use are parsed.

```ts title="build.ts" icon="/icons/typescript.svg"
await Bun.build({
  entrypoints: ["./app.ts"],
  outdir: "./out",
  optimizeImports: ["antd", "@mui/material", "lodash-es"],
});
```

For example, `import { Button } from 'antd'` normally parses all ~3000 modules that `antd/index.js` re-exports. With `optimizeImports: ['antd']`, only the `Button` submodule is parsed.

This works for **pure barrel files** — files where every named export is a re-export (`export { X } from './x'`). If a barrel file has any local exports (`export const foo = ...`), or if any importer uses `import *`, all submodules are loaded.
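
To make the distinction concrete, here is a minimal sketch (the file contents are hypothetical, not taken from any real package). The first barrel qualifies; the second does not:

```ts
// index.ts: a pure barrel. Every named export is a re-export, so an
// unused submodule like './chart' is never parsed.
export { Button } from "./button";
export { Chart } from "./chart";
```

```ts
// index.ts: NOT a pure barrel. The local `export const` disqualifies
// the file, so all re-exported submodules are loaded.
export { Button } from "./button";
export const version = "1.0.0";
```
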
`export *` re-exports are always loaded (never deferred) to avoid circular resolution issues. Only named re-exports (`export { X } from './x'`) that aren't used by any importer are deferred.
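
For example, in a hypothetical barrel, only the named re-export can be deferred; the `export *` record is always resolved:

```ts
export { Button } from "./button"; // named re-export: deferred if unused
export * from "./icons"; // export *: always loaded
```
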

**Automatic mode:** Packages with `"sideEffects": false` in their `package.json` get barrel optimization automatically — no `optimizeImports` config needed. Use `optimizeImports` for packages that don't have this field.

**Plugins:** Resolve and load plugins work correctly with barrel optimization. Deferred submodules go through the plugin pipeline when they are eventually loaded.
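
As a rough sketch of how a load plugin composes with this (the package name `my-lib` and the filter are hypothetical):

```ts
await Bun.build({
  entrypoints: ["./app.ts"],
  outdir: "./out",
  optimizeImports: ["my-lib"],
  plugins: [
    {
      name: "stub-heavy-submodule",
      setup(build) {
        // A deferred submodule that later becomes needed still passes
        // through onLoad before it is parsed, like any other module.
        build.onLoad({ filter: /heavy\.ts$/ }, () => ({
          contents: "export const Heavy = () => null;",
          loader: "ts",
        }));
      },
    },
  ],
});
```
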

### metafile

Generate metadata about the build in a structured format. The metafile contains information about all input files, output files, their sizes, imports, and exports. This is useful for:

packages/bun-types/bun.d.ts (vendored, 19 lines changed)
@@ -2644,6 +2644,25 @@ declare module "bun" {
   */
  features?: string[];

  /**
   * List of package names whose barrel files (re-export index files) should
   * be optimized. When a named import comes from one of these packages,
   * only the submodules actually used are parsed — unused re-exports are
   * skipped entirely.
   *
   * This is also enabled automatically for any package with
   * `"sideEffects": false` in its `package.json`.
   *
   * @example
   * ```ts
   * await Bun.build({
   *   entrypoints: ['./app.ts'],
   *   optimizeImports: ['antd', '@mui/material', 'lodash-es'],
   * });
   * ```
   */
  optimizeImports?: string[];

  /**
   * - When set to `true`, the returned promise rejects with an AggregateError when a build failure happens.
   * - When set to `false`, returns a {@link BuildOutput} with `{success: false}`
@@ -67,6 +67,15 @@ route_bundles: ArrayListUnmanaged(RouteBundle),
graph_safety_lock: bun.safety.ThreadLock,
client_graph: IncrementalGraph(.client),
server_graph: IncrementalGraph(.server),
/// Barrel files with deferred (is_unused) import records. These files must
/// be re-parsed on every incremental build because the set of needed exports
/// may have changed. Populated by applyBarrelOptimization.
barrel_files_with_deferrals: bun.StringArrayHashMapUnmanaged(void) = .{},
/// Accumulated barrel export requests across all builds. Maps barrel file
/// path → set of export names that have been requested. This ensures that
/// when a barrel is re-parsed in an incremental build, exports requested
/// by non-stale files (from previous builds) are still kept.
barrel_needed_exports: bun.StringArrayHashMapUnmanaged(bun.StringHashMapUnmanaged(void)) = .{},
/// State populated during bundling and hot updates. Often cleared
incremental_result: IncrementalResult,
/// Quickly retrieve a framework route's index from its entry point file. These
@@ -616,6 +625,23 @@ pub fn deinit(dev: *DevServer) void {
    },
    .server_graph = dev.server_graph.deinit(),
    .client_graph = dev.client_graph.deinit(),
    .barrel_files_with_deferrals = {
        for (dev.barrel_files_with_deferrals.keys()) |key| {
            alloc.free(key);
        }
        dev.barrel_files_with_deferrals.deinit(alloc);
    },
    .barrel_needed_exports = {
        var it = dev.barrel_needed_exports.iterator();
        while (it.next()) |entry| {
            var inner = entry.value_ptr.*;
            var inner_it = inner.keyIterator();
            while (inner_it.next()) |k| alloc.free(k.*);
            inner.deinit(alloc);
            alloc.free(entry.key_ptr.*);
        }
        dev.barrel_needed_exports.deinit(alloc);
    },
    .assets = dev.assets.deinit(alloc),
    .incremental_result = useAllFields(IncrementalResult, .{
        .had_adjusted_edges = {},
@@ -2192,6 +2218,7 @@ pub fn finalizeBundle(
) bun.JSError!void {
    assert(dev.magic == .valid);
    var had_sent_hmr_event = false;

    defer {
        var heap = bv2.graph.heap;
        bv2.deinitWithoutFreeingArena();
@@ -3070,6 +3097,11 @@ const CacheEntry = struct {
};

pub fn isFileCached(dev: *DevServer, path: []const u8, side: bake.Graph) ?CacheEntry {
    // Barrel files with deferred records must always be re-parsed so the
    // barrel optimization can evaluate updated requested_exports.
    if (dev.barrel_files_with_deferrals.contains(path))
        return null;

    dev.graph_safety_lock.lock();
    defer dev.graph_safety_lock.unlock();
@@ -79,6 +79,8 @@ pub fn memoryCostDetailed(dev: *DevServer) MemoryCost {
        js_code += cost.code;
        source_maps += cost.source_maps;
    },
    .barrel_files_with_deferrals = {},
    .barrel_needed_exports = {},
    .assets = {
        assets += dev.assets.memoryCost();
    },
@@ -260,6 +260,10 @@ pub const JSBundler = struct {
files: FileMap = .{},
/// Generate metafile (JSON module graph)
metafile: bool = false,
/// Package names whose barrel files should be optimized.
/// Named imports from these packages will only load the submodules
/// that are actually used instead of parsing all re-exported submodules.
optimize_imports: bun.StringSet = bun.StringSet.init(bun.default_allocator),

pub const CompileOptions = struct {
    compile_target: CompileTarget = .{},
@@ -435,6 +439,7 @@ pub const JSBundler = struct {
var this = Config{
    .entry_points = bun.StringSet.init(allocator),
    .external = bun.StringSet.init(allocator),
    .optimize_imports = bun.StringSet.init(allocator),
    .define = bun.StringMap.init(allocator, true),
    .dir = OwnedString.initEmpty(allocator),
    .outdir = OwnedString.initEmpty(allocator),
@@ -819,6 +824,15 @@ pub const JSBundler = struct {
    }
}

if (try config.getOwnArray(globalThis, "optimizeImports")) |optimize_imports| {
    var iter = try optimize_imports.arrayIterator(globalThis);
    while (try iter.next()) |entry| {
        var slice = try entry.toSliceOrNull(globalThis);
        defer slice.deinit();
        try this.optimize_imports.insert(slice.slice());
    }
}

// if (try config.getOptional(globalThis, "dir", ZigString.Slice)) |slice| {
//     defer slice.deinit();
//     this.appendSliceExact(slice.slice()) catch unreachable;
@@ -1103,6 +1117,7 @@ pub const JSBundler = struct {
self.env_prefix.deinit();
self.footer.deinit();
self.tsconfig_override.deinit();
self.optimize_imports.deinit();
self.files.deinitAndUnprotect();
self.metafile_json_path.deinit();
self.metafile_markdown_path.deinit();
@@ -76,7 +76,14 @@ pub const InputFile = struct {
additional_files: BabyList(AdditionalFile) = .{},
unique_key_for_additional_file: string = "",
content_hash_for_additional_file: u64 = 0,
-is_plugin_file: bool = false,
flags: Flags = .{},

pub const Flags = packed struct(u8) {
    is_plugin_file: bool = false,
    /// Set when a barrel-eligible file has `export * from` this file.
    is_export_star_target: bool = false,
    _: u6 = 0,
};
};

pub inline fn pathToSourceIndexMap(this: *Graph, target: options.Target) *PathToSourceIndexMap {
@@ -1186,6 +1186,10 @@ pub const LinkerContext = struct {
    ast: *const JSAst,
) !bool {
    const record = ast.import_records.at(import_record_index);
    // Barrel optimization: deferred import records should be dropped
    if (record.flags.is_unused) {
        return true;
    }
    // Is this an external import?
    if (!record.source_index.isValid()) {
        // Keep the "import" statement if import statements are supported
@@ -2321,6 +2325,14 @@ pub const LinkerContext = struct {
    };
}

// Barrel optimization: deferred import records point to empty ASTs
if (record.flags.is_unused) {
    return .{
        .value = .{},
        .status = .external,
    };
}

// Is this a disabled file?
const other_source_index = record.source_index.get();
const other_id = other_source_index;
@@ -34,6 +34,7 @@ emit_decorator_metadata: bool = false,
experimental_decorators: bool = false,
ctx: *BundleV2,
package_version: string = "",
package_name: string = "",
is_entry_point: bool = false,

const ParseTaskStage = union(enum) {
@@ -84,6 +85,9 @@ pub const Result = struct {
content_hash_for_additional_file: u64 = 0,

loader: Loader,

/// The package name from package.json, used for barrel optimization.
package_name: string = "",
};

pub const Error = struct {
@@ -121,6 +125,7 @@ pub fn init(resolve_result: *const _resolver.Result, source_index: Index, ctx: *
    .emit_decorator_metadata = resolve_result.flags.emit_decorator_metadata,
    .experimental_decorators = resolve_result.flags.experimental_decorators,
    .package_version = if (resolve_result.package_json) |package_json| package_json.version else "",
    .package_name = if (resolve_result.package_json) |package_json| package_json.name else "",
    .known_target = ctx.transpiler.options.target,
};
}
@@ -1294,6 +1299,7 @@ fn runWithSourceCode(
.unique_key_for_additional_file = unique_key_for_additional_file.key,
.side_effects = task.side_effects,
.loader = loader,
.package_name = task.package_name,

// Hash the files in here so that we do it in parallel.
.content_hash_for_additional_file = if (loader.shouldCopyForBundling())
src/bundler/barrel_imports.zig (new file, 496 lines)
@@ -0,0 +1,496 @@
//! Barrel optimization: detect pure re-export barrel files and defer loading
//! of unused submodules. Uses a persistent `requested_exports` map to track
//! which exports have been requested from each barrel, providing cross-call
//! deduplication and cycle detection (inspired by Rolldown's pattern).
//!
//! Import requests are recorded eagerly as each file is processed — before
//! barrels are known. When a barrel later loads, applyBarrelOptimization reads
//! `requested_exports` to see what's already been requested. No graph scan needed.

const log = Output.scoped(.barrel, .hidden);

pub const RequestedExports = union(enum) {
    all,
    partial: bun.StringArrayHashMapUnmanaged(void),
};

const BarrelExportResolution = struct {
    import_record_index: u32,
    /// The original alias in the source module (e.g. "d" for `export { d as c }`)
    original_alias: ?[]const u8,
};

/// Look up an export name → import_record_index by chasing
/// named_exports[alias].ref through named_imports.
/// Also returns the original alias from the source module for BFS propagation.
fn resolveBarrelExport(alias: []const u8, named_exports: JSAst.NamedExports, named_imports: JSAst.NamedImports) ?BarrelExportResolution {
    const export_entry = named_exports.get(alias) orelse return null;
    const import_entry = named_imports.get(export_entry.ref) orelse return null;
    return .{ .import_record_index = import_entry.import_record_index, .original_alias = import_entry.alias };
}

/// Analyze a parsed file to determine if it's a barrel and mark unneeded
/// import records as is_unused so they won't be resolved. Runs BEFORE resolution.
///
/// A file qualifies as a barrel if:
/// 1. It has `sideEffects: false` or is in `optimize_imports`, AND
/// 2. All named exports are re-exports (no local definitions), AND
/// 3. It is not an export star target of another barrel.
///
/// Export * records are never deferred (always resolved) to avoid circular races.
pub fn applyBarrelOptimization(this: *BundleV2, parse_result: *ParseTask.Result) void {
    bun.handleOom(applyBarrelOptimizationImpl(this, parse_result));
}
fn applyBarrelOptimizationImpl(this: *BundleV2, parse_result: *ParseTask.Result) !void {
    const result = &parse_result.value.success;
    const ast = &result.ast;
    const source_index = result.source.index.get();

    const is_explicit = if (this.transpiler.options.optimize_imports) |oi| oi.map.contains(result.package_name) else false;
    const is_side_effects_false = result.side_effects == .no_side_effects__package_json;
    if (!is_explicit and !is_side_effects_false) return;
    if (ast.import_records.len == 0) return;
    if (ast.named_exports.count() == 0 and ast.export_star_import_records.len == 0) return;

    const named_exports = ast.named_exports;
    const named_imports = ast.named_imports;

    // Verify pure barrel: all named exports must be re-exports
    var export_iter = named_exports.iterator();
    while (export_iter.next()) |entry| {
        if (named_imports.get(entry.value_ptr.ref) == null) return;
    }

    // If this barrel is a star target of another barrel, can't safely defer
    if (this.graph.input_files.items(.flags)[source_index].is_export_star_target) return;

    // Check requested_exports to see which exports were already requested by
    // files parsed before this barrel. scheduleBarrelDeferredImports records
    // requests eagerly as each file is processed, so we don't need to scan
    // the graph.
    if (this.requested_exports.get(source_index)) |existing| {
        switch (existing) {
            .all => return, // import * already seen — load everything
            .partial => {},
        }
    }

    // Build the set of needed import_record_indices from already-requested
    // export names. Export * records are always needed.
    var needed_records_stack = std.heap.stackFallback(8192, this.allocator());
    const needed_records_alloc = needed_records_stack.get();
    var needed_records = std.AutoArrayHashMapUnmanaged(u32, void){};
    defer needed_records.deinit(needed_records_alloc);

    for (ast.export_star_import_records) |record_idx| {
        try needed_records.put(needed_records_alloc, record_idx, {});
    }

    if (this.requested_exports.get(source_index)) |existing| {
        switch (existing) {
            .all => unreachable, // handled above
            .partial => |partial| {
                var partial_iter = partial.iterator();
                while (partial_iter.next()) |p_entry| {
                    if (resolveBarrelExport(p_entry.key_ptr.*, named_exports, named_imports)) |resolution| {
                        try needed_records.put(needed_records_alloc, resolution.import_record_index, {});
                    }
                }
            },
        }
    }

    // Dev server: also include exports persisted from previous builds. This
    // handles the case where file A imports Alpha from the barrel (previous
    // build) and file B adds Beta (current build). Without this, Alpha would
    // be re-deferred because only B's requests are in requested_exports.
    if (this.transpiler.options.dev_server) |dev| {
        if (dev.barrel_needed_exports.get(result.source.path.text)) |persisted| {
            var persisted_iter = persisted.keyIterator();
            while (persisted_iter.next()) |alias_ptr| {
                if (resolveBarrelExport(alias_ptr.*, named_exports, named_imports)) |resolution| {
                    try needed_records.put(needed_records_alloc, resolution.import_record_index, {});
                }
            }
        }
    }

    // Mark unneeded named re-export records as is_unused.
    var has_deferrals = false;
    export_iter = named_exports.iterator();
    while (export_iter.next()) |entry| {
        if (named_imports.get(entry.value_ptr.ref)) |imp| {
            if (!needed_records.contains(imp.import_record_index)) {
                if (imp.import_record_index < ast.import_records.len) {
                    ast.import_records.slice()[imp.import_record_index].flags.is_unused = true;
                    has_deferrals = true;
                }
            }
        }
    }

    if (has_deferrals) {
        log("barrel detected: {s} (source={d}, {d} deferred, {d} needed)", .{
            if (result.package_name.len > 0) result.package_name else result.source.path.text,
            source_index,
            named_exports.count() -| needed_records.count(),
            needed_records.count(),
        });

        // Merge with existing entry (keep already-requested names) or create new
        const gop = try this.requested_exports.getOrPut(this.allocator(), source_index);
        if (!gop.found_existing) {
            gop.value_ptr.* = .{ .partial = .{} };
        }

        // Register with DevServer so isFileCached returns null for this barrel,
        // ensuring it gets re-parsed on every incremental build. This is needed
        // because the set of needed exports can change when importing files change.
        if (this.transpiler.options.dev_server) |dev| {
            const alloc = dev.allocator();
            const barrel_gop = try dev.barrel_files_with_deferrals.getOrPut(alloc, result.source.path.text);
            if (!barrel_gop.found_existing) {
                barrel_gop.key_ptr.* = try alloc.dupe(u8, result.source.path.text);
            }
        }
    }
}
/// Clear is_unused on a deferred barrel record. Returns true if the record was un-deferred.
fn unDeferRecord(import_records: *ImportRecord.List, record_idx: u32) bool {
    if (record_idx >= import_records.len) return false;
    const rec = &import_records.slice()[record_idx];
    if (rec.flags.is_internal or !rec.flags.is_unused) return false;
    rec.flags.is_unused = false;
    return true;
}

/// BFS work queue item: un-defer an export from a barrel.
const BarrelWorkItem = struct { barrel_source_index: u32, alias: []const u8, is_star: bool };

/// Resolve, process, and patch import records for a single barrel.
/// Used to inline-resolve deferred records whose source_index is still invalid.
fn resolveBarrelRecords(this: *BundleV2, barrel_idx: u32, barrels_to_resolve: *std.AutoArrayHashMapUnmanaged(u32, void)) i32 {
    const graph_ast = this.graph.ast.slice();
    const barrel_ir = &graph_ast.items(.import_records)[barrel_idx];
    const target = graph_ast.items(.target)[barrel_idx];
    var resolve_result = this.resolveImportRecords(.{
        .import_records = barrel_ir,
        .source = &this.graph.input_files.items(.source)[barrel_idx],
        .loader = this.graph.input_files.items(.loader)[barrel_idx],
        .target = target,
    });
    defer resolve_result.resolve_queue.deinit();
    const scheduled = this.processResolveQueue(resolve_result.resolve_queue, target, barrel_idx);
    // Re-derive pointer after processResolveQueue may have reallocated graph.ast
    const barrel_ir_updated = &this.graph.ast.slice().items(.import_records)[barrel_idx];
    this.patchImportRecordSourceIndices(barrel_ir_updated, .{
        .source_index = Index.init(barrel_idx),
        .source_path = this.graph.input_files.items(.source)[barrel_idx].path.text,
        .loader = this.graph.input_files.items(.loader)[barrel_idx],
        .target = target,
        .force_save = true,
    });
    _ = barrels_to_resolve.swapRemove(barrel_idx);
    return scheduled;
}

/// After a new file's import records are patched with source_indices,
/// record what this file requests from each target in requested_exports
/// (eagerly, before barrels are known), then BFS through barrel chains
/// to un-defer needed records. Un-deferred records are re-resolved through
/// resolveImportRecords (same path as initial resolution).
/// Returns the number of newly scheduled parse tasks.
pub fn scheduleBarrelDeferredImports(this: *BundleV2, result: *ParseTask.Result.Success) !i32 {
    const file_import_records = result.ast.import_records;

    // Phase 1: Seed — eagerly record what this file requests from each target.
    // This runs for every file, even before any barrels are known. When a barrel
    // is later parsed, applyBarrelOptimization reads these pre-recorded requests
    // to decide which exports to keep. O(file's imports) per file.

    // Build a set of import_record_indices that have named_imports entries,
    // so we can detect bare imports (those with no specific export bindings).
    var named_ir_indices_stack = std.heap.stackFallback(4096, this.allocator());
    const named_ir_indices_alloc = named_ir_indices_stack.get();
    var named_ir_indices = std.AutoArrayHashMapUnmanaged(u32, void){};
    defer named_ir_indices.deinit(named_ir_indices_alloc);

    // In dev server mode, patchImportRecordSourceIndices skips saving source_indices
    // on import records (the dev server uses path-based identifiers instead). But
    // barrel optimization requires source_indices to seed requested_exports and to
    // BFS un-defer records. Resolve paths → source_indices here as a fallback.
    const path_to_source_index_map = if (this.transpiler.options.dev_server != null)
        this.pathToSourceIndexMap(result.ast.target)
    else
        null;

    var ni_iter = result.ast.named_imports.iterator();
    while (ni_iter.next()) |ni_entry| {
        const ni = ni_entry.value_ptr;
        if (ni.import_record_index >= file_import_records.len) continue;
        try named_ir_indices.put(named_ir_indices_alloc, ni.import_record_index, {});
        const ir = file_import_records.slice()[ni.import_record_index];
        // In dev server mode, source_index may not be patched — resolve via
        // path map as a read-only fallback. Do NOT write back to the import
        // record — the dev server intentionally leaves source_indices unset
        // and other code (IncrementalGraph, printer) depends on that.
        const target = if (ir.source_index.isValid())
            ir.source_index.get()
        else if (path_to_source_index_map) |map|
            map.getPath(&ir.path) orelse continue
        else
            continue;

        const gop = try this.requested_exports.getOrPut(this.allocator(), target);
        if (ni.alias_is_star) {
            gop.value_ptr.* = .all;
        } else if (ni.alias) |alias| {
            if (gop.found_existing) {
                switch (gop.value_ptr.*) {
                    .all => {},
                    .partial => |*p| try p.put(this.allocator(), alias, {}),
                }
            } else {
                gop.value_ptr.* = .{ .partial = .{} };
                try gop.value_ptr.partial.put(this.allocator(), alias, {});
            }
            // Persist the export request on DevServer so it survives across builds.
            if (this.transpiler.options.dev_server) |dev| {
                persistBarrelExport(dev, ir.path.text, alias);
            }
        } else if (!gop.found_existing) {
            gop.value_ptr.* = .{ .partial = .{} };
        }
    }
    // Handle import records without named bindings (not in named_imports).
    // - `import "x"` (bare statement): tree-shakeable with sideEffects: false — skip.
    // - `require("x")`: synchronous, needs full module — always mark as .all.
    // - `import("x")`: mark as .all ONLY if the barrel has no prior requests,
    //   meaning this is the sole reference. If the barrel already has a .partial
    //   entry from a static import, the dynamic import is likely a secondary
    //   (possibly circular) reference and should not escalate requirements.
    for (file_import_records.slice(), 0..) |ir, idx| {
        const target = if (ir.source_index.isValid())
            ir.source_index.get()
        else if (path_to_source_index_map) |map|
            map.getPath(&ir.path) orelse continue
        else
            continue;
        if (ir.flags.is_internal) continue;
        if (named_ir_indices.contains(@intCast(idx))) continue;
        if (ir.flags.was_originally_bare_import) continue;
        if (ir.kind == .require) {
            const gop = try this.requested_exports.getOrPut(this.allocator(), target);
            gop.value_ptr.* = .all;
        } else if (ir.kind == .dynamic) {
            // Only escalate to .all if no prior requests exist for this target.
            if (!this.requested_exports.contains(target)) {
                try this.requested_exports.put(this.allocator(), target, .all);
            }
        }
    }

    // Phase 2: BFS — un-defer barrel records that are now needed.
    // Build work queue from this file's named_imports, then propagate
    // through chains of barrels. Only runs real work when barrels exist
    // (targets with deferred records).
    var queue_stack = std.heap.stackFallback(8192, this.allocator());
    const queue_alloc = queue_stack.get();
    var queue = std.ArrayListUnmanaged(BarrelWorkItem){};
    defer queue.deinit(queue_alloc);

    ni_iter = result.ast.named_imports.iterator();
    while (ni_iter.next()) |ni_entry| {
        const ni = ni_entry.value_ptr;
        if (ni.import_record_index >= file_import_records.len) continue;
        const ir = file_import_records.slice()[ni.import_record_index];
        const ir_target = if (ir.source_index.isValid())
            ir.source_index.get()
        else if (path_to_source_index_map) |map|
            map.getPath(&ir.path) orelse continue
        else
            continue;

        if (ni.alias_is_star) {
            try queue.append(queue_alloc, .{ .barrel_source_index = ir_target, .alias = "", .is_star = true });
        } else if (ni.alias) |alias| {
            try queue.append(queue_alloc, .{ .barrel_source_index = ir_target, .alias = alias, .is_star = false });
        }
    }

    // Add bare require/dynamic-import targets to BFS as star imports (matching
    // the seeding logic above — require always, dynamic only when sole reference).
    for (file_import_records.slice(), 0..) |ir, idx| {
        const target = if (ir.source_index.isValid())
            ir.source_index.get()
        else if (path_to_source_index_map) |map|
            map.getPath(&ir.path) orelse continue
        else
            continue;
        if (ir.flags.is_internal) continue;
        if (named_ir_indices.contains(@intCast(idx))) continue;
        if (ir.flags.was_originally_bare_import) continue;
        const is_all = if (this.requested_exports.get(target)) |re| re == .all else false;
        const should_add = ir.kind == .require or (ir.kind == .dynamic and is_all);
        if (should_add) {
            try queue.append(queue_alloc, .{ .barrel_source_index = target, .alias = "", .is_star = true });
        }
    }

    // Also seed the BFS with exports previously requested from THIS file
    // that couldn't propagate because this file wasn't parsed yet.
    // This handles the case where file A requests export "d" from file B,
    // but B hadn't been parsed when A's BFS ran, so B's export * records
    // were empty and the propagation stopped.
    const this_source_index = result.source.index.get();
    if (this.requested_exports.get(this_source_index)) |existing| {
        switch (existing) {
            .all => try queue.append(queue_alloc, .{ .barrel_source_index = this_source_index, .alias = "", .is_star = true }),
            .partial => |partial| {
                var partial_iter = partial.iterator();
                while (partial_iter.next()) |p_entry| {
                    try queue.append(queue_alloc, .{ .barrel_source_index = this_source_index, .alias = p_entry.key_ptr.*, .is_star = false });
                }
            },
        }
    }

    if (queue.items.len == 0) return 0;
    // Items [0, initial_queue_len) are from this file's imports and were
    // already recorded in requested_exports during seeding (phase 1).
    // Skip dedup for them so un-deferral proceeds correctly.
    // Items added during BFS propagation (>= initial_queue_len) use normal
    // dedup via requested_exports to prevent cycles.
    const initial_queue_len = queue.items.len;

    var barrels_to_resolve = std.AutoArrayHashMapUnmanaged(u32, void){};
    var barrels_to_resolve_stack = std.heap.stackFallback(1024, this.allocator());
    const barrels_to_resolve_alloc = barrels_to_resolve_stack.get();
    defer barrels_to_resolve.deinit(barrels_to_resolve_alloc);

    var newly_scheduled: i32 = 0;
    var qi: usize = 0;
    while (qi < queue.items.len) : (qi += 1) {
        const item = queue.items[qi];
        const barrel_idx = item.barrel_source_index;

        // For BFS-propagated items (not from initial queue), use
        // requested_exports for dedup and cycle detection.
        if (qi >= initial_queue_len) {
            const gop = try this.requested_exports.getOrPut(this.allocator(), barrel_idx);
            if (item.is_star) {
                gop.value_ptr.* = .all;
            } else if (gop.found_existing) {
                switch (gop.value_ptr.*) {
                    .all => continue,
                    .partial => |*p| {
                        const alias_gop = try p.getOrPut(this.allocator(), item.alias);
                        if (alias_gop.found_existing) continue;
                    },
                }
            } else {
                gop.value_ptr.* = .{ .partial = .{} };
                try gop.value_ptr.partial.put(this.allocator(), item.alias, {});
            }
        }

        if (barrel_idx >= this.graph.ast.len) continue;

        // Use a helper to get barrel_ir freshly each time, since
        // resolveBarrelRecords can reallocate graph.ast and invalidate pointers.
        var barrel_ir = &this.graph.ast.slice().items(.import_records)[barrel_idx];

        if (item.is_star) {
            for (barrel_ir.slice(), 0..) |rec, idx| {
                if (rec.flags.is_unused and !rec.flags.is_internal) {
                    if (unDeferRecord(barrel_ir, @intCast(idx))) {
                        try barrels_to_resolve.put(barrels_to_resolve_alloc, barrel_idx, {});
                    }
                }
            }
            continue;
        }

        const alias = item.alias;
        const graph_ast_snapshot = this.graph.ast.slice();
        const resolution = resolveBarrelExport(alias, graph_ast_snapshot.items(.named_exports)[barrel_idx], graph_ast_snapshot.items(.named_imports)[barrel_idx]) orelse {
            // Name not in named re-exports — might come from export *.
            for (graph_ast_snapshot.items(.export_star_import_records)[barrel_idx]) |star_idx| {
                if (star_idx >= barrel_ir.len) continue;
                if (unDeferRecord(barrel_ir, star_idx)) {
                    try barrels_to_resolve.put(barrels_to_resolve_alloc, barrel_idx, {});
                }
                var star_rec = barrel_ir.slice()[star_idx];
                if (!star_rec.source_index.isValid()) {
                    // Deferred record was never resolved — resolve inline now.
                    newly_scheduled += resolveBarrelRecords(this, barrel_idx, &barrels_to_resolve);
                    // Re-derive pointer after resolution may have mutated slices.
                    barrel_ir = &this.graph.ast.slice().items(.import_records)[barrel_idx];
                    star_rec = barrel_ir.slice()[star_idx];
                }
                if (star_rec.source_index.isValid()) {
                    try queue.append(queue_alloc, .{ .barrel_source_index = star_rec.source_index.get(), .alias = alias, .is_star = false });
                }
            }
            continue;
        };

        if (unDeferRecord(barrel_ir, resolution.import_record_index)) {
            try barrels_to_resolve.put(barrels_to_resolve_alloc, barrel_idx, {});
        }

        const propagate_alias = resolution.original_alias orelse alias;
        if (resolution.import_record_index < barrel_ir.len) {
            var rec = barrel_ir.slice()[resolution.import_record_index];
            if (!rec.source_index.isValid()) {
                // Deferred record was never resolved — resolve inline now.
                newly_scheduled += resolveBarrelRecords(this, barrel_idx, &barrels_to_resolve);
                barrel_ir = &this.graph.ast.slice().items(.import_records)[barrel_idx];
                rec = barrel_ir.slice()[resolution.import_record_index];
            }
            if (rec.source_index.isValid()) {
                try queue.append(queue_alloc, .{ .barrel_source_index = rec.source_index.get(), .alias = propagate_alias, .is_star = false });
            }
        }
    }

    // Re-resolve any remaining un-deferred records through the normal resolution path.
    while (barrels_to_resolve.count() > 0) {
        const barrel_source_index = barrels_to_resolve.keys()[0];
        newly_scheduled += resolveBarrelRecords(this, barrel_source_index, &barrels_to_resolve);
    }

    return newly_scheduled;
}

/// Persist an export name for a barrel file on the DevServer. Called during
/// seeding so that exports requested in previous builds are not lost when the
/// barrel is re-parsed in an incremental build where the requesting file is
/// not stale.
fn persistBarrelExport(dev: *bun.bake.DevServer, barrel_path: []const u8, alias: []const u8) void {
    const alloc = dev.allocator();
    const outer_gop = dev.barrel_needed_exports.getOrPut(alloc, barrel_path) catch return;
    if (!outer_gop.found_existing) {
        outer_gop.key_ptr.* = alloc.dupe(u8, barrel_path) catch return;
        outer_gop.value_ptr.* = .{};
    }
    const inner_gop = outer_gop.value_ptr.getOrPut(alloc, alias) catch return;
    if (!inner_gop.found_existing) {
        inner_gop.key_ptr.* = alloc.dupe(u8, alias) catch return;
    }
}

const std = @import("std");
const BundleV2 = @import("./bundle_v2.zig").BundleV2;
const ParseTask = @import("./ParseTask.zig").ParseTask;

const bun = @import("bun");
const ImportRecord = bun.ImportRecord;
const Output = bun.Output;

const Index = bun.ast.Index;
const JSAst = bun.ast.BundledAst;
@@ -150,6 +150,15 @@ pub const BundleV2 = struct {
// if false we can skip TLA validation and propagation
has_any_top_level_await_modules: bool = false,

/// Barrel optimization: tracks which exports have been requested from each
/// module encountered during barrel BFS. Keys are source_indices. Values
/// track requested export names for deduplication and cycle detection.
/// Persists across calls to scheduleBarrelDeferredImports so cross-file
/// deduplication is free.
requested_exports: std.AutoArrayHashMapUnmanaged(u32, barrel_imports.RequestedExports) = .{},

const barrel_imports = @import("./barrel_imports.zig");

const BakeOptions = struct {
    framework: bake.Framework,
    client_transpiler: *Transpiler,
@@ -1998,6 +2007,9 @@ pub const BundleV2 = struct {
transpiler.options.metafile = config.metafile;
transpiler.options.metafile_json_path = config.metafile_json_path.slice();
transpiler.options.metafile_markdown_path = config.metafile_markdown_path.slice();
if (config.optimize_imports.count() > 0) {
    transpiler.options.optimize_imports = &config.optimize_imports;
}

if (transpiler.options.compile) {
    // Emitting DCE annotations is nonsensical in --compile.
@@ -2419,7 +2431,7 @@ pub const BundleV2 = struct {
}
this.graph.input_files.items(.loader)[load.source_index.get()] = code.loader;
this.graph.input_files.items(.source)[load.source_index.get()].contents = code.source_code;
-this.graph.input_files.items(.is_plugin_file)[load.source_index.get()] = true;
this.graph.input_files.items(.flags)[load.source_index.get()].is_plugin_file = true;
var parse_task = load.parse_task;
parse_task.loader = code.loader;
if (!should_copy_for_bundling) this.free_list.append(code.source_code) catch unreachable;
@@ -3278,6 +3290,18 @@ pub const BundleV2 = struct {
    this.graph.ast.appendAssumeCapacity(JSAst.empty);
}

/// See barrel_imports.zig for barrel optimization implementation.
const applyBarrelOptimization = barrel_imports.applyBarrelOptimization;
const scheduleBarrelDeferredImports = barrel_imports.scheduleBarrelDeferredImports;

/// Returns true when barrel optimization is enabled. Barrel optimization
/// can apply to any package with sideEffects: false or listed in
/// optimize_imports, so it is always enabled during bundling.
fn isBarrelOptimizationEnabled(this: *const BundleV2) bool {
    _ = this;
    return true;
}

// TODO: remove ResolveQueue
//
// Moving this to the Bundle thread was a significant perf improvement on Linux for first builds
@@ -3285,18 +3309,68 @@ pub const BundleV2 = struct {
// The problem is that module resolution has many mutexes.
// The downside is cached resolutions are faster to do in threads since they only lock very briefly.
fn runResolutionForParseTask(parse_result: *ParseTask.Result, this: *BundleV2) ResolveQueue {
-    var ast = &parse_result.value.success.ast;
-    const source = &parse_result.value.success.source;
-    const loader = parse_result.value.success.loader;
    const result = &parse_result.value.success;
    // Capture these before resolveImportRecords, since on error we overwrite
    // parse_result.value (invalidating the `result` pointer).
    const source_index = result.source.index;
    const target = result.ast.target;
    var resolve_result = this.resolveImportRecords(.{
        .import_records = &result.ast.import_records,
        .source = &result.source,
        .loader = result.loader,
        .target = target,
    });

    if (resolve_result.last_error) |err| {
        debug("failed with error: {s}", .{@errorName(err)});
        resolve_result.resolve_queue.clearAndFree();
        parse_result.value = .{
            .err = .{
                .err = err,
                .step = .resolve,
                .log = Logger.Log.init(bun.default_allocator),
                .source_index = source_index,
                .target = target,
            },
        };
    }

    return resolve_result.resolve_queue;
}

pub const ResolveImportRecordCtx = struct {
    import_records: *ImportRecord.List,
    source: *const Logger.Source,
    loader: Loader,
    target: options.Target,
};

pub const ResolveImportRecordResult = struct {
    resolve_queue: ResolveQueue,
    last_error: ?anyerror,
};

/// Resolve all unresolved import records for a module. Skips records that
/// are already resolved (valid source_index), unused, or internal.
/// Returns a resolve queue of new modules to schedule, plus any fatal error.
/// Used by both initial parse resolution and barrel un-deferral.
pub fn resolveImportRecords(this: *BundleV2, ctx: ResolveImportRecordCtx) ResolveImportRecordResult {
    const source = ctx.source;
    const loader = ctx.loader;
    const source_dir = source.path.sourceDir();
    var estimated_resolve_queue_count: usize = 0;
-    for (ast.import_records.slice()) |*import_record| {
    for (ctx.import_records.slice()) |*import_record| {
        if (import_record.flags.is_internal) {
            import_record.tag = .runtime;
            import_record.source_index = Index.runtime;
        }

-        if (import_record.flags.is_unused) {
        // For non-dev-server builds, barrel-deferred records need their
        // source_index cleared so they don't get linked. For dev server,
        // skip this — is_unused is also set by ConvertESMExportsForHmr
        // deduplication, and clearing those source_indices breaks module
        // identity (e.g., __esModule on ESM namespace objects).
        if (import_record.flags.is_unused and this.transpiler.options.dev_server == null) {
            import_record.source_index = Index.invalid;
        }
@@ -3307,7 +3381,7 @@ pub const BundleV2 = struct {
var last_error: ?anyerror = null;

-outer: for (ast.import_records.slice(), 0..) |*import_record, i| {
outer: for (ctx.import_records.slice(), 0..) |*import_record, i| {
    // Preserve original import specifier before resolution modifies path
    if (import_record.original_path.len == 0) {
        import_record.original_path = import_record.path.text;
@@ -3327,7 +3401,7 @@ pub const BundleV2 = struct {
}

if (this.framework) |fw| if (fw.server_components != null) {
-    switch (ast.target.isServerSide()) {
    switch (ctx.target.isServerSide()) {
        inline else => |is_server| {
            const src = if (is_server) bake.server_virtual_source else bake.client_virtual_source;
            if (strings.eqlComptime(import_record.path.text, src.path.pretty)) {
@@ -3357,7 +3431,7 @@ pub const BundleV2 = struct {
    continue;
}

-if (ast.target.isBun()) {
if (ctx.target.isBun()) {
    if (jsc.ModuleLoader.HardcodedModule.Alias.get(import_record.path.text, .bun, .{ .rewrite_jest_for_tests = this.transpiler.options.rewrite_jest_for_tests })) |replacement| {
        // When bundling node builtins, remove the "node:" prefix.
        // This supports special use cases where the bundle is put
@@ -3394,7 +3468,7 @@ pub const BundleV2 = struct {
    import_record.flags.is_external_without_side_effects = true;
}

-if (this.enqueueOnResolvePluginIfNeeded(source.index.get(), import_record, source.path.text, @as(u32, @truncate(i)), ast.target)) {
if (this.enqueueOnResolvePluginIfNeeded(source.index.get(), import_record, source.path.text, @as(u32, @truncate(i)), ctx.target)) {
    continue;
}
@@ -3430,9 +3504,9 @@ pub const BundleV2 = struct {
        .bake_server_components_ssr,
    };
} else .{
-    this.transpilerForTarget(ast.target),
-    ast.target.bakeGraph(),
-    ast.target,
    this.transpilerForTarget(ctx.target),
    ctx.target.bakeGraph(),
    ctx.target,
};

// Check the FileMap first for in-memory files
@@ -3505,7 +3579,7 @@ pub const BundleV2 = struct {
    dev.directory_watchers.trackResolutionFailure(
        source.path.text,
        import_record.path.text,
-        ast.target.bakeGraph(), // use the source file target not the altered one
        ctx.target.bakeGraph(), // use the source file target not the altered one
        loader,
    ) catch |e| bun.handleOom(e);
}
@@ -3525,7 +3599,7 @@ pub const BundleV2 = struct {
if (!import_record.flags.handles_import_errors and !this.transpiler.options.ignore_module_resolution_errors) {
    last_error = err;
    if (isPackagePath(import_record.path.text)) {
-        if (ast.target == .browser and options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
        if (ctx.target == .browser and options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
            addError(
                log,
                source,
@@ -3542,7 +3616,7 @@ pub const BundleV2 = struct {
            },
            import_record.kind,
        ) catch |e| bun.handleOom(e);
-        } else if (!ast.target.isBun() and strings.eqlComptime(import_record.path.text, "bun")) {
        } else if (!ctx.target.isBun() and strings.eqlComptime(import_record.path.text, "bun")) {
            addError(
                log,
                source,
@@ -3559,7 +3633,7 @@ pub const BundleV2 = struct {
            },
            import_record.kind,
        ) catch |e| bun.handleOom(e);
-        } else if (!ast.target.isBun() and strings.hasPrefixComptime(import_record.path.text, "bun:")) {
        } else if (!ctx.target.isBun() and strings.hasPrefixComptime(import_record.path.text, "bun:")) {
            addError(
                log,
                source,
@@ -3744,21 +3818,126 @@ pub const BundleV2 = struct {
        }
    }

-    if (last_error) |err| {
-        debug("failed with error: {s}", .{@errorName(err)});
-        resolve_queue.clearAndFree();
-        parse_result.value = .{
-            .err = .{
-                .err = err,
-                .step = .resolve,
-                .log = Logger.Log.init(bun.default_allocator),
-                .source_index = source.index,
-                .target = ast.target,
-            },
-        };
    return .{ .resolve_queue = resolve_queue, .last_error = last_error };
}

/// Process a resolve queue: create input file slots and schedule parse tasks.
/// Returns the number of newly scheduled tasks (for pending_items accounting).
pub fn processResolveQueue(this: *BundleV2, resolve_queue: ResolveQueue, target: options.Target, importer_source_index: Index.Int) i32 {
    var diff: i32 = 0;
    const graph = &this.graph;
    var iter = resolve_queue.iterator();
    const path_to_source_index_map = this.pathToSourceIndexMap(target);
    while (iter.next()) |entry| {
        const value: *ParseTask = entry.value_ptr.*;
        const loader = value.loader orelse value.path.loader(&this.transpiler.options.loaders) orelse options.Loader.file;
        const is_html_entrypoint = loader == .html and target.isServerSide() and this.transpiler.options.dev_server == null;
        const map: *PathToSourceIndexMap = if (is_html_entrypoint) this.pathToSourceIndexMap(.browser) else path_to_source_index_map;
        const existing = map.getOrPut(this.allocator(), entry.key_ptr.*) catch unreachable;

        if (!existing.found_existing) {
            var new_task: *ParseTask = value;
            var new_input_file = Graph.InputFile{
                .source = Logger.Source.initEmptyFile(new_task.path.text),
                .side_effects = value.side_effects,
                .secondary_path = if (value.secondary_path_for_commonjs_interop) |*secondary_path| secondary_path.text else "",
            };

            graph.has_any_secondary_paths = graph.has_any_secondary_paths or new_input_file.secondary_path.len > 0;

            new_input_file.source.index = Index.source(graph.input_files.len);
            new_input_file.source.path = new_task.path;
            new_input_file.loader = loader;
            new_task.source_index = new_input_file.source.index;
            new_task.ctx = this;
            existing.value_ptr.* = new_task.source_index.get();

            diff += 1;

            graph.input_files.append(this.allocator(), new_input_file) catch unreachable;
            graph.ast.append(this.allocator(), JSAst.empty) catch unreachable;

            if (is_html_entrypoint) {
                this.ensureClientTranspiler();
                this.graph.entry_points.append(this.allocator(), new_input_file.source.index) catch unreachable;
            }

            if (this.enqueueOnLoadPluginIfNeeded(new_task)) {
                continue;
            }

            if (loader.shouldCopyForBundling()) {
                var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[importer_source_index];
                bun.handleOom(additional_files.append(this.allocator(), .{ .source_index = new_task.source_index.get() }));
                graph.input_files.items(.side_effects)[new_task.source_index.get()] = _resolver.SideEffects.no_side_effects__pure_data;
                graph.estimated_file_loader_count += 1;
            }

            graph.pool.schedule(new_task);
        } else {
            if (loader.shouldCopyForBundling()) {
                var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[importer_source_index];
                bun.handleOom(additional_files.append(this.allocator(), .{ .source_index = existing.value_ptr.* }));
                graph.estimated_file_loader_count += 1;
            }

            bun.default_allocator.destroy(value);
        }
    }
    return diff;
}

pub const PatchImportRecordsCtx = struct {
    source_index: Index,
    source_path: []const u8,
    loader: Loader,
    target: options.Target,
    redirect_import_record_index: u32 = std.math.maxInt(u32),
    /// When true, always save source indices regardless of dev_server/loader.
    /// Used for barrel un-deferral where records must always be connected.
    force_save: bool = false,
};

/// Patch source_index on import records from pathToSourceIndexMap and
/// resolve_tasks_waiting_for_import_source_index. Called after
/// processResolveQueue has registered new modules.
pub fn patchImportRecordSourceIndices(this: *BundleV2, import_records: *ImportRecord.List, ctx: PatchImportRecordsCtx) void {
    const graph = &this.graph;
    const input_file_loaders = graph.input_files.items(.loader);
    const save_import_record_source_index = ctx.force_save or
        this.transpiler.options.dev_server == null or
        ctx.loader == .html or
        ctx.loader.isCSS();

    if (this.resolve_tasks_waiting_for_import_source_index.fetchSwapRemove(ctx.source_index.get())) |pending_entry| {
        var value = pending_entry.value;
        for (value.slice()) |to_assign| {
            if (save_import_record_source_index or
                input_file_loaders[to_assign.to_source_index.get()].isCSS())
            {
                import_records.slice()[to_assign.import_record_index].source_index = to_assign.to_source_index;
            }
        }
        value.deinit(this.allocator());
    }

-    return resolve_queue;
    const path_to_source_index_map = this.pathToSourceIndexMap(ctx.target);
    for (import_records.slice(), 0..) |*record, i| {
        if (path_to_source_index_map.getPath(&record.path)) |source_index| {
            if (save_import_record_source_index or input_file_loaders[source_index].isCSS())
                record.source_index.value = source_index;

            if (getRedirectId(ctx.redirect_import_record_index)) |compare| {
                if (compare == @as(u32, @truncate(i))) {
                    path_to_source_index_map.put(
                        this.allocator(),
                        ctx.source_path,
                        source_index,
                    ) catch unreachable;
                }
            }
        }
    }
}

fn generateServerHTMLModule(this: *BundleV2, path: *const Fs.Path, target: options.Target, import_record: *ImportRecord, path_text: []const u8) !void {
@@ -3857,6 +4036,8 @@ pub const BundleV2 = struct {
var process_log = true;

if (parse_result.value == .success) {
    this.applyBarrelOptimization(parse_result);

    resolve_queue = runResolutionForParseTask(parse_result, this);
    if (parse_result.value == .err) {
        process_log = false;
@@ -3933,125 +4114,40 @@ pub const BundleV2 = struct {
|
||||
result.ast.named_exports.count(),
|
||||
});
|
||||
|
||||
var iter = resolve_queue.iterator();
|
||||
|
||||
const path_to_source_index_map = this.pathToSourceIndexMap(result.ast.target);
|
||||
const original_target = result.ast.target;
|
||||
while (iter.next()) |entry| {
|
||||
const value: *ParseTask = entry.value_ptr.*;
|
||||
const loader = value.loader orelse value.path.loader(&this.transpiler.options.loaders) orelse options.Loader.file;
|
||||
const is_html_entrypoint = loader == .html and original_target.isServerSide() and this.transpiler.options.dev_server == null;
|
||||
const map: *PathToSourceIndexMap = if (is_html_entrypoint) this.pathToSourceIndexMap(.browser) else path_to_source_index_map;
|
||||
const existing = map.getOrPut(this.allocator(), entry.key_ptr.*) catch unreachable;
|
||||
|
||||
// Originally, we attempted to avoid the "dual package
|
||||
// hazard" right here by checking if pathToSourceIndexMap
|
||||
// already contained the secondary_path for the ParseTask.
|
||||
// That leads to a race condition where whichever parse task
|
||||
// completes first ends up being used in the bundle. So we
|
||||
// added `scanForSecondaryPaths` before `findReachableFiles`
|
||||
// to prevent that.
|
||||
//
|
||||
// It would be nice, in theory, to find a way to bring that
|
||||
// back because it means we can skip parsing the files we
|
||||
// don't end up using.
|
||||
//
|
||||
|
||||
if (!existing.found_existing) {
|
||||
var new_task: *ParseTask = value;
|
||||
var new_input_file = Graph.InputFile{
|
||||
.source = Logger.Source.initEmptyFile(new_task.path.text),
|
||||
.side_effects = value.side_effects,
|
||||
.secondary_path = if (value.secondary_path_for_commonjs_interop) |*secondary_path| secondary_path.text else "",
|
||||
};
|
||||
|
||||
graph.has_any_secondary_paths = graph.has_any_secondary_paths or new_input_file.secondary_path.len > 0;
|
||||
|
||||
new_input_file.source.index = Index.source(graph.input_files.len);
|
||||
new_input_file.source.path = new_task.path;
|
||||
|
||||
// We need to ensure the loader is set or else importstar_ts/ReExportTypeOnlyFileES6 will fail.
|
||||
new_input_file.loader = loader;
|
||||
new_task.source_index = new_input_file.source.index;
|
||||
new_task.ctx = this;
|
||||
existing.value_ptr.* = new_task.source_index.get();
|
||||
|
||||
diff += 1;
|
||||
|
||||
graph.input_files.append(this.allocator(), new_input_file) catch unreachable;
|
||||
graph.ast.append(this.allocator(), JSAst.empty) catch unreachable;
|
||||
|
||||
if (is_html_entrypoint) {
|
||||
this.ensureClientTranspiler();
|
||||
this.graph.entry_points.append(this.allocator(), new_input_file.source.index) catch unreachable;
|
||||
}
|
||||
|
||||
if (this.enqueueOnLoadPluginIfNeeded(new_task)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (loader.shouldCopyForBundling()) {
|
||||
var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[result.source.index.get()];
|
||||
bun.handleOom(additional_files.append(this.allocator(), .{ .source_index = new_task.source_index.get() }));
|
||||
new_input_file.side_effects = _resolver.SideEffects.no_side_effects__pure_data;
|
||||
graph.estimated_file_loader_count += 1;
|
||||
}
|
||||
|
||||
graph.pool.schedule(new_task);
|
||||
} else {
|
||||
if (loader.shouldCopyForBundling()) {
|
||||
var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[result.source.index.get()];
|
||||
bun.handleOom(additional_files.append(this.allocator(), .{ .source_index = existing.value_ptr.* }));
|
||||
graph.estimated_file_loader_count += 1;
|
||||
}
|
||||
|
||||
bun.default_allocator.destroy(value);
|
||||
}
|
||||
}
|
||||
|
||||
var import_records = result.ast.import_records.clone(this.allocator()) catch unreachable;

const input_file_loaders = graph.input_files.items(.loader);
const save_import_record_source_index = this.transpiler.options.dev_server == null or
    result.loader == .html or
    result.loader.isCSS();

if (this.resolve_tasks_waiting_for_import_source_index.fetchSwapRemove(result.source.index.get())) |pending_entry| {
    var value = pending_entry.value;
    for (value.slice()) |to_assign| {
        if (save_import_record_source_index or
            input_file_loaders[to_assign.to_source_index.get()].isCSS())
        {
            import_records.slice()[to_assign.import_record_index].source_index = to_assign.to_source_index;
        }
    }
    value.deinit(this.allocator());
}

if (result.ast.css != null) {
    graph.css_file_count += 1;
}

for (import_records.slice(), 0..) |*record, i| {
    if (path_to_source_index_map.getPath(&record.path)) |source_index| {
        if (save_import_record_source_index or input_file_loaders[source_index] == .css)
            record.source_index.value = source_index;

        diff += this.processResolveQueue(resolve_queue, result.ast.target, result.source.index.get());

        if (getRedirectId(result.ast.redirect_import_record_index)) |compare| {
            if (compare == @as(u32, @truncate(i))) {
                path_to_source_index_map.put(
                    this.allocator(),
                    result.source.path.text,
                    source_index,
                ) catch unreachable;
            }

var import_records = result.ast.import_records.clone(this.allocator()) catch unreachable;
this.patchImportRecordSourceIndices(&import_records, .{
    .source_index = result.source.index,
    .source_path = result.source.path.text,
    .loader = result.loader,
    .target = result.ast.target,
    .redirect_import_record_index = result.ast.redirect_import_record_index,
});
result.ast.import_records = import_records;

// Set is_export_star_target for barrel optimization
for (result.ast.export_star_import_records) |star_record_idx| {
    if (star_record_idx < import_records.len) {
        const star_ir = import_records.slice()[star_record_idx];
        if (star_ir.source_index.isValid()) {
            graph.input_files.items(.flags)[star_ir.source_index.get()].is_export_star_target = true;
        }
    }
}
result.ast.import_records = import_records;

graph.ast.set(result.source.index.get(), result.ast);

// Barrel optimization: eagerly record import requests and
// un-defer barrel records that are now needed.
if (this.isBarrelOptimizationEnabled()) {
    diff += bun.handleOom(this.scheduleBarrelDeferredImports(result));
}

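The `scheduleBarrelDeferredImports` call above is the un-defer step: once a newly parsed file is known to request names a barrel had skipped, just those submodules get scheduled. A rough TypeScript sketch of the bookkeeping; only the function name comes from the diff, the data layout is an assumption:

```ts
// Hypothetical sketch: per-barrel map of deferred export names to submodule paths.
const deferred = new Map<string, Map<string, string>>();

function scheduleBarrelDeferredImports(
  barrelPath: string,
  requestedNames: string[],
  schedule: (submodulePath: string) => void,
): number {
  const pending = deferred.get(barrelPath);
  if (!pending) return 0;
  let scheduled = 0;
  for (const name of requestedNames) {
    const submodule = pending.get(name);
    if (submodule !== undefined) {
      pending.delete(name);  // un-defer: this name is now needed
      schedule(submodule);   // parse just this submodule
      scheduled += 1;        // mirrors the `diff +=` counter above
    }
  }
  return scheduled;
}
```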
// For files with use directives, index and prepare the other side.
if (result.use_directive != .none and if (this.framework.?.server_components.?.separate_ssr_graph)
    ((result.use_directive == .client) == (result.ast.target == .browser))

@@ -128,12 +128,17 @@ pub fn convertStmtsForChunk(
            }

            // "export * from 'path'"
            const record = ast.import_records.at(s.import_record_index);

            // Barrel optimization: deferred export * records should be dropped
            if (record.flags.is_unused) {
                continue;
            }

            if (!shouldStripExports) {
                break :process_stmt;
            }

            const record = ast.import_records.at(s.import_record_index);

            // Is this export star evaluated at run time?
            if (!record.source_index.isValid() and c.options.output_format.keepES6ImportExportSyntax()) {
                if (record.flags.calls_runtime_re_export_fn) {

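The `is_unused` check above is an emit-time filter: a deferred `export *` record simply produces no statement in the chunk. A rough TypeScript picture of that filter, with simplified stand-in types:

```ts
// Hypothetical sketch of the emit-time drop above.
interface ImportRecord { isUnused: boolean; path: string }
interface ExportStarStmt { importRecordIndex: number }

function emitExportStars(stmts: ExportStarStmt[], records: ImportRecord[]): string[] {
  const out: string[] = [];
  for (const s of stmts) {
    const record = records[s.importRecordIndex];
    if (record.isUnused) continue; // deferred by barrel optimization: dropped
    out.push(`export * from ${JSON.stringify(record.path)};`);
  }
  return out;
}
```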
@@ -60,6 +60,22 @@ pub fn convertStmtsForChunkForDevServer(
            const record = ast.import_records.mut(st.import_record_index);
            if (record.path.is_disabled) continue;

            if (record.flags.is_unused) {
                // Barrel optimization: this import was deferred (unused submodule).
                // Don't add to dep array, but declare the namespace ref as an
                // empty object so body code referencing it doesn't throw.
                if (st.star_name_loc != null or st.items.len > 0 or st.default_name != null) {
                    try stmts.inside_wrapper_prefix.appendNonDependency(Stmt.alloc(S.Local, .{
                        .kind = .k_var,
                        .decls = try G.Decl.List.fromSlice(allocator, &.{.{
                            .binding = Binding.alloc(allocator, B.Identifier{ .ref = st.namespace_ref }, stmt.loc),
                            .value = Expr.init(E.Object, .{}, stmt.loc),
                        }}),
                    }, stmt.loc));
                }
                continue;
            }

            const is_builtin = record.tag == .builtin or record.tag == .bun or record.tag == .runtime;
            const is_bare_import = st.star_name_loc == null and st.items.len == 0 and st.default_name == null;

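For intuition, this is a plausible shape of what that dev-server branch emits for a deferred import. The variable name is illustrative; the real output uses the bundler's generated namespace ref:

```ts
// Source (deferred because barrel optimization marked its record unused):
//   import { Beta } from "barrel-lib";
// Plausible dev-server output: the module is skipped in the dependency array,
// but the namespace ref is still declared as an empty object so any stray
// reference to it evaluates to undefined instead of throwing a ReferenceError.
var import_barrel_lib = {};
```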
@@ -1865,6 +1865,12 @@ pub const BundleOptions = struct {

    ignore_module_resolution_errors: bool = false,

    /// Package names whose barrel files should be optimized.
    /// When set, barrel files from these packages only load the submodules
    /// that are actually imported. Any file whose package.json sets
    /// `"sideEffects": false` is automatically a barrel candidate as well.
    optimize_imports: ?*const bun.StringSet = null,

    pub const ForceNodeEnv = enum {
        unspecified,
        development,

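The optimization only applies to pure barrel files. A sketch of the distinction the bundler draws; the file contents here are illustrative, not taken from the diff:

```ts
// pure-barrel.ts: every named export is a re-export, so unused ones can be
// deferred and their submodules skipped entirely.
export { Button } from "./button";
export { Modal } from "./modal";

// impure-barrel.ts: a local export means the file is treated normally,
// since evaluating `version` requires parsing the whole module anyway.
export { Button } from "./button";
export const version = "1.0.0";
```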
@@ -417,3 +417,135 @@ devTest("commonjs forms", {
    await c.expectMessage({ field: "6" });
  },
});

// --- Barrel optimization tests ---

devTest("barrel optimization skips unused submodules", {
  files: {
    "index.html": emptyHtmlFile({ scripts: ["index.ts"] }),
    "index.ts": `
      import { Alpha } from 'barrel-lib';
      console.log('got: ' + Alpha);
    `,
    "node_modules/barrel-lib/package.json": JSON.stringify({
      name: "barrel-lib",
      version: "1.0.0",
      main: "./index.js",
      sideEffects: false,
    }),
    "node_modules/barrel-lib/index.js": `
      export { Alpha } from './alpha.js';
      export { Beta } from './beta.js';
      export { Gamma } from './gamma.js';
    `,
    "node_modules/barrel-lib/alpha.js": `export const Alpha = "ALPHA";`,
    "node_modules/barrel-lib/beta.js": `export const Beta = <<<SYNTAX_ERROR>>>;`,
    "node_modules/barrel-lib/gamma.js": `export const Gamma = <<<SYNTAX_ERROR>>>;`,
  },
  async test(dev) {
    // Beta.js and Gamma.js have syntax errors.
    // If barrel optimization works, they are never parsed, so no error.
    await using c = await dev.client("/");
    await c.expectMessage("got: ALPHA");
  },
});

devTest("barrel optimization: adding a new import triggers reload", {
|
||||
files: {
|
||||
"index.html": emptyHtmlFile({ scripts: ["index.ts"] }),
|
||||
"index.ts": `
|
||||
import { Alpha } from 'barrel-lib';
|
||||
console.log('result: ' + Alpha);
|
||||
`,
|
||||
"node_modules/barrel-lib/package.json": JSON.stringify({
|
||||
name: "barrel-lib",
|
||||
version: "1.0.0",
|
||||
main: "./index.js",
|
||||
sideEffects: false,
|
||||
}),
|
||||
"node_modules/barrel-lib/index.js": `
|
||||
export { Alpha } from './alpha.js';
|
||||
export { Beta } from './beta.js';
|
||||
export { Gamma } from './gamma.js';
|
||||
`,
|
||||
"node_modules/barrel-lib/alpha.js": `export const Alpha = "ALPHA";`,
|
||||
"node_modules/barrel-lib/beta.js": `export const Beta = "BETA";`,
|
||||
"node_modules/barrel-lib/gamma.js": `export const Gamma = "GAMMA";`,
|
||||
},
|
||||
async test(dev) {
|
||||
await using c = await dev.client("/");
|
||||
await c.expectMessage("result: ALPHA");
|
||||
|
||||
// Add a second import from the barrel — Beta was previously deferred,
|
||||
// now needs to be loaded. The barrel file should be re-bundled with
|
||||
// Beta un-deferred.
|
||||
await c.expectReload(async () => {
|
||||
await dev.write(
|
||||
"index.ts",
|
||||
`
|
||||
import { Alpha, Beta } from 'barrel-lib';
|
||||
console.log('result: ' + Alpha + ' ' + Beta);
|
||||
`,
|
||||
);
|
||||
});
|
||||
await c.expectMessage("result: ALPHA BETA");
|
||||
|
||||
// Add a third import
|
||||
await c.expectReload(async () => {
|
||||
await dev.write(
|
||||
"index.ts",
|
||||
`
|
||||
import { Alpha, Beta, Gamma } from 'barrel-lib';
|
||||
console.log('result: ' + Alpha + ' ' + Beta + ' ' + Gamma);
|
||||
`,
|
||||
);
|
||||
});
|
||||
await c.expectMessage("result: ALPHA BETA GAMMA");
|
||||
},
|
||||
});
|
||||
|
||||
devTest("barrel optimization: multi-file imports preserved across rebuilds", {
|
||||
files: {
|
||||
"index.html": emptyHtmlFile({ scripts: ["index.ts"] }),
|
||||
"index.ts": `
|
||||
import { Alpha } from 'barrel-lib';
|
||||
import { value } from './other';
|
||||
console.log('result: ' + Alpha + ' ' + value);
|
||||
`,
|
||||
"other.ts": `
|
||||
import { Beta } from 'barrel-lib';
|
||||
export const value = Beta;
|
||||
`,
|
||||
"node_modules/barrel-lib/package.json": JSON.stringify({
|
||||
name: "barrel-lib",
|
||||
version: "1.0.0",
|
||||
main: "./index.js",
|
||||
sideEffects: false,
|
||||
}),
|
||||
"node_modules/barrel-lib/index.js": `
|
||||
export { Alpha } from './alpha.js';
|
||||
export { Beta } from './beta.js';
|
||||
export { Gamma } from './gamma.js';
|
||||
`,
|
||||
"node_modules/barrel-lib/alpha.js": `export const Alpha = "ALPHA";`,
|
||||
"node_modules/barrel-lib/beta.js": `export const Beta = "BETA";`,
|
||||
"node_modules/barrel-lib/gamma.js": `export const Gamma = "GAMMA";`,
|
||||
},
|
||||
async test(dev) {
|
||||
await using c = await dev.client("/");
|
||||
await c.expectMessage("result: ALPHA BETA");
|
||||
|
||||
// Edit only other.ts to also import Gamma. Alpha (from index.ts) must
|
||||
// still be available even though index.ts is not re-parsed.
|
||||
await c.expectReload(async () => {
|
||||
await dev.write(
|
||||
"other.ts",
|
||||
`
|
||||
import { Beta, Gamma } from 'barrel-lib';
|
||||
export const value = Beta + ' ' + Gamma;
|
||||
`,
|
||||
);
|
||||
});
|
||||
await c.expectMessage("result: ALPHA BETA GAMMA");
|
||||
},
|
||||
});
|
||||
|
||||
1431  test/bundler/bundler_barrel.test.ts  Normal file (diff suppressed because it is too large)
@@ -163,6 +163,8 @@ export interface BundlerTestInput {
  drop?: string[];
  /** Feature flags for dead-code elimination via `import { feature } from "bun:bundle"` */
  features?: string[];
  /** Package names whose barrel files should be optimized */
  optimizeImports?: string[];

  /** Use for resolve custom conditions */
  conditions?: string[];

@@ -447,6 +449,7 @@ function expectBundled(
  packages,
  drop = [],
  features = [],
  optimizeImports,
  files,
  footer,
  format,

@@ -716,6 +719,9 @@ function expectBundled(
  if (plugins) {
    throw new Error("plugins not possible in backend=CLI");
  }
  if (optimizeImports) {
    throw new Error("optimizeImports not possible in backend=CLI (API-only option)");
  }
  const cmd = (
    !ESBUILD
      ? [

@@ -1118,6 +1124,7 @@ function expectBundled(
  ignoreDCEAnnotations,
  drop,
  features,
  optimizeImports,
  define: define ?? {},
  throw: _throw ?? false,
  compile,