Compare commits

...

1 Commit

Author SHA1 Message Date
Claude Bot
5740da85ea fix(bundler): collect external imports from converted stmts for bytecode ModuleInfo
When building with `--compile --bytecode --minify --format esm`, named
imports from npm packages caused "Cannot find module" errors at runtime.

The root cause was in `postProcessJSChunk` section 2 which scanned
original AST statements to collect external imports for ModuleInfo. It
relied on `record.source_index.isValid()` to skip bundled imports, but
the async resolution pipeline may not patch `source_index` on import
records from transitive dependencies. This caused fully-inlined bundled
imports to be incorrectly recorded as external dependencies in
ModuleInfo, creating a mismatch with what JSC's parser sees in the
actual emitted code.

The fix extends DeclCollector to also capture `s_import` statements from
the **converted** output (after `convertStmtsForChunk`). Since bundled
imports are removed during conversion, only truly-external imports
survive. `postProcessJSChunk` now uses these collected imports instead of
scanning the original AST, ensuring ModuleInfo accurately reflects the
emitted code.

Closes #27454

Co-Authored-By: Claude <noreply@anthropic.com>
2026-02-26 09:51:38 +00:00
5 changed files with 123 additions and 63 deletions

View File

@@ -4690,6 +4690,15 @@ pub const CompileResult = union(enum) {
kind: Kind,
};
/// Import info collected from **converted** statements during parallel
/// printing. Only truly-external imports survive conversion (bundled ones
/// are removed by `convertStmtsForChunk`), so these accurately reflect
/// what the printer emits.
pub const ImportInfoCollected = struct {
import_record_index: u32,
source_index: u32,
};
javascript: struct {
source_index: Index.Int,
result: js_printer.PrintResult,
@@ -4697,6 +4706,11 @@ pub const CompileResult = union(enum) {
/// parallel printing. Used by postProcessJSChunk to populate ModuleInfo
/// without re-scanning the original (unconverted) AST.
decls: []const DeclInfo = &.{},
/// Import info collected from converted statements during parallel
/// printing. Used by postProcessJSChunk to populate ModuleInfo instead
/// of re-scanning the original (unconverted) AST, which may contain
/// bundled imports with unresolved source_index.
imports: []const ImportInfoCollected = &.{},
pub fn code(this: @This()) []const u8 {
return switch (this.result) {

View File

@@ -640,20 +640,21 @@ pub fn generateCodeForFileInChunkJS(
pub const DeclCollector = struct {
decls: std.ArrayListUnmanaged(CompileResult.DeclInfo) = .{},
imports: std.ArrayListUnmanaged(CompileResult.ImportInfoCollected) = .{},
allocator: std.mem.Allocator,
source_index: u32 = 0,
const CompileResult = bun.bundle_v2.CompileResult;
/// Collect top-level declarations from **converted** statements (after
/// `convertStmtsForChunk`). At that point, export statements have already
/// been transformed:
/// - `s_export_default` → `s_local` / `s_function` / `s_class`
/// - `s_export_clause` → removed entirely
/// - `s_export_from` / `s_export_star` → removed or converted to `s_import`
/// Collect top-level declarations and import statements from **converted**
/// statements (after `convertStmtsForChunk`). At that point:
/// - Export statements have been transformed (export default → var, etc.)
/// - Bundled imports have been removed by `shouldRemoveImportExportStmt`
/// - Only truly-external imports (non-bundled) remain as `s_import`
///
/// Remaining `s_import` statements (external, non-bundled) don't need
/// handling here; their bindings are recorded separately in
/// `postProcessJSChunk` by scanning the original AST import records.
/// This ensures that only imports actually present in the emitted code
/// are recorded in ModuleInfo, avoiding mismatches when import records
/// have unresolved `source_index` from the async resolution pipeline.
pub fn collectFromStmts(self: *DeclCollector, stmts: []const Stmt, r: renamer.Renamer, c: *LinkerContext) void {
for (stmts) |stmt| {
switch (stmt.data) {
@@ -677,6 +678,12 @@ pub const DeclCollector = struct {
}
}
},
.s_import => |s| {
self.imports.append(self.allocator, .{
.import_record_index = s.import_record_index,
.source_index = self.source_index,
}) catch return;
},
else => {},
}
}

View File

@@ -47,7 +47,7 @@ fn generateCompileResultForJSChunkImpl(worker: *ThreadPool.Worker, c: *LinkerCon
const runtimeRequireRef = if (c.options.output_format == .cjs) null else c.graph.symbols.follow(runtime_members.get("__require").?.ref);
const collect_decls = c.options.generate_bytecode_cache and c.options.output_format == .esm and c.options.compile;
var dc = DeclCollector{ .allocator = allocator };
var dc = DeclCollector{ .allocator = allocator, .source_index = part_range.source_index.get() };
const result = c.generateCodeForFileInChunkJS(
&buffer_writer,
@@ -80,6 +80,7 @@ fn generateCompileResultForJSChunkImpl(worker: *ThreadPool.Worker, c: *LinkerCon
.source_index = part_range.source_index.get(),
.result = result,
.decls = if (collect_decls) dc.decls.items else &.{},
.imports = if (collect_decls) dc.imports.items else &.{},
},
};
}

View File

@@ -130,69 +130,78 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu
}
}
// 2. Collect truly-external imports from the original AST. Bundled imports
// (where source_index is valid) are removed by convertStmtsForChunk and
// re-created as cross-chunk imports — those are already captured by the
// printer when it prints cross_chunk_prefix_stmts above. Only truly-external
// imports (node built-ins, etc.) survive as s_import in per-file parts and
// need recording here.
const all_parts = c.graph.ast.items(.parts);
const all_flags = c.graph.meta.items(.flags);
// 2. Collect truly-external imports from **converted** statements.
// DeclCollector captures s_import statements that survive
// convertStmtsForChunk (which removes bundled imports). This avoids
// scanning the original AST where import records may have unresolved
// source_index from the async resolution pipeline, causing phantom
// imports to be recorded in ModuleInfo that don't exist in the
// emitted code.
const all_import_records = c.graph.ast.items(.import_records);
for (chunk.content.javascript.parts_in_chunk_in_order) |part_range| {
if (all_flags[part_range.source_index.get()].wrap == .cjs) continue;
const source_parts = all_parts[part_range.source_index.get()].slice();
const source_import_records = all_import_records[part_range.source_index.get()].slice();
var part_i = part_range.part_index_begin;
while (part_i < part_range.part_index_end) : (part_i += 1) {
for (source_parts[part_i].stmts) |stmt| {
switch (stmt.data) {
.s_import => |s| {
const record = &source_import_records[s.import_record_index];
if (record.path.is_disabled) continue;
if (record.tag == .bun) continue;
// Skip bundled imports — these are converted to cross-chunk
// imports by the linker. The printer already recorded them
// when printing cross_chunk_prefix_stmts.
if (record.source_index.isValid()) continue;
for (chunk.compile_results_for_chunk) |cr| {
const imports = switch (cr) {
.javascript => |js| js.imports,
else => continue,
};
for (imports) |import_info| {
const source_import_records = all_import_records[import_info.source_index].slice();
if (import_info.import_record_index >= source_import_records.len) continue;
const record = &source_import_records[import_info.import_record_index];
if (record.path.is_disabled) continue;
if (record.tag == .bun) continue;
const import_path = record.path.text;
const irp_id = mi.str(import_path) catch continue;
mi.requestModule(irp_id, .none) catch continue;
if (s.default_name) |name| {
if (name.ref) |name_ref| {
const local_name = chunk.renamer.nameForSymbol(name_ref);
const local_name_id = mi.str(local_name) catch continue;
mi.addVar(local_name_id, .lexical) catch continue;
mi.addImportInfoSingle(irp_id, mi.str("default") catch continue, local_name_id, false) catch continue;
}
// Look up the original s_import statement from the AST to
// access default_name, items, and namespace_ref.
const source_parts = c.graph.ast.items(.parts)[import_info.source_index].slice();
const s_import = brk: {
for (source_parts) |part| {
for (part.stmts) |stmt| {
switch (stmt.data) {
.s_import => |s| {
if (s.import_record_index == import_info.import_record_index)
break :brk s;
},
else => {},
}
for (s.items) |item| {
if (item.name.ref) |name_ref| {
const local_name = chunk.renamer.nameForSymbol(name_ref);
const local_name_id = mi.str(local_name) catch continue;
mi.addVar(local_name_id, .lexical) catch continue;
mi.addImportInfoSingle(irp_id, mi.str(item.alias) catch continue, local_name_id, false) catch continue;
}
}
if (record.flags.contains_import_star) {
const local_name = chunk.renamer.nameForSymbol(s.namespace_ref);
const local_name_id = mi.str(local_name) catch continue;
mi.addVar(local_name_id, .lexical) catch continue;
mi.addImportInfoNamespace(irp_id, local_name_id) catch continue;
}
},
else => {},
}
}
continue;
};
const import_path = record.path.text;
const irp_id = mi.str(import_path) catch continue;
mi.requestModule(irp_id, .none) catch continue;
if (s_import.default_name) |name| {
if (name.ref) |name_ref| {
const local_name = chunk.renamer.nameForSymbol(name_ref);
const local_name_id = mi.str(local_name) catch continue;
mi.addVar(local_name_id, .lexical) catch continue;
mi.addImportInfoSingle(irp_id, mi.str("default") catch continue, local_name_id, false) catch continue;
}
}
for (s_import.items) |item| {
if (item.name.ref) |name_ref| {
const local_name = chunk.renamer.nameForSymbol(name_ref);
const local_name_id = mi.str(local_name) catch continue;
mi.addVar(local_name_id, .lexical) catch continue;
mi.addImportInfoSingle(irp_id, mi.str(item.alias) catch continue, local_name_id, false) catch continue;
}
}
if (record.flags.contains_import_star) {
const local_name = chunk.renamer.nameForSymbol(s_import.namespace_ref);
const local_name_id = mi.str(local_name) catch continue;
mi.addVar(local_name_id, .lexical) catch continue;
mi.addImportInfoNamespace(irp_id, local_name_id) catch continue;
}
}
}
// 3. Add wrapper-generated declarations (init_xxx, require_xxx) that are
// not in any part statement.
const all_flags = c.graph.meta.items(.flags);
const all_wrapper_refs = c.graph.ast.items(.wrapper_ref);
for (chunk.content.javascript.parts_in_chunk_in_order) |part_range| {
const source_index = part_range.source_index.get();

View File

@@ -172,6 +172,35 @@ describe("bundler", () => {
},
stdout: "a b",
},
{
// #27454: Named imports from packages with re-export index files.
// The package index imports from internal modules and re-exports them.
// When bundled, these internal imports are inlined, but the original AST
// import records may have unresolved source_index. The ModuleInfo must
// not record these phantom imports.
name: "PackageReExportNamedImport",
files: {
"/entry.ts": `
import { greet } from "fake-pkg";
console.log(greet("world"));
`,
"/node_modules/fake-pkg/package.json": `{"name":"fake-pkg","version":"1.0.0","type":"module","exports":{".":{"import":"./libesm/index.js"}}}`,
"/node_modules/fake-pkg/libesm/index.js": [
`import { greet } from './utils/greet.js';`,
`import { format } from './utils/format.js';`,
`import { upper } from './utils/upper.js';`,
`import { lower } from './utils/lower.js';`,
`import { trim } from './utils/trim.js';`,
`export { greet, format, upper, lower, trim };`,
].join("\n"),
"/node_modules/fake-pkg/libesm/utils/greet.js": `import { format } from './format.js';\nexport function greet(name) { return format("Hello, " + name + "!"); }`,
"/node_modules/fake-pkg/libesm/utils/format.js": `export function format(str) { return "[" + str + "]"; }`,
"/node_modules/fake-pkg/libesm/utils/upper.js": `export function upper(str) { return str.toUpperCase(); }`,
"/node_modules/fake-pkg/libesm/utils/lower.js": `export function lower(str) { return str.toLowerCase(); }`,
"/node_modules/fake-pkg/libesm/utils/trim.js": `export function trim(str) { return str.trim(); }`,
},
stdout: "[Hello, world!]",
},
];
for (const scenario of esmBytecodeScenarios) {