From 474e66462ebbd3e3fa27dcd9a2f663b602dad628 Mon Sep 17 00:00:00 2001
From: Jarred Sumner
Date: Wed, 11 Feb 2026 17:32:13 -0800
Subject: [PATCH] bundler: resolve barrel import targets without mutating
 import records

The barrel import scheduler patched resolved source indices from the
dev server's path map back into the import records. The dev server
intentionally leaves those source_indices unset, and other code
(IncrementalGraph, the printer) depends on that, so the path map is now
consulted as a read-only fallback and the records are left untouched.

Also restrict the is_unused source_index clearing in bundle_v2.zig to
non-dev-server builds. Under the dev server, is_unused is also set by
ConvertESMExportsForHmr deduplication, and clearing those
source_indices breaks module identity (e.g., __esModule on ESM
namespace objects).
---
 src/bundler/barrel_imports.zig | 58 +++++++++++++++++++---------------
 src/bundler/bundle_v2.zig      |  7 +++-
 2 files changed, 38 insertions(+), 27 deletions(-)

diff --git a/src/bundler/barrel_imports.zig b/src/bundler/barrel_imports.zig
index d10b50fd75..7f70a66111 100644
--- a/src/bundler/barrel_imports.zig
+++ b/src/bundler/barrel_imports.zig
@@ -231,17 +231,17 @@ pub fn scheduleBarrelDeferredImports(this: *BundleV2, result: *ParseTask.Result.
         const ni = ni_entry.value_ptr;
         if (ni.import_record_index >= file_import_records.len) continue;
         try named_ir_indices.put(named_ir_indices_alloc, ni.import_record_index, {});
-        const ir = &file_import_records.slice()[ni.import_record_index];
-        // In dev server mode, source_index may not be patched — resolve via path map.
-        if (!ir.source_index.isValid()) {
-            if (path_to_source_index_map) |map| {
-                if (map.getPath(&ir.path)) |src_idx| {
-                    ir.source_index.value = src_idx;
-                }
-            }
-        }
-        if (!ir.source_index.isValid()) continue;
-        const target = ir.source_index.get();
+        const ir = file_import_records.slice()[ni.import_record_index];
+        // In dev server mode, source_index may not be patched — resolve via
+        // path map as a read-only fallback. Do NOT write back to the import
+        // record — the dev server intentionally leaves source_indices unset
+        // and other code (IncrementalGraph, printer) depends on that.
+        const target = if (ir.source_index.isValid())
+            ir.source_index.get()
+        else if (path_to_source_index_map) |map|
+            map.getPath(&ir.path) orelse continue
+        else
+            continue;

         const gop = try this.requested_exports.getOrPut(this.allocator(), target);
         if (ni.alias_is_star) {
@@ -272,19 +272,16 @@ pub fn scheduleBarrelDeferredImports(this: *BundleV2, result: *ParseTask.Result.
     // meaning this is the sole reference. If the barrel already has a .partial
     // entry from a static import, the dynamic import is likely a secondary
     // (possibly circular) reference and should not escalate requirements.
-    for (file_import_records.slice(), 0..) |*ir, idx| {
-        if (!ir.source_index.isValid()) {
-            if (path_to_source_index_map) |map| {
-                if (map.getPath(&ir.path)) |src_idx| {
-                    ir.source_index.value = src_idx;
-                }
-            }
-        }
-        if (!ir.source_index.isValid()) continue;
+    for (file_import_records.slice(), 0..) |ir, idx| {
+        const target = if (ir.source_index.isValid())
+            ir.source_index.get()
+        else if (path_to_source_index_map) |map|
+            map.getPath(&ir.path) orelse continue
+        else
+            continue;
         if (ir.flags.is_internal) continue;
         if (named_ir_indices.contains(@intCast(idx))) continue;
         if (ir.flags.was_originally_bare_import) continue;
-        const target = ir.source_index.get();
         if (ir.kind == .require) {
             const gop = try this.requested_exports.getOrPut(this.allocator(), target);
             gop.value_ptr.* = .all;
@@ -310,23 +307,32 @@ pub fn scheduleBarrelDeferredImports(this: *BundleV2, result: *ParseTask.Result.
         const ni = ni_entry.value_ptr;
         if (ni.import_record_index >= file_import_records.len) continue;
         const ir = file_import_records.slice()[ni.import_record_index];
-        if (!ir.source_index.isValid()) continue;
+        const ir_target = if (ir.source_index.isValid())
+            ir.source_index.get()
+        else if (path_to_source_index_map) |map|
+            map.getPath(&ir.path) orelse continue
+        else
+            continue;
         if (ni.alias_is_star) {
-            try queue.append(queue_alloc, .{ .barrel_source_index = ir.source_index.get(), .alias = "", .is_star = true });
+            try queue.append(queue_alloc, .{ .barrel_source_index = ir_target, .alias = "", .is_star = true });
         } else if (ni.alias) |alias| {
-            try queue.append(queue_alloc, .{ .barrel_source_index = ir.source_index.get(), .alias = alias, .is_star = false });
+            try queue.append(queue_alloc, .{ .barrel_source_index = ir_target, .alias = alias, .is_star = false });
         }
     }

     // Add bare require/dynamic-import targets to BFS as star imports (matching
     // the seeding logic above — require always, dynamic only when sole reference).
     for (file_import_records.slice(), 0..) |ir, idx| {
-        if (!ir.source_index.isValid()) continue;
+        const target = if (ir.source_index.isValid())
+            ir.source_index.get()
+        else if (path_to_source_index_map) |map|
+            map.getPath(&ir.path) orelse continue
+        else
+            continue;
         if (ir.flags.is_internal) continue;
         if (named_ir_indices.contains(@intCast(idx))) continue;
         if (ir.flags.was_originally_bare_import) continue;
-        const target = ir.source_index.get();

         const is_all = if (this.requested_exports.get(target)) |re| re == .all else false;
         const should_add = ir.kind == .require or (ir.kind == .dynamic and is_all);
         if (should_add) {
diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index 410f70623c..d4f70a7793 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -3365,7 +3365,12 @@ pub const BundleV2 = struct {
             import_record.source_index = Index.runtime;
         }

-        if (import_record.flags.is_unused) {
+        // For non-dev-server builds, barrel-deferred records need their
+        // source_index cleared so they don't get linked. For dev server,
+        // skip this — is_unused is also set by ConvertESMExportsForHmr
+        // deduplication, and clearing those source_indices breaks module
+        // identity (e.g., __esModule on ESM namespace objects).
+        if (import_record.flags.is_unused and this.transpiler.options.dev_server == null) {
             import_record.source_index = Index.invalid;
         }