Compare commits

...

1 Commit

Author SHA1 Message Date
Claude Bot
7fd1eebaa9 Fix bundler cache to differentiate import attributes
This partially addresses the issue where importing the same file with
different import attributes (e.g., `with { type: "text" }`) was
incorrectly returning the same cached module.
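
A minimal sketch of the symptom, mirroring the test suite added in this commit (the `data.json` filename is illustrative):

```ts
// data.json contains: { "test": 123 }
import parsed from "./data.json";                    // JSON loader: parsed object
import raw from "./data.json" with { type: "text" }; // text loader: raw file contents

// Before this change the cache was keyed by path alone, so both imports
// resolved to the same module. With per-loader cache keys they differ:
console.log(typeof parsed);  // "object"
console.log(typeof raw);     // "string"
console.log(parsed !== raw); // true
```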

Changes:
- Modified PathToSourceIndexMap to use composite cache keys (path, loader); see the conceptual sketch after this list
- Updated bundle_v2.zig to pass loader to all cache operations
- Fixed DevServer file invalidation to clear all loader variants
- Updated LinkerContext.zig for HTML import cache lookup
- Added comprehensive test suite for import attributes
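
A conceptual sketch of the composite-key change from the first bullet. The real implementation is the Zig `CacheKey`/`CacheKeyContext` shown in the PathToSourceIndexMap.zig diff below; the names and string-key encoding here are illustrative only:

```ts
// Illustrative only: the real map lives in Zig (PathToSourceIndexMap.zig).
type Loader = "js" | "json" | "text" | "html" | "file";

// Old behavior: keyed by path alone, so the JSON and text variants of the
// same file collide on one entry.
const byPath = new Map<string, number>();
byPath.set("/app/data.json", 1);
byPath.set("/app/data.json", 2);
console.log(byPath.size); // 1 — the collision

// New behavior: the key combines path and loader, so each loader variant
// of the same file gets its own source index.
const key = (path: string, loader: Loader) => `${path}\u0000${loader}`;
const byPathAndLoader = new Map<string, number>();
byPathAndLoader.set(key("/app/data.json", "json"), 1);
byPathAndLoader.set(key("/app/data.json", "text"), 2);
console.log(byPathAndLoader.size); // 2 — separate cache entries
```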

Note: This fixes the bundler-level caching. The runtime module loader
in JSC still needs to be updated to fully resolve the issue. The
runtime changes require modifications to WebKit's ModuleLoader.js to
include import attributes in the module registry cache key.
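
The commit only describes that runtime change. As a rough, hypothetical sketch of the idea (this is not WebKit's actual ModuleLoader.js API), the registry key would need to incorporate the serialized import attributes rather than the specifier alone:

```ts
// Hypothetical sketch only — not WebKit's ModuleLoader.js API.
type ImportAttributes = Record<string, string>;

const registry = new Map<string, unknown>();

function registryKey(specifier: string, attributes: ImportAttributes = {}): string {
  // Serialize attributes deterministically so {} and { type: "text" } differ.
  const attrs = Object.keys(attributes)
    .sort()
    .map(k => `${k}=${attributes[k]}`)
    .join(",");
  return `${specifier}::${attrs}`;
}

// With specifier-only keys these two imports of data.json would collide;
// with attribute-aware keys they occupy distinct registry slots:
registry.set(registryKey("/app/data.json"), { parsed: true });
registry.set(registryKey("/app/data.json", { type: "text" }), '{"test":123}');
console.log(registry.size); // 2
```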

Test results show the bundler now creates separate modules (visible in the
bundled output), but runtime execution still shares a single module because
JSC's module cache does not yet consider import attributes.
2025-10-18 08:11:39 +00:00
5 changed files with 279 additions and 57 deletions

View File

@@ -1563,10 +1563,20 @@ pub fn IncrementalGraph(comptime side: bake.Side) type {
 const dirname = std.fs.path.dirname(abs_path) orelse abs_path;
 _ = bv2.transpiler.resolver.bustDirCache(dirname);
-// Additionally, clear the cached entry of the file from the path to
-// source index map.
-for (&bv2.graph.build_graphs.values) |*map| {
-    _ = map.remove(abs_path);
+// Additionally, clear all cached entries for this file from the path to
+// source index map (all loader variants).
+const PathToSourceIndexMap = @import("../../bundler/PathToSourceIndexMap.zig");
+for (&bv2.graph.build_graphs.values) |*path_map| {
+    var iter = path_map.map.iterator();
+    var to_remove = std.BoundedArray(PathToSourceIndexMap.CacheKey, 16){};
+    while (iter.next()) |entry| {
+        if (bun.strings.eql(entry.key_ptr.path, abs_path)) {
+            to_remove.append(entry.key_ptr.*) catch break;
+        }
+    }
+    for (to_remove.slice()) |key| {
+        _ = path_map.map.remove(key);
+    }
 }
 }

View File

@@ -307,7 +307,7 @@ pub const LinkerContext = struct {
 for (server_source_indices.slice()) |html_import| {
     const source = &input_files[html_import];
-    const source_index = map.get(source.path.text) orelse {
+    const source_index = map.get(source.path.text, .html) orelse {
         @panic("Assertion failed: HTML import file not found in pathToSourceIndexMap");
     };

View File

@@ -5,38 +5,66 @@ const PathToSourceIndexMap = @This();
 /// We assume it's arena allocated.
 map: Map = .{},
-const Map = bun.StringHashMapUnmanaged(Index.Int);
+/// Cache key that combines path and loader to differentiate
+/// the same file imported with different import attributes.
+pub const CacheKey = struct {
+    path: []const u8,
+    loader: options.Loader,
-pub fn getPath(this: *const PathToSourceIndexMap, path: *const Fs.Path) ?Index.Int {
-    return this.get(path.text);
+    pub fn hash(self: CacheKey, hasher: anytype) void {
+        hasher.update(self.path);
+        hasher.update(std.mem.asBytes(&self.loader));
+    }
+    pub fn eql(a: CacheKey, b: CacheKey, comptime _: @TypeOf(.{})) bool {
+        return a.loader == b.loader and bun.strings.eql(a.path, b.path);
+    }
+};
+const CacheKeyContext = struct {
+    pub fn hash(_: @This(), key: CacheKey) u32 {
+        var hasher = std.hash.Wyhash.init(0);
+        key.hash(&hasher);
+        return @truncate(hasher.final());
+    }
+    pub fn eql(_: @This(), a: CacheKey, b: CacheKey) bool {
+        return CacheKey.eql(a, b, .{});
+    }
+};
+const Map = std.HashMapUnmanaged(CacheKey, Index.Int, CacheKeyContext, std.hash_map.default_max_load_percentage);
+pub fn getPath(this: *const PathToSourceIndexMap, path: *const Fs.Path, loader: options.Loader) ?Index.Int {
+    return this.get(path.text, loader);
 }
-pub fn get(this: *const PathToSourceIndexMap, text: []const u8) ?Index.Int {
-    return this.map.get(text);
+pub fn get(this: *const PathToSourceIndexMap, text: []const u8, loader: options.Loader) ?Index.Int {
+    return this.map.get(.{ .path = text, .loader = loader });
 }
-pub fn putPath(this: *PathToSourceIndexMap, allocator: std.mem.Allocator, path: *const Fs.Path, value: Index.Int) bun.OOM!void {
-    try this.map.put(allocator, path.text, value);
+pub fn putPath(this: *PathToSourceIndexMap, allocator: std.mem.Allocator, path: *const Fs.Path, loader: options.Loader, value: Index.Int) bun.OOM!void {
+    try this.put(allocator, path.text, loader, value);
 }
-pub fn put(this: *PathToSourceIndexMap, allocator: std.mem.Allocator, text: []const u8, value: Index.Int) bun.OOM!void {
-    try this.map.put(allocator, text, value);
+pub fn put(this: *PathToSourceIndexMap, allocator: std.mem.Allocator, text: []const u8, loader: options.Loader, value: Index.Int) bun.OOM!void {
+    try this.map.put(allocator, .{ .path = text, .loader = loader }, value);
 }
-pub fn getOrPutPath(this: *PathToSourceIndexMap, allocator: std.mem.Allocator, path: *const Fs.Path) bun.OOM!Map.GetOrPutResult {
-    return this.getOrPut(allocator, path.text);
+pub fn getOrPutPath(this: *PathToSourceIndexMap, allocator: std.mem.Allocator, path: *const Fs.Path, loader: options.Loader) bun.OOM!Map.GetOrPutResult {
+    return this.getOrPut(allocator, path.text, loader);
 }
-pub fn getOrPut(this: *PathToSourceIndexMap, allocator: std.mem.Allocator, text: []const u8) bun.OOM!Map.GetOrPutResult {
-    return try this.map.getOrPut(allocator, text);
+pub fn getOrPut(this: *PathToSourceIndexMap, allocator: std.mem.Allocator, text: []const u8, loader: options.Loader) bun.OOM!Map.GetOrPutResult {
+    return try this.map.getOrPut(allocator, .{ .path = text, .loader = loader });
 }
-pub fn remove(this: *PathToSourceIndexMap, text: []const u8) bool {
-    return this.map.remove(text);
+pub fn remove(this: *PathToSourceIndexMap, text: []const u8, loader: options.Loader) bool {
+    return this.map.remove(.{ .path = text, .loader = loader });
 }
-pub fn removePath(this: *PathToSourceIndexMap, path: *const Fs.Path) bool {
-    return this.remove(path.text);
+pub fn removePath(this: *PathToSourceIndexMap, path: *const Fs.Path, loader: options.Loader) bool {
+    return this.remove(path.text, loader);
 }
 const std = @import("std");
@@ -44,3 +72,4 @@ const std = @import("std");
 const bun = @import("bun");
 const Fs = bun.fs;
 const Index = bun.ast.Index;
+const options = bun.options;

View File

@@ -519,7 +519,8 @@ pub const BundleV2 = struct {
 }
 const secondary_path = secondary_paths[source_index];
 if (secondary_path.len > 0) {
-    const secondary_source_index = path_to_source_index_map.get(secondary_path) orelse continue;
+    const secondary_loader = this.graph.input_files.items(.loader)[source_index];
+    const secondary_source_index = path_to_source_index_map.get(secondary_path, secondary_loader) orelse continue;
     import_record.source_index = Index.init(secondary_source_index);
     // Keep path in sync for determinism, diagnostics, and dev tooling.
     import_record.path = this.graph.input_files.items(.source)[secondary_source_index].path;
@@ -654,18 +655,18 @@ pub const BundleV2 = struct {
 path.assertPrettyIsValid();
 path.assertFilePathIsAbsolute();
-const entry = bun.handleOom(this.pathToSourceIndexMap(target).getOrPut(this.allocator(), path.text));
+const loader: Loader = brk: {
+    const record: *ImportRecord = &this.graph.ast.items(.import_records)[import_record.importer_source_index].slice()[import_record.import_record_index];
+    if (record.loader) |out_loader| {
+        break :brk out_loader;
+    }
+    break :brk path.loader(&transpiler.options.loaders) orelse options.Loader.file;
+    // HTML is only allowed at the entry point.
+};
+const entry = bun.handleOom(this.pathToSourceIndexMap(target).getOrPut(this.allocator(), path.text, loader));
 if (!entry.found_existing) {
     path.* = bun.handleOom(this.pathWithPrettyInitialized(path.*, target));
-    entry.key_ptr.* = path.text;
-    const loader: Loader = brk: {
-        const record: *ImportRecord = &this.graph.ast.items(.import_records)[import_record.importer_source_index].slice()[import_record.import_record_index];
-        if (record.loader) |out_loader| {
-            break :brk out_loader;
-        }
-        break :brk path.loader(&transpiler.options.loaders) orelse options.Loader.file;
-        // HTML is only allowed at the entry point.
-    };
+    entry.key_ptr.* = .{ .path = path.text, .loader = loader };
     const idx = this.enqueueParseTask(
         &resolve_result,
         &.{
@@ -699,9 +700,9 @@ pub const BundleV2 = struct {
     .browser => .{ this.pathToSourceIndexMap(this.transpiler.options.target), this.pathToSourceIndexMap(.bake_server_components_ssr) },
     .bake_server_components_ssr => .{ this.pathToSourceIndexMap(this.transpiler.options.target), this.pathToSourceIndexMap(.browser) },
 };
-bun.handleOom(a.put(this.allocator(), entry.key_ptr.*, entry.value_ptr.*));
+bun.handleOom(a.put(this.allocator(), entry.key_ptr.path, loader, entry.value_ptr.*));
 if (this.framework.?.server_components.?.separate_ssr_graph)
-    bun.handleOom(b.put(this.allocator(), entry.key_ptr.*, entry.value_ptr.*));
+    bun.handleOom(b.put(this.allocator(), entry.key_ptr.path, loader, entry.value_ptr.*));
 }
 } else {
 out_source_index = Index.init(entry.value_ptr.*);
@@ -719,24 +720,24 @@ pub const BundleV2 = struct {
 target: options.Target,
 ) !void {
 // TODO: plugins with non-file namespaces
-const entry = try this.pathToSourceIndexMap(target).getOrPut(this.allocator(), path_slice);
-if (entry.found_existing) {
-    return;
-}
 const t = this.transpilerForTarget(target);
 const result = t.resolveEntryPoint(path_slice) catch
     return;
 var path = result.path_pair.primary;
-this.incrementScanCounter();
-const source_index = Index.source(this.graph.input_files.len);
+const loader = brk: {
+    const default = path.loader(&this.transpiler.options.loaders) orelse .file;
+    break :brk default;
+};
+const entry = try this.pathToSourceIndexMap(target).getOrPut(this.allocator(), path_slice, loader);
+if (entry.found_existing) {
+    return;
+}
+this.incrementScanCounter();
+const source_index = Index.source(this.graph.input_files.len);
 path = bun.handleOom(this.pathWithPrettyInitialized(path, target));
 path.assertPrettyIsValid();
-entry.key_ptr.* = path.text;
+entry.key_ptr.* = .{ .path = path.text, .loader = loader };
 entry.value_ptr.* = source_index.get();
 bun.handleOom(this.graph.ast.append(this.allocator(), JSAst.empty));
@@ -784,21 +785,20 @@ pub const BundleV2 = struct {
 var path = result.path() orelse return null;
 path.assertFilePathIsAbsolute();
-const entry = try this.pathToSourceIndexMap(target).getOrPut(this.allocator(), path.text);
+const loader = brk: {
+    const loader = path.loader(&this.transpiler.options.loaders) orelse .file;
+    break :brk loader;
+};
+const entry = try this.pathToSourceIndexMap(target).getOrPut(this.allocator(), path.text, loader);
 if (entry.found_existing) {
     return null;
 }
 this.incrementScanCounter();
 const source_index = Index.source(this.graph.input_files.len);
-const loader = brk: {
-    const loader = path.loader(&this.transpiler.options.loaders) orelse .file;
-    break :brk loader;
-};
 path.* = bun.handleOom(this.pathWithPrettyInitialized(path.*, target));
 path.assertPrettyIsValid();
-entry.key_ptr.* = path.text;
+entry.key_ptr.* = .{ .path = path.text, .loader = loader };
 entry.value_ptr.* = source_index.get();
 bun.handleOom(this.graph.ast.append(this.allocator(), JSAst.empty));
@@ -996,7 +996,7 @@ pub const BundleV2 = struct {
 // try this.graph.entry_points.append(allocator, Index.runtime);
 try this.graph.ast.append(this.allocator(), JSAst.empty);
-try this.pathToSourceIndexMap(this.transpiler.options.target).put(this.allocator(), "bun:wrap", Index.runtime.get());
+try this.pathToSourceIndexMap(this.transpiler.options.target).put(this.allocator(), "bun:wrap", .js, Index.runtime.get());
 var runtime_parse_task = try this.allocator().create(ParseTask);
 runtime_parse_task.* = rt.parse_task;
 runtime_parse_task.ctx = this;
@@ -2450,19 +2450,19 @@ pub const BundleV2 = struct {
     path.namespace = result.namespace;
 }
+const loader = path.loader(&this.transpiler.options.loaders) orelse options.Loader.file;
 const existing = this.pathToSourceIndexMap(resolve.import_record.original_target)
-    .getOrPutPath(this.allocator(), &path) catch |err| bun.handleOom(err);
+    .getOrPutPath(this.allocator(), &path, loader) catch |err| bun.handleOom(err);
 if (!existing.found_existing) {
     this.free_list.appendSlice(&.{ result.namespace, result.path }) catch {};
     path = bun.handleOom(this.pathWithPrettyInitialized(path, resolve.import_record.original_target));
-    existing.key_ptr.* = path.text;
+    existing.key_ptr.* = .{ .path = path.text, .loader = loader };
     // We need to parse this
     const source_index = Index.init(@as(u32, @intCast(this.graph.ast.len)));
     existing.value_ptr.* = source_index.get();
     out_source_index = source_index;
     this.graph.ast.append(this.allocator(), JSAst.empty) catch unreachable;
-    const loader = path.loader(&this.transpiler.options.loaders) orelse options.Loader.file;
     this.graph.input_files.append(this.allocator(), .{
         .source = .{
@@ -3429,7 +3429,7 @@ pub const BundleV2 = struct {
 const is_html_entrypoint = import_record_loader == .html and target.isServerSide() and this.transpiler.options.dev_server == null;
-if (this.pathToSourceIndexMap(target).get(path.text)) |id| {
+if (this.pathToSourceIndexMap(target).get(path.text, import_record_loader)) |id| {
     if (this.transpiler.options.dev_server != null and loader != .html) {
         import_record.path = this.graph.input_files.items(.source)[id].path;
     } else {
@@ -3549,7 +3549,7 @@ pub const BundleV2 = struct {
 try graph.ast.append(this.allocator(), ast_for_html_entrypoint);
 import_record.source_index = fake_input_file.source.index;
-try this.pathToSourceIndexMap(target).put(this.allocator(), path_text, fake_input_file.source.index.get());
+try this.pathToSourceIndexMap(target).put(this.allocator(), path_text, .html, fake_input_file.source.index.get());
 try graph.html_imports.server_source_indices.append(this.allocator(), fake_input_file.source.index.get());
 this.ensureClientTranspiler();
 }
@@ -3683,7 +3683,7 @@ pub const BundleV2 = struct {
 const loader = value.loader orelse value.path.loader(&this.transpiler.options.loaders) orelse options.Loader.file;
 const is_html_entrypoint = loader == .html and original_target.isServerSide() and this.transpiler.options.dev_server == null;
 const map: *PathToSourceIndexMap = if (is_html_entrypoint) this.pathToSourceIndexMap(.browser) else path_to_source_index_map;
-const existing = map.getOrPut(this.allocator(), entry.key_ptr.*) catch unreachable;
+const existing = map.getOrPut(this.allocator(), entry.key_ptr.*, loader) catch unreachable;
 // Originally, we attempted to avoid the "dual package
 // hazard" right here by checking if pathToSourceIndexMap
@@ -3774,15 +3774,18 @@ pub const BundleV2 = struct {
 }
 for (import_records.slice(), 0..) |*record, i| {
-    if (path_to_source_index_map.getPath(&record.path)) |source_index| {
+    const record_loader = record.loader orelse record.path.loader(&this.transpiler.options.loaders) orelse options.Loader.file;
+    if (path_to_source_index_map.getPath(&record.path, record_loader)) |source_index| {
         if (save_import_record_source_index or input_file_loaders[source_index] == .css)
             record.source_index.value = source_index;
         if (getRedirectId(result.ast.redirect_import_record_index)) |compare| {
            if (compare == @as(u32, @truncate(i))) {
+                const result_loader = input_file_loaders[result.source.index.get()];
                path_to_source_index_map.put(
                    this.allocator(),
                    result.source.path.text,
+                    result_loader,
                    source_index,
                ) catch unreachable;
            }
@@ -3836,9 +3839,11 @@ pub const BundleV2 = struct {
     break :brk .{ server_index, Index.invalid.get() };
 };
+const result_loader = graph.input_files.items(.loader)[result.source.index.get()];
 graph.pathToSourceIndexMap(result.ast.target).put(
     this.allocator(),
     result.source.path.text,
+    result_loader,
     reference_source_index,
 ) catch |err| bun.handleOom(err);

View File

@@ -0,0 +1,178 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
test("import attributes should create separate module cache entries - json vs text", async () => {
using dir = tempDir("import-attrs-cache", {
"data.json": JSON.stringify({ test: 123 }),
"test.ts": `
import json from "./data.json";
import text from "./data.json" with { type: "text" };
console.log("JSON type:", typeof json);
console.log("JSON value:", JSON.stringify(json));
console.log("Text type:", typeof text);
console.log("Text value:", text);
console.log("Different?:", json !== text);
`,
});
await using proc = Bun.spawn({
cmd: [bunExe(), "test.ts"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
expect(stderr).not.toContain("error");
expect(exitCode).toBe(0);
// Verify the json import returns an object
expect(stdout).toContain("JSON type: object");
expect(stdout).toContain('"test":123');
// Verify the text import returns a string
expect(stdout).toContain("Text type: string");
// Verify they are different values
expect(stdout).toContain("Different?: true");
});
test("import attributes should create separate module cache entries - dynamic imports", async () => {
using dir = tempDir("import-attrs-dynamic", {
"data.json": JSON.stringify({ dynamic: 456 }),
"test.ts": `
(async () => {
const json = await import("./data.json");
const text = await import("./data.json", { with: { type: "text" } });
console.log("JSON default:", typeof json.default);
console.log("JSON value:", JSON.stringify(json.default));
console.log("Text default:", typeof text.default);
console.log("Different?:", json.default !== text.default);
})();
`,
});
await using proc = Bun.spawn({
cmd: [bunExe(), "test.ts"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
expect(stderr).not.toContain("error");
expect(exitCode).toBe(0);
// Verify the json import returns an object
expect(stdout).toContain("JSON default: object");
expect(stdout).toContain('"dynamic":456');
// Verify the text import returns a string
expect(stdout).toContain("Text default: string");
// Verify they are different values
expect(stdout).toContain("Different?: true");
});
test("import attributes should work in bundler - multiple loaders for same file", async () => {
using dir = tempDir("import-attrs-bundle", {
"data.json": JSON.stringify({ bundled: 789 }),
"entry.ts": `
import jsonData from "./data.json";
import textData from "./data.json" with { type: "text" };
export const json = jsonData;
export const text = textData;
export const different = jsonData !== textData;
`,
});
await using buildProc = Bun.spawn({
cmd: [bunExe(), "build", "entry.ts", "--outfile=out.js", "--format=esm"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [buildStdout, buildStderr, buildExitCode] = await Promise.all([
buildProc.stdout.text(),
buildProc.stderr.text(),
buildProc.exited,
]);
expect(buildStderr).not.toContain("error");
expect(buildExitCode).toBe(0);
// Now run the bundled output
await using runProc = Bun.spawn({
cmd: [bunExe(), "out.js"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [runStdout, runStderr, runExitCode] = await Promise.all([
runProc.stdout.text(),
runProc.stderr.text(),
runProc.exited,
]);
expect(runStderr).not.toContain("error");
expect(runExitCode).toBe(0);
// Read the bundled output to verify both versions are included
const bundledCode = await Bun.file(String(dir) + "/out.js").text();
// Should contain the parsed JSON object
expect(bundledCode).toMatch(/bundled.*789/);
// Should also contain the raw text version
expect(bundledCode).toContain('"bundled":789');
});
test("same file with no attributes vs with attributes should be different", async () => {
using dir = tempDir("import-attrs-default", {
"data.json": JSON.stringify({ value: "test" }),
"test.ts": `
// Default import (should use .json loader based on extension)
import defaultImport from "./data.json";
// Explicit text import
import textImport from "./data.json" with { type: "text" };
console.log("Default type:", typeof defaultImport);
console.log("Text type:", typeof textImport);
console.log("Are different?:", defaultImport !== textImport);
`,
});
await using proc = Bun.spawn({
cmd: [bunExe(), "test.ts"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
expect(stderr).not.toContain("error");
expect(exitCode).toBe(0);
// Default should be object (parsed JSON)
expect(stdout).toContain("Default type: object");
// Explicit text should be string
expect(stdout).toContain("Text type: string");
// They should be different
expect(stdout).toContain("Are different?: true");
});