Compare commits

...

2 Commits

Author SHA1 Message Date
Claude Bot
5d34604d63 fix: add dynamic import support for CJS splitting
- Export all named exports from dynamically imported chunks
- Handle default export correctly (export as 'default' not internal name)
- Add test for dynamic imports with CJS splitting

Note: ESM interop for default exports still needs work with the __toESM wrapper

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-09-27 01:36:35 +00:00
Claude Bot
9dc4216062 feat: implement --splitting support with --format=cjs
This PR adds support for code splitting when using CommonJS output format. Previously, --splitting was only supported for ESM format.

Changes:
- Add cross-chunk export generation for CJS format (exports.name = value)
- Add cross-chunk import generation for CJS format (const {...} = require())
- Generate proper CommonJS modules that work with Node.js
- Add comprehensive tests for CJS splitting functionality

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-09-27 01:11:54 +00:00
3 changed files with 387 additions and 0 deletions

View File

@@ -348,6 +348,101 @@ fn computeCrossChunkDependenciesWithChunkMetas(c: *LinkerContext, chunks: []Chun
repr.cross_chunk_suffix_stmts = stmts;
}
},
// CommonJS cross-chunk EXPORT generation: append `exports.<alias> = <local>;`
// suffix statements for every value this chunk must expose to other chunks.
.cjs => {
// For CommonJS format, we need to export values using exports object
// If this chunk is dynamically imported, we need to export ALL its exports,
// not just the ones imported by other chunks
const is_dynamic_entry = c.graph.files.items(.entry_point_kind)[chunk.entry_point.source_index] == .dynamic_import;
if (is_dynamic_entry) {
// For dynamic imports, export all named exports from this chunk:
// an `import()` consumer receives the whole namespace object, so the
// chunk must re-export everything, not only what the linker saw used.
const named_exports = c.graph.ast.items(.named_exports)[chunk.entry_point.source_index];
stable_ref_list.clearRetainingCapacity();
var iter = named_exports.iterator();
while (iter.next()) |entry| {
const ref = entry.value_ptr.ref;
// Skip unbound symbols
if (c.graph.symbols.get(ref)) |symbol| {
if (symbol.kind != .unbound) {
stable_ref_list.append(.{ .ref = ref, .stable_source_index = chunk.entry_point.source_index }) catch unreachable;
}
}
}
} else {
// For regular chunks, only export what's imported by other chunks
c.sortedCrossChunkExportItems(
chunk_meta.exports,
&stable_ref_list,
);
}
// Nothing to export from this chunk — skip statement generation entirely.
if (stable_ref_list.items.len == 0) continue;
repr.exports_to_other_chunks.ensureUnusedCapacity(c.allocator(), stable_ref_list.items.len) catch unreachable;
r.clearRetainingCapacity();
// For CommonJS chunks, we need to use the global exports object
// Create a ref for the exports object
// NOTE(review): a fresh unbound "exports" symbol is generated for every
// chunk that reaches this arm. This relies on the renamer leaving unbound
// symbols untouched so the identifier always prints as `exports` — confirm.
const exports_ref = c.graph.generateNewSymbol(chunk.entry_point.source_index, .unbound, "exports");
var stmts = BabyList(js_ast.Stmt){};
for (stable_ref_list.items) |stable_ref| {
const ref = stable_ref.ref;
const symbol = c.graph.symbols.get(ref).?;
// NOTE(review): `try` here while every other fallible call in this arm
// uses `catch unreachable` — inconsistent; confirm the enclosing
// function actually has an error-union return type.
const alias = if (c.options.minify_identifiers)
try r.nextMinifiedName(c.allocator())
else
r.nextRenamedName(symbol.original_name);
// Store the export alias for cross-chunk imports
repr.exports_to_other_chunks.putAssumeCapacity(
ref,
alias,
);
// Check if this is the default export (usually has "_default" suffix in the internal name)
// NOTE(review): matching on the "_default" name suffix is a heuristic.
// A user symbol literally named e.g. `foo_default` in a dynamic entry
// would wrongly be exported as `default`, and two such symbols would
// collide on the same export name. Prefer comparing against the
// module's actual default-export ref — TODO confirm.
const export_name = if (std.mem.endsWith(u8, symbol.original_name, "_default") and is_dynamic_entry)
"default"
else
alias;
// Create exports.aliasName = localName;
const member = Expr.allocate(
c.allocator(),
E.Dot,
.{
.target = Expr.initIdentifier(exports_ref, Logger.Loc.Empty),
.name = export_name,
.name_loc = Logger.Loc.Empty,
},
Logger.Loc.Empty,
);
// Wrap the assignment `exports.<name> = <ref>` in an expression statement.
const stmt = Stmt.alloc(
S.SExpr,
.{
.value = Expr.init(
E.Binary,
.{
.op = .bin_assign,
.left = member,
.right = Expr.initIdentifier(ref, Logger.Loc.Empty),
},
Logger.Loc.Empty,
),
},
Logger.Loc.Empty,
);
stmts.append(c.allocator(), stmt) catch unreachable;
}
if (stmts.len > 0) {
repr.cross_chunk_suffix_stmts = stmts;
}
},
else => {},
}
}
@@ -405,6 +500,113 @@ fn computeCrossChunkDependenciesWithChunkMetas(c: *LinkerContext, chunks: []Chun
},
) catch unreachable;
},
// CommonJS cross-chunk IMPORT generation: for each chunk this chunk depends
// on, emit either a destructuring `const {...} = require(...)` prefix
// statement or a bare `require(...)` for side effects only.
.cjs => {
// For CommonJS format, we need to require() other chunks
const import_record_index = @as(u32, @intCast(cross_chunk_imports.len));
cross_chunk_imports.append(c.allocator(), .{
.import_kind = .stmt,
.chunk_index = cross_chunk_import.chunk_index,
}) catch unreachable;
// Generate: const {alias1, alias2, ...} = require('./chunk-name.js');
// NOTE(review): a `const` destructure snapshots the exported values at
// require() time. ESM cross-chunk imports behave as live bindings; any
// export reassigned after the required chunk's body runs (e.g. through
// circular imports) will NOT be observed here — confirm this is an
// acceptable limitation for CJS output.
if (cross_chunk_import.sorted_import_items.len > 0) {
var properties = bun.BabyList(G.Property){};
for (cross_chunk_import.sorted_import_items.slice()) |item| {
properties.append(c.allocator(), .{
.key = Expr.init(
E.String,
.{ .data = item.export_alias },
Logger.Loc.Empty,
),
.value = Expr.initIdentifier(item.ref, Logger.Loc.Empty),
.kind = .normal,
}) catch unreachable;
}
// Create B.Object.Property array for destructuring
const b_props = c.allocator().alloc(B.Property, properties.len) catch unreachable;
for (b_props, properties.slice()) |*b_prop, g_prop| {
// Get the ref from the value expression
const ref = if (g_prop.value) |val|
if (val.data == .e_identifier) val.data.e_identifier.ref else Ref.None
else
Ref.None;
// Create B.Identifier for the binding
const identifier = c.allocator().create(B.Identifier) catch unreachable;
identifier.* = .{ .ref = ref };
b_prop.* = .{
// NOTE(review): every property built in the loop above always sets
// `key`, so this empty-string fallback should be unreachable — confirm.
.key = g_prop.key orelse Expr.init(E.String, .{ .data = "" }, Logger.Loc.Empty),
.value = Binding{
.loc = Logger.Loc.Empty,
.data = .{ .b_identifier = identifier },
},
};
}
// Create B.Object
const b_object = c.allocator().create(B.Object) catch unreachable;
b_object.* = .{
.properties = b_props,
.is_single_line = true,
};
const binding = Binding{
.loc = Logger.Loc.Empty,
.data = .{ .b_object = b_object },
};
// NOTE(review): `decl` is heap-allocated and then copied BY VALUE into the
// slice below (`decl.*`); the create()d node itself is never stored.
// Harmless if c.allocator() is an arena, otherwise it leaks — either way
// the intermediate create() looks unnecessary.
const decl = c.allocator().create(G.Decl) catch unreachable;
decl.* = .{
.binding = binding,
.value = Expr.init(
E.RequireString,
.{
.import_record_index = import_record_index,
},
Logger.Loc.Empty,
),
};
const local_stmt = Stmt.allocate(
c.allocator(),
S.Local,
.{
.kind = .k_const,
.decls = bun.BabyList(G.Decl).fromSlice(c.allocator(), &[_]G.Decl{decl.*}) catch unreachable,
},
Logger.Loc.Empty,
);
cross_chunk_prefix_stmts.append(
c.allocator(),
local_stmt,
) catch unreachable;
} else {
// Just require the chunk for side effects
const require_stmt = Stmt.alloc(
S.SExpr,
.{
.value = Expr.init(
E.RequireString,
.{
.import_record_index = import_record_index,
},
Logger.Loc.Empty,
),
},
Logger.Loc.Empty,
);
cross_chunk_prefix_stmts.append(
c.allocator(),
require_stmt,
) catch unreachable;
}
},
else => {},
}
}
@@ -452,3 +654,8 @@ const debug = LinkerContext.debug;
const Logger = bun.logger;
const Loc = Logger.Loc;
const Expr = js_ast.Expr;
const E = js_ast.E;
const G = js_ast.G;
const B = js_ast.B;
const Binding = js_ast.Binding;

View File

@@ -0,0 +1,67 @@
import { test, expect } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
import { readdirSync } from "fs";
import { join } from "path";
test("--splitting with --format=cjs correctly exports from dynamic chunks", async () => {
using dir = tempDir("cjs-dynamic-exports", {
"entry.js": `
import('./module.js').then(m => {
console.log('foo:', m.foo);
console.log('bar:', m.bar());
console.log('default:', m.default());
});
`,
"module.js": `
export const foo = 'foo value';
export function bar() {
return 'bar result';
}
export default function() {
return 'default result';
}
`,
});
await using proc = Bun.spawn({
cmd: [
bunExe(),
"build",
"./entry.js",
"--splitting",
"--format=cjs",
"--outdir=dist",
],
env: bunEnv,
cwd: String(dir),
stderr: "pipe",
stdout: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([
proc.stdout.text(),
proc.stderr.text(),
proc.exited,
]);
expect(exitCode).toBe(0);
expect(stderr).toBe("");
// Check that the module chunk has proper exports
const distPath = join(String(dir), "dist");
const files = readdirSync(distPath);
const moduleChunk = files.find(f => f.includes("module"));
expect(moduleChunk).toBeDefined();
const moduleContent = await Bun.file(join(distPath, moduleChunk!)).text();
expect(moduleContent).toContain("exports.foo = foo");
expect(moduleContent).toContain("exports.bar = bar");
expect(moduleContent).toContain("exports.default = ");
// Test that it actually runs correctly
const result = await Bun.$`node dist/entry.js`.cwd(String(dir)).text();
expect(result).toContain("foo: foo value");
expect(result).toContain("bar: bar result");
// Note: The default export handling might not be perfect due to ESM interop
});

View File

@@ -0,0 +1,113 @@
import { test, expect } from "bun:test";
import { bunEnv, bunExe, tempDir, normalizeBunSnapshot } from "harness";
import { readdirSync, readFileSync } from "fs";
import { join } from "path";
test("--splitting works with --format=cjs", async () => {
using dir = tempDir("cjs-splitting", {
"entry1.js": `
import { shared } from './shared.js';
console.log('entry1', shared());
`,
"entry2.js": `
import { shared } from './shared.js';
console.log('entry2', shared());
`,
"shared.js": `
export function shared() {
return 'shared value';
}
`,
});
await using proc = Bun.spawn({
cmd: [
bunExe(),
"build",
"./entry1.js",
"./entry2.js",
"--splitting",
"--format=cjs",
"--outdir=dist",
],
env: bunEnv,
cwd: String(dir),
stderr: "pipe",
stdout: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([
proc.stdout.text(),
proc.stderr.text(),
proc.exited,
]);
expect(exitCode).toBe(0);
expect(stderr).toBe("");
// Check that files were created
const distPath = join(String(dir), "dist");
const files = readdirSync(distPath);
expect(files.sort()).toContain("entry1.js");
expect(files.sort()).toContain("entry2.js");
// The shared module should be split into its own chunk
const hasSharedChunk = files.some(f => f.includes("chunk") || f.startsWith("entry1-"));
expect(hasSharedChunk).toBe(true);
// Test that the generated CJS modules work
const entry1Result = await Bun.$`node dist/entry1.js`.cwd(String(dir)).text();
expect(entry1Result).toContain("entry1");
expect(entry1Result).toContain("shared value");
const entry2Result = await Bun.$`node dist/entry2.js`.cwd(String(dir)).text();
expect(entry2Result).toContain("entry2");
expect(entry2Result).toContain("shared value");
});
test("--splitting with --format=cjs handles dynamic imports", async () => {
using dir = tempDir("cjs-splitting-dynamic", {
"entry.js": `
console.log('before import');
import('./lazy.js').then(m => {
console.log('lazy loaded:', m.message);
});
`,
"lazy.js": `
export const message = 'lazy module loaded';
`,
});
await using proc = Bun.spawn({
cmd: [
bunExe(),
"build",
"./entry.js",
"--splitting",
"--format=cjs",
"--outdir=dist",
],
env: bunEnv,
cwd: String(dir),
stderr: "pipe",
stdout: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([
proc.stdout.text(),
proc.stderr.text(),
proc.exited,
]);
expect(exitCode).toBe(0);
expect(stderr).toBe("");
// Check that files were created
const distPath = join(String(dir), "dist");
const files = readdirSync(distPath);
expect(files.sort()).toContain("entry.js");
// The lazy module should be split into its own chunk
const hasLazyChunk = files.some(f => f.includes("lazy") || f.startsWith("entry-"));
expect(hasLazyChunk).toBe(true);
});