diff --git a/src/StandaloneModuleGraph.zig b/src/StandaloneModuleGraph.zig
index 0da83a40e6..9b045a1fad 100644
--- a/src/StandaloneModuleGraph.zig
+++ b/src/StandaloneModuleGraph.zig
@@ -431,6 +431,27 @@ pub const StandaloneModuleGraph = struct {
                 }
             };
 
+            if (comptime bun.Environment.is_canary or bun.Environment.isDebug) {
+                if (bun.getenvZ("BUN_FEATURE_FLAG_DUMP_CODE")) |dump_code_dir| {
+                    const buf = bun.path_buffer_pool.get();
+                    defer bun.path_buffer_pool.put(buf);
+                    const dest_z = bun.path.joinAbsStringBufZ(dump_code_dir, buf, &.{dest_path}, .auto);
+
+                    // Scoped block to handle dump failures without skipping module emission
+                    dump: {
+                        const file = bun.sys.File.makeOpen(dest_z, bun.O.WRONLY | bun.O.CREAT | bun.O.TRUNC, 0o664).unwrap() catch |err| {
+                            Output.prettyErrorln("error: failed to open {s}: {s}", .{ dest_path, @errorName(err) });
+                            break :dump;
+                        };
+                        defer file.close();
+                        file.writeAll(output_file.value.buffer.bytes).unwrap() catch |err| {
+                            Output.prettyErrorln("error: failed to write {s}: {s}", .{ dest_path, @errorName(err) });
+                            break :dump;
+                        };
+                    }
+                }
+            }
+
             var module = CompiledModuleGraphFile{
                 .name = string_builder.fmtAppendCountZ("{s}{s}", .{
                     prefix,
diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig
index e45dc06831..9264eeafa6 100644
--- a/src/bake/DevServer.zig
+++ b/src/bake/DevServer.zig
@@ -2334,6 +2334,7 @@ pub fn finalizeBundle(
         result.chunks,
         null,
         false,
+        false,
     );
 
     // Create an entry for this file.
diff --git a/src/bundler/Chunk.zig b/src/bundler/Chunk.zig
index ac17d003f3..cdf6eec74c 100644
--- a/src/bundler/Chunk.zig
+++ b/src/bundler/Chunk.zig
@@ -142,6 +142,7 @@ pub const Chunk = struct {
         chunk: *Chunk,
         chunks: []Chunk,
         display_size: ?*usize,
+        force_absolute_path: bool,
         enable_source_map_shifts: bool,
     ) bun.OOM!CodeResult {
         return switch (enable_source_map_shifts) {
@@ -153,6 +154,7 @@
                 chunk,
                 chunks,
                 display_size,
+                force_absolute_path,
                 source_map_shifts,
             ),
         };
@@ -167,10 +169,13 @@
         chunk: *Chunk,
         chunks: []Chunk,
         display_size: ?*usize,
+        force_absolute_path: bool,
         comptime enable_source_map_shifts: bool,
     ) bun.OOM!CodeResult {
         const additional_files = graph.input_files.items(.additional_files);
         const unique_key_for_additional_files = graph.input_files.items(.unique_key_for_additional_file);
+        const relative_platform_buf = bun.path_buffer_pool.get();
+        defer bun.path_buffer_pool.put(relative_platform_buf);
         switch (this.*) {
             .pieces => |*pieces| {
                 const entry_point_chunks_for_scb = linker_graph.files.items(.entry_point_chunk_index);
@@ -224,10 +229,10 @@
                         const cheap_normalizer = cheapPrefixNormalizer(
                             import_prefix,
-                            if (from_chunk_dir.len == 0)
+                            if (from_chunk_dir.len == 0 or force_absolute_path)
                                 file_path
                             else
-                                bun.path.relativePlatform(from_chunk_dir, file_path, .posix, false),
+                                bun.path.relativePlatformBuf(relative_platform_buf, from_chunk_dir, file_path, .posix, false),
                         );
                         count += cheap_normalizer[0].len + cheap_normalizer[1].len;
                     },
@@ -316,10 +321,10 @@
                         bun.path.platformToPosixInPlace(u8, @constCast(file_path));
                         const cheap_normalizer = cheapPrefixNormalizer(
                             import_prefix,
-                            if (from_chunk_dir.len == 0)
+                            if (from_chunk_dir.len == 0 or force_absolute_path)
                                 file_path
                             else
-                                bun.path.relativePlatform(from_chunk_dir, file_path, .posix, false),
+                                bun.path.relativePlatformBuf(relative_platform_buf, from_chunk_dir, file_path, .posix, false),
                         );
                         if (cheap_normalizer[0].len > 0) {
diff --git a/src/bundler/linker_context/generateChunksInParallel.zig b/src/bundler/linker_context/generateChunksInParallel.zig
index 1cc1a05bf1..dcb091fb0c 100644
--- a/src/bundler/linker_context/generateChunksInParallel.zig
+++ b/src/bundler/linker_context/generateChunksInParallel.zig
@@ -340,6 +340,7 @@ pub fn generateChunksInParallel(
                 chunk,
                 chunks,
                 &display_size,
+                c.resolver.opts.compile and !chunk.is_browser_chunk_from_server_build,
                 chunk.content.sourcemap(c.options.source_maps) != .none,
             );
             var code_result = _code_result catch @panic("Failed to allocate memory for output file");
diff --git a/src/bundler/linker_context/writeOutputFilesToDisk.zig b/src/bundler/linker_context/writeOutputFilesToDisk.zig
index 2592cb57af..e49fd8c7e1 100644
--- a/src/bundler/linker_context/writeOutputFilesToDisk.zig
+++ b/src/bundler/linker_context/writeOutputFilesToDisk.zig
@@ -73,6 +73,7 @@ pub fn writeOutputFilesToDisk(
             chunk,
             chunks,
             &display_size,
+            c.resolver.opts.compile and !chunk.is_browser_chunk_from_server_build,
            chunk.content.sourcemap(c.options.source_maps) != .none,
        ) catch |err| bun.Output.panic("Failed to create output chunk: {s}", .{@errorName(err)});
diff --git a/src/cli/build_command.zig b/src/cli/build_command.zig
index 6637e7007f..5590ef1581 100644
--- a/src/cli/build_command.zig
+++ b/src/cli/build_command.zig
@@ -96,12 +96,6 @@ pub const BuildCommand = struct {
         var was_renamed_from_index = false;
 
         if (ctx.bundler_options.compile) {
-            if (ctx.bundler_options.code_splitting) {
-                Output.prettyErrorln("error: cannot use --compile with --splitting", .{});
-                Global.exit(1);
-                return;
-            }
-
             if (ctx.bundler_options.outdir.len > 0) {
                 Output.prettyErrorln("error: cannot use --compile with --outdir", .{});
                 Global.exit(1);
diff --git a/test/bundler/bundler_compile_splitting.test.ts b/test/bundler/bundler_compile_splitting.test.ts
new file mode 100644
index 0000000000..f80d0bfdc9
--- /dev/null
+++ b/test/bundler/bundler_compile_splitting.test.ts
@@ -0,0 +1,40 @@
+import { describe } from "bun:test";
+import { itBundled } from "./expectBundled";
+
+describe("bundler", () => {
+  describe("compile with splitting", () => {
+    itBundled("compile/splitting/RelativePathsAcrossChunks", {
+      compile: true,
+      splitting: true,
+      backend: "cli",
+      files: {
+        "/src/app/entry.ts": /* js */ `
+          console.log('app entry');
+          import('../components/header').then(m => m.render());
+        `,
+        "/src/components/header.ts": /* js */ `
+          export async function render() {
+            console.log('header rendering');
+            const nav = await import('./nav/menu');
+            nav.show();
+          }
+        `,
+        "/src/components/nav/menu.ts": /* js */ `
+          export async function show() {
+            console.log('menu showing');
+            const items = await import('./items');
+            console.log('items:', items.list);
+          }
+        `,
+        "/src/components/nav/items.ts": /* js */ `
+          export const list = ['home', 'about', 'contact'].join(',');
+        `,
+      },
+      entryPoints: ["/src/app/entry.ts"],
+      outdir: "/build",
+      run: {
+        stdout: "app entry\nheader rendering\nmenu showing\nitems: home,about,contact",
+      },
+    });
+  });
+});
diff --git a/test/bundler/expectBundled.ts b/test/bundler/expectBundled.ts
index a45de6d53a..972f5feff8 100644
--- a/test/bundler/expectBundled.ts
+++ b/test/bundler/expectBundled.ts
@@ -524,7 +524,15 @@ function expectBundled(
   if (metafile === true) metafile = "/metafile.json";
   if (bundleErrors === true) bundleErrors = {};
   if (bundleWarnings === true) bundleWarnings = {};
-  const useOutFile = generateOutput == false ? false : outfile ? true : outdir ? false : entryPoints.length === 1;
+  const useOutFile = compile
+    ? true
+    : generateOutput == false
+      ? false
+      : outfile
+        ? true
+        : outdir
+          ? false
+          : entryPoints.length === 1;
   if (bundling === false && entryPoints.length > 1) {
     throw new Error("bundling:false only supports a single entry point");
   }
@@ -1087,14 +1095,16 @@
       define: define ?? {},
       throw: _throw ?? false,
       compile,
-      jsx: jsx ? {
-        runtime: jsx.runtime,
-        importSource: jsx.importSource,
-        factory: jsx.factory,
-        fragment: jsx.fragment,
-        sideEffects: jsx.sideEffects,
-        development: jsx.development,
-      } : undefined,
+      jsx: jsx
+        ? {
+            runtime: jsx.runtime,
+            importSource: jsx.importSource,
+            factory: jsx.factory,
+            fragment: jsx.fragment,
+            sideEffects: jsx.sideEffects,
+            development: jsx.development,
+          }
+        : undefined,
     } as BuildConfig;
 
     if (dotenv) {