Mirror of https://github.com/oven-sh/bun (synced 2026-02-26 11:37:26 +01:00)
Compare commits: 4 commits, `claude/fix` ... `claude/fix`
| Author | SHA1 | Date |
|---|---|---|
| | 5740da85ea | |
| | 32a89c4334 | |
| | c643e0fad8 | |
| | 2222aa9f47 | |
````diff
@@ -33,7 +33,7 @@ const stream = await renderToReadableStream(<Component message="Hello from serve
 Combining this with `Bun.serve()`, we get a simple SSR HTTP server:
 
-```tsx server.ts icon="/icons/typescript.svg"
+```tsx server.tsx icon="/icons/typescript.svg"
 Bun.serve({
   async fetch() {
     const stream = await renderToReadableStream(<Component message="Hello from server!" />);
````
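The rename matters because the snippet contains JSX, which is only parsed in `.tsx`/`.jsx` files. For context, a self-contained version of the documented example might look like the sketch below; only the `Bun.serve()` body appears in the hunk, so the `Component` definition and the `react-dom/server` import are assumptions:

```tsx
import { renderToReadableStream } from "react-dom/server";

// Assumed component shape; the docs hunk only shows the serve() body.
function Component({ message }: { message: string }) {
  return <h1>{message}</h1>;
}

Bun.serve({
  async fetch() {
    const stream = await renderToReadableStream(<Component message="Hello from server!" />);
    return new Response(stream, {
      headers: { "Content-Type": "text/html" },
    });
  },
});
```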
````diff
@@ -144,9 +144,15 @@ static JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES functionFuzzilli(JSC::JSGlob
         WTF::String output = arg1.toWTFString(globalObject);
         RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode(JSC::jsUndefined()));
 
-        FILE* f = fdopen(REPRL_DWFD, "w");
-        fprintf(f, "%s\n", output.utf8().data());
-        fflush(f);
+        // Use a static FILE* to avoid repeatedly calling fdopen (which
+        // duplicates the descriptor and leaks) and to gracefully handle
+        // the case where REPRL_DWFD is not open (i.e. running outside
+        // the fuzzer harness).
+        static FILE* f = fdopen(REPRL_DWFD, "w");
+        if (f) {
+            fprintf(f, "%s\n", output.utf8().data());
+            fflush(f);
+        }
     }
 }
````
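The same open-once, degrade-gracefully pattern, sketched in TypeScript for illustration (the descriptor number and helper name are hypothetical, not Bun API): initialize the resource on first use, remember a failure, and make every later call a no-op instead of crashing.

```ts
import { writeSync } from "node:fs";

const REPRL_DWFD = 103; // hypothetical: the fuzzer's output descriptor

// undefined = not yet probed, null = unavailable (outside the harness)
let fd: number | null | undefined;

function fuzzilliPrint(message: string): void {
  if (fd === undefined) {
    try {
      writeSync(REPRL_DWFD, ""); // probe the descriptor exactly once
      fd = REPRL_DWFD;
    } catch {
      fd = null; // running outside the fuzzer harness: degrade to a no-op
    }
  }
  if (fd !== null) writeSync(fd, message + "\n");
}
```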
````diff
@@ -2179,15 +2179,67 @@ pub const BundleV2 = struct {
                 output_file.is_executable = true;
             }
 
+            // Write external sourcemap files next to the compiled executable and
+            // keep them in the output array. Destroy all other non-entry-point files.
+            // With --splitting, there can be multiple sourcemap files (one per chunk).
+            var kept: usize = 0;
             for (output_files.items, 0..) |*current, i| {
-                if (i != entry_point_index) {
+                if (i == entry_point_index) {
+                    output_files.items[kept] = current.*;
+                    kept += 1;
+                } else if (result == .success and current.output_kind == .sourcemap and current.value == .buffer) {
+                    const sourcemap_bytes = current.value.buffer.bytes;
+                    if (sourcemap_bytes.len > 0) {
+                        // Derive the .map filename from the sourcemap's own dest_path,
+                        // placed in the same directory as the compiled executable.
+                        const map_basename = if (current.dest_path.len > 0)
+                            bun.path.basename(current.dest_path)
+                        else
+                            bun.path.basename(bun.handleOom(std.fmt.allocPrint(bun.default_allocator, "{s}.map", .{full_outfile_path})));
+
+                        const sourcemap_full_path = if (dirname.len == 0 or strings.eqlComptime(dirname, "."))
+                            bun.handleOom(bun.default_allocator.dupe(u8, map_basename))
+                        else
+                            bun.handleOom(std.fmt.allocPrint(bun.default_allocator, "{s}{c}{s}", .{ dirname, std.fs.path.sep, map_basename }));
+
+                        // Write the sourcemap file to disk next to the executable
+                        var pathbuf: bun.PathBuffer = undefined;
+                        const write_path = if (Environment.isWindows) sourcemap_full_path else map_basename;
+                        switch (bun.jsc.Node.fs.NodeFS.writeFileWithPathBuffer(
+                            &pathbuf,
+                            .{
+                                .data = .{ .buffer = .{
+                                    .buffer = .{
+                                        .ptr = @constCast(sourcemap_bytes.ptr),
+                                        .len = @as(u32, @truncate(sourcemap_bytes.len)),
+                                        .byte_len = @as(u32, @truncate(sourcemap_bytes.len)),
+                                    },
+                                } },
+                                .encoding = .buffer,
+                                .dirfd = .fromStdDir(root_dir),
+                                .file = .{ .path = .{
+                                    .string = bun.PathString.init(write_path),
+                                } },
+                            },
+                        )) {
+                            .err => |err| {
+                                bun.Output.err(err, "failed to write sourcemap file '{s}'", .{write_path});
+                                current.deinit();
+                            },
+                            .result => {
+                                current.dest_path = sourcemap_full_path;
+                                output_files.items[kept] = current.*;
+                                kept += 1;
+                            },
+                        }
+                    } else {
+                        current.deinit();
+                    }
+                } else {
                     current.deinit();
                 }
             }
 
-            const entry_point_output_file = output_files.swapRemove(entry_point_index);
-            output_files.items.len = 1;
-            output_files.items[0] = entry_point_output_file;
+            output_files.items.len = kept;
 
             return result;
         }
````
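In user-facing terms (the API shape here matches the tests added later in this compare): a compile build with `sourcemap: "external"` now keeps the sourcemap in `outputs` and leaves the `.map` file on disk next to the executable, instead of discarding every non-entry-point file.

```ts
const result = await Bun.build({
  entrypoints: ["./app.ts"],
  compile: true,          // standalone executable
  sourcemap: "external",  // separate .map file, written next to the binary
});

// outputs[0] is no longer guaranteed to be the executable, so look up by kind:
const exe = result.outputs.find(o => o.kind === "entry-point")!;
const map = result.outputs.find(o => o.kind === "sourcemap")!;
// exe.path and map.path sit in the same directory, e.g. ./app and ./app.map
```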
````diff
@@ -4638,6 +4690,15 @@ pub const CompileResult = union(enum) {
         kind: Kind,
     };
 
+    /// Import info collected from **converted** statements during parallel
+    /// printing. Only truly-external imports survive conversion (bundled ones
+    /// are removed by `convertStmtsForChunk`), so these accurately reflect
+    /// what the printer emits.
+    pub const ImportInfoCollected = struct {
+        import_record_index: u32,
+        source_index: u32,
+    };
+
     javascript: struct {
         source_index: Index.Int,
         result: js_printer.PrintResult,
````
````diff
@@ -4645,6 +4706,11 @@ pub const CompileResult = union(enum) {
         /// parallel printing. Used by postProcessJSChunk to populate ModuleInfo
         /// without re-scanning the original (unconverted) AST.
         decls: []const DeclInfo = &.{},
+        /// Import info collected from converted statements during parallel
+        /// printing. Used by postProcessJSChunk to populate ModuleInfo instead
+        /// of re-scanning the original (unconverted) AST, which may contain
+        /// bundled imports with unresolved source_index.
+        imports: []const ImportInfoCollected = &.{},
 
         pub fn code(this: @This()) []const u8 {
             return switch (this.result) {
````
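The record collected per surviving import statement is just a pair of indices; a rough TypeScript rendering of the Zig struct above (illustrative only, not actual Bun API):

```ts
// Rough TypeScript equivalent of CompileResult.ImportInfoCollected.
interface ImportInfoCollected {
  importRecordIndex: number; // index into the source file's import records
  sourceIndex: number;       // which source file the s_import came from
}
```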
````diff
@@ -640,20 +640,21 @@ pub fn generateCodeForFileInChunkJS(
 
 pub const DeclCollector = struct {
     decls: std.ArrayListUnmanaged(CompileResult.DeclInfo) = .{},
+    imports: std.ArrayListUnmanaged(CompileResult.ImportInfoCollected) = .{},
     allocator: std.mem.Allocator,
+    source_index: u32 = 0,
 
     const CompileResult = bun.bundle_v2.CompileResult;
 
-    /// Collect top-level declarations from **converted** statements (after
-    /// `convertStmtsForChunk`). At that point, export statements have already
-    /// been transformed:
-    /// - `s_export_default` → `s_local` / `s_function` / `s_class`
-    /// - `s_export_clause` → removed entirely
-    /// - `s_export_from` / `s_export_star` → removed or converted to `s_import`
+    /// Collect top-level declarations and import statements from **converted**
+    /// statements (after `convertStmtsForChunk`). At that point:
+    /// - Export statements have been transformed (export default → var, etc.)
+    /// - Bundled imports have been removed by `shouldRemoveImportExportStmt`
+    /// - Only truly-external imports (non-bundled) remain as `s_import`
     ///
-    /// Remaining `s_import` statements (external, non-bundled) don't need
-    /// handling here; their bindings are recorded separately in
-    /// `postProcessJSChunk` by scanning the original AST import records.
+    /// This ensures that only imports actually present in the emitted code
+    /// are recorded in ModuleInfo, avoiding mismatches when import records
+    /// have unresolved `source_index` from the async resolution pipeline.
     pub fn collectFromStmts(self: *DeclCollector, stmts: []const Stmt, r: renamer.Renamer, c: *LinkerContext) void {
         for (stmts) |stmt| {
             switch (stmt.data) {
````
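To ground the doc comment: here, roughly, is what the collector sees before and after conversion, sketched with hypothetical module names (the pre-conversion state is shown in comments):

```ts
// Before convertStmtsForChunk, the module's top level contained:
//   import { join } from "node:path";   // truly external
//   import { helper } from "./helper";  // bundled into the same chunk
//   export default function main() {}
//   export { helper as h };

// After conversion: the statements the collector actually iterates over.
import { join } from "node:path"; // survives as s_import and is recorded
function helper(p: string) {      // inlined from ./helper; its import is gone
  return p;
}
function main() {}                // s_export_default became s_function
// the export clause was removed entirely
```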
````diff
@@ -677,6 +678,12 @@ pub const DeclCollector = struct {
                         }
                     }
                 },
+                .s_import => |s| {
+                    self.imports.append(self.allocator, .{
+                        .import_record_index = s.import_record_index,
+                        .source_index = self.source_index,
+                    }) catch return;
+                },
                 else => {},
             }
         }
````
````diff
@@ -47,7 +47,7 @@ fn generateCompileResultForJSChunkImpl(worker: *ThreadPool.Worker, c: *LinkerCon
     const runtimeRequireRef = if (c.options.output_format == .cjs) null else c.graph.symbols.follow(runtime_members.get("__require").?.ref);
 
     const collect_decls = c.options.generate_bytecode_cache and c.options.output_format == .esm and c.options.compile;
-    var dc = DeclCollector{ .allocator = allocator };
+    var dc = DeclCollector{ .allocator = allocator, .source_index = part_range.source_index.get() };
 
     const result = c.generateCodeForFileInChunkJS(
         &buffer_writer,
````
````diff
@@ -80,6 +80,7 @@ fn generateCompileResultForJSChunkImpl(worker: *ThreadPool.Worker, c: *LinkerCon
             .source_index = part_range.source_index.get(),
             .result = result,
             .decls = if (collect_decls) dc.decls.items else &.{},
+            .imports = if (collect_decls) dc.imports.items else &.{},
         },
     };
 }
````
````diff
@@ -130,69 +130,78 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu
         }
     }
 
-    // 2. Collect truly-external imports from the original AST. Bundled imports
-    // (where source_index is valid) are removed by convertStmtsForChunk and
-    // re-created as cross-chunk imports — those are already captured by the
-    // printer when it prints cross_chunk_prefix_stmts above. Only truly-external
-    // imports (node built-ins, etc.) survive as s_import in per-file parts and
-    // need recording here.
-    const all_parts = c.graph.ast.items(.parts);
-    const all_flags = c.graph.meta.items(.flags);
+    // 2. Collect truly-external imports from **converted** statements.
+    // DeclCollector captures s_import statements that survive
+    // convertStmtsForChunk (which removes bundled imports). This avoids
+    // scanning the original AST where import records may have unresolved
+    // source_index from the async resolution pipeline, causing phantom
+    // imports to be recorded in ModuleInfo that don't exist in the
+    // emitted code.
     const all_import_records = c.graph.ast.items(.import_records);
-    for (chunk.content.javascript.parts_in_chunk_in_order) |part_range| {
-        if (all_flags[part_range.source_index.get()].wrap == .cjs) continue;
-        const source_parts = all_parts[part_range.source_index.get()].slice();
-        const source_import_records = all_import_records[part_range.source_index.get()].slice();
-        var part_i = part_range.part_index_begin;
-        while (part_i < part_range.part_index_end) : (part_i += 1) {
-            for (source_parts[part_i].stmts) |stmt| {
-                switch (stmt.data) {
-                    .s_import => |s| {
-                        const record = &source_import_records[s.import_record_index];
-                        if (record.path.is_disabled) continue;
-                        if (record.tag == .bun) continue;
-                        // Skip bundled imports — these are converted to cross-chunk
-                        // imports by the linker. The printer already recorded them
-                        // when printing cross_chunk_prefix_stmts.
-                        if (record.source_index.isValid()) continue;
-
-                        const import_path = record.path.text;
-                        const irp_id = mi.str(import_path) catch continue;
-                        mi.requestModule(irp_id, .none) catch continue;
-
-                        if (s.default_name) |name| {
-                            if (name.ref) |name_ref| {
-                                const local_name = chunk.renamer.nameForSymbol(name_ref);
-                                const local_name_id = mi.str(local_name) catch continue;
-                                mi.addVar(local_name_id, .lexical) catch continue;
-                                mi.addImportInfoSingle(irp_id, mi.str("default") catch continue, local_name_id, false) catch continue;
-                            }
-                        }
-
-                        for (s.items) |item| {
-                            if (item.name.ref) |name_ref| {
-                                const local_name = chunk.renamer.nameForSymbol(name_ref);
-                                const local_name_id = mi.str(local_name) catch continue;
-                                mi.addVar(local_name_id, .lexical) catch continue;
-                                mi.addImportInfoSingle(irp_id, mi.str(item.alias) catch continue, local_name_id, false) catch continue;
-                            }
-                        }
-
-                        if (record.flags.contains_import_star) {
-                            const local_name = chunk.renamer.nameForSymbol(s.namespace_ref);
-                            const local_name_id = mi.str(local_name) catch continue;
-                            mi.addVar(local_name_id, .lexical) catch continue;
-                            mi.addImportInfoNamespace(irp_id, local_name_id) catch continue;
-                        }
-                    },
-                    else => {},
-                }
-            }
-        }
-    }
+    for (chunk.compile_results_for_chunk) |cr| {
+        const imports = switch (cr) {
+            .javascript => |js| js.imports,
+            else => continue,
+        };
+        for (imports) |import_info| {
+            const source_import_records = all_import_records[import_info.source_index].slice();
+            if (import_info.import_record_index >= source_import_records.len) continue;
+            const record = &source_import_records[import_info.import_record_index];
+            if (record.path.is_disabled) continue;
+            if (record.tag == .bun) continue;
+
+            // Look up the original s_import statement from the AST to
+            // access default_name, items, and namespace_ref.
+            const source_parts = c.graph.ast.items(.parts)[import_info.source_index].slice();
+            const s_import = brk: {
+                for (source_parts) |part| {
+                    for (part.stmts) |stmt| {
+                        switch (stmt.data) {
+                            .s_import => |s| {
+                                if (s.import_record_index == import_info.import_record_index)
+                                    break :brk s;
+                            },
+                            else => {},
+                        }
+                    }
+                }
+                continue;
+            };
+
+            const import_path = record.path.text;
+            const irp_id = mi.str(import_path) catch continue;
+            mi.requestModule(irp_id, .none) catch continue;
+
+            if (s_import.default_name) |name| {
+                if (name.ref) |name_ref| {
+                    const local_name = chunk.renamer.nameForSymbol(name_ref);
+                    const local_name_id = mi.str(local_name) catch continue;
+                    mi.addVar(local_name_id, .lexical) catch continue;
+                    mi.addImportInfoSingle(irp_id, mi.str("default") catch continue, local_name_id, false) catch continue;
+                }
+            }
+
+            for (s_import.items) |item| {
+                if (item.name.ref) |name_ref| {
+                    const local_name = chunk.renamer.nameForSymbol(name_ref);
+                    const local_name_id = mi.str(local_name) catch continue;
+                    mi.addVar(local_name_id, .lexical) catch continue;
+                    mi.addImportInfoSingle(irp_id, mi.str(item.alias) catch continue, local_name_id, false) catch continue;
+                }
+            }
+
+            if (record.flags.contains_import_star) {
+                const local_name = chunk.renamer.nameForSymbol(s_import.namespace_ref);
+                const local_name_id = mi.str(local_name) catch continue;
+                mi.addVar(local_name_id, .lexical) catch continue;
+                mi.addImportInfoNamespace(irp_id, local_name_id) catch continue;
+            }
+        }
+    }
 
     // 3. Add wrapper-generated declarations (init_xxx, require_xxx) that are
     // not in any part statement.
+    const all_flags = c.graph.meta.items(.flags);
    const all_wrapper_refs = c.graph.ast.items(.wrapper_ref);
     for (chunk.content.javascript.parts_in_chunk_in_order) |part_range| {
         const source_index = part_range.source_index.get();
````
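To make the "phantom import" failure mode concrete: a named import from a bundled package is inlined and must not appear in ModuleInfo, while a node built-in import survives verbatim in the chunk and must. A hypothetical entry module and its emitted shape (the `fake-pkg` name is borrowed from the regression test further down):

```ts
// entry.ts, input to the bundler
import { greet } from "fake-pkg";        // bundled: inlined, no s_import survives
import { readFileSync } from "node:fs";  // truly external: survives verbatim

console.log(greet(readFileSync("name.txt", "utf8")));

// Emitted chunk (sketch): only the external import remains, so only it may
// be recorded in ModuleInfo.
//   import { readFileSync } from "node:fs";
//   function greet(name) { return "[Hello, " + name + "!]"; }
//   console.log(greet(readFileSync("name.txt", "utf8")));
```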
````diff
@@ -546,6 +546,57 @@ pub const BuildCommand = struct {
             Global.exit(1);
         }
 
+        // Write external sourcemap files next to the compiled executable.
+        // With --splitting, there can be multiple .map files (one per chunk).
+        if (this_transpiler.options.source_map == .external) {
+            for (output_files) |f| {
+                if (f.output_kind == .sourcemap and f.value == .buffer) {
+                    const sourcemap_bytes = f.value.buffer.bytes;
+                    if (sourcemap_bytes.len == 0) continue;
+
+                    // Use the sourcemap's own dest_path basename if available,
+                    // otherwise fall back to {outfile}.map
+                    const map_basename = if (f.dest_path.len > 0)
+                        bun.path.basename(f.dest_path)
+                    else brk: {
+                        const exe_base = bun.path.basename(outfile);
+                        break :brk if (compile_target.os == .windows and !strings.hasSuffixComptime(exe_base, ".exe"))
+                            try std.fmt.allocPrint(allocator, "{s}.exe.map", .{exe_base})
+                        else
+                            try std.fmt.allocPrint(allocator, "{s}.map", .{exe_base});
+                    };
+
+                    // root_dir already points to the outfile's parent directory,
+                    // so use map_basename (not a path with directory components)
+                    // to avoid writing to a doubled directory path.
+                    var pathbuf: bun.PathBuffer = undefined;
+                    switch (bun.jsc.Node.fs.NodeFS.writeFileWithPathBuffer(
+                        &pathbuf,
+                        .{
+                            .data = .{ .buffer = .{
+                                .buffer = .{
+                                    .ptr = @constCast(sourcemap_bytes.ptr),
+                                    .len = @as(u32, @truncate(sourcemap_bytes.len)),
+                                    .byte_len = @as(u32, @truncate(sourcemap_bytes.len)),
+                                },
+                            } },
+                            .encoding = .buffer,
+                            .dirfd = .fromStdDir(root_dir),
+                            .file = .{ .path = .{
+                                .string = bun.PathString.init(map_basename),
+                            } },
+                        },
+                    )) {
+                        .err => |err| {
+                            Output.err(err, "failed to write sourcemap file '{s}'", .{map_basename});
+                            had_err = true;
+                        },
+                        .result => {},
+                    }
+                }
+            }
+        }
+
         const compiled_elapsed = @divTrunc(@as(i64, @truncate(std.time.nanoTimestamp() - bundled_end)), @as(i64, std.time.ns_per_ms));
         const compiled_elapsed_digit_count: isize = switch (compiled_elapsed) {
             0...9 => 3,
````
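The `{outfile}.map` fallback encodes a Windows naming rule: the executable gains `.exe`, so the sourcemap becomes `<name>.exe.map` unless the suffix is already present. A TypeScript sketch of the same decision (the helper name is hypothetical):

```ts
// Hypothetical helper mirroring the Zig fallback naming above.
function mapBasenameFor(exeBase: string, isWindowsTarget: boolean): string {
  // Windows targets append ".exe" to the executable, so the map follows suit.
  if (isWindowsTarget && !exeBase.endsWith(".exe")) return `${exeBase}.exe.map`;
  return `${exeBase}.map`;
}

mapBasenameFor("myapp", true);  // "myapp.exe.map"
mapBasenameFor("myapp", false); // "myapp.map"
```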
````diff
@@ -1,5 +1,5 @@
 import { describe, expect, test } from "bun:test";
-import { bunEnv, tempDir } from "harness";
+import { bunEnv, bunExe, tempDir } from "harness";
 import { join } from "path";
 
 describe("Bun.build compile with sourcemap", () => {
````
````diff
@@ -26,9 +26,9 @@ main();`,
     });
 
     expect(result.success).toBe(true);
-    expect(result.outputs.length).toBe(1);
 
-    const executablePath = result.outputs[0].path;
+    const executableOutput = result.outputs.find((o: any) => o.kind === "entry-point")!;
+    const executablePath = executableOutput.path;
     expect(await Bun.file(executablePath).exists()).toBe(true);
 
     // Run the compiled executable and capture the error
````
````diff
@@ -94,6 +94,167 @@ main();`,
     expect(exitCode).not.toBe(0);
   });
 
+  test("compile with sourcemap: external writes .map file to disk", async () => {
+    using dir = tempDir("build-compile-sourcemap-external-file", helperFiles);
+
+    const result = await Bun.build({
+      entrypoints: [join(String(dir), "app.js")],
+      compile: true,
+      sourcemap: "external",
+    });
+
+    expect(result.success).toBe(true);
+
+    const executableOutput = result.outputs.find((o: any) => o.kind === "entry-point")!;
+    const executablePath = executableOutput.path;
+    expect(await Bun.file(executablePath).exists()).toBe(true);
+
+    // The sourcemap output should appear in build result outputs
+    const sourcemapOutputs = result.outputs.filter((o: any) => o.kind === "sourcemap");
+    expect(sourcemapOutputs.length).toBe(1);
+
+    // The .map file should exist next to the executable
+    const mapPath = sourcemapOutputs[0].path;
+    expect(mapPath).toEndWith(".map");
+    expect(await Bun.file(mapPath).exists()).toBe(true);
+
+    // Validate the sourcemap is valid JSON with expected fields
+    const mapContent = JSON.parse(await Bun.file(mapPath).text());
+    expect(mapContent.version).toBe(3);
+    expect(mapContent.sources).toBeArray();
+    expect(mapContent.sources.length).toBeGreaterThan(0);
+    expect(mapContent.mappings).toBeString();
+  });
+
+  test("compile without sourcemap does not write .map file", async () => {
+    using dir = tempDir("build-compile-no-sourcemap-file", {
+      "nosourcemap_entry.js": helperFiles["app.js"],
+      "helper.js": helperFiles["helper.js"],
+    });
+
+    const result = await Bun.build({
+      entrypoints: [join(String(dir), "nosourcemap_entry.js")],
+      compile: true,
+    });
+
+    expect(result.success).toBe(true);
+
+    const executableOutput = result.outputs.find((o: any) => o.kind === "entry-point")!;
+    const executablePath = executableOutput.path;
+    // No .map file should exist next to the executable
+    expect(await Bun.file(`${executablePath}.map`).exists()).toBe(false);
+    // No sourcemap outputs should be in the result
+    const sourcemapOutputs = result.outputs.filter((o: any) => o.kind === "sourcemap");
+    expect(sourcemapOutputs.length).toBe(0);
+  });
+
+  test("compile with splitting and external sourcemap writes multiple .map files", async () => {
+    using dir = tempDir("build-compile-sourcemap-splitting", {
+      "entry.js": `
+        const mod = await import("./lazy.js");
+        mod.greet();
+      `,
+      "lazy.js": `
+        export function greet() {
+          console.log("hello from lazy module");
+        }
+      `,
+    });
+
+    const result = await Bun.build({
+      entrypoints: [join(String(dir), "entry.js")],
+      compile: true,
+      splitting: true,
+      sourcemap: "external",
+    });
+
+    expect(result.success).toBe(true);
+
+    const executableOutput = result.outputs.find((o: any) => o.kind === "entry-point")!;
+    const executablePath = executableOutput.path;
+    expect(await Bun.file(executablePath).exists()).toBe(true);
+
+    // With splitting and a dynamic import, there should be at least 2 sourcemaps
+    // (one for the entry chunk, one for the lazy-loaded chunk)
+    const sourcemapOutputs = result.outputs.filter((o: any) => o.kind === "sourcemap");
+    expect(sourcemapOutputs.length).toBeGreaterThanOrEqual(2);
+
+    // Each sourcemap should be a valid .map file on disk
+    const mapPaths = new Set<string>();
+    for (const sm of sourcemapOutputs) {
+      expect(sm.path).toEndWith(".map");
+      expect(await Bun.file(sm.path).exists()).toBe(true);
+
+      // Each map file should have a unique path (no overwrites)
+      expect(mapPaths.has(sm.path)).toBe(false);
+      mapPaths.add(sm.path);
+
+      // Validate the sourcemap is valid JSON
+      const mapContent = JSON.parse(await Bun.file(sm.path).text());
+      expect(mapContent.version).toBe(3);
+      expect(mapContent.mappings).toBeString();
+    }
+
+    // Run the compiled executable to ensure it works
+    await using proc = Bun.spawn({
+      cmd: [executablePath],
+      env: bunEnv,
+      cwd: String(dir),
+      stdout: "pipe",
+      stderr: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(stdout).toContain("hello from lazy module");
+    expect(exitCode).toBe(0);
+  });
+
+  test("compile with --outfile subdir/myapp writes .map next to executable", async () => {
+    using dir = tempDir("build-compile-sourcemap-outfile-subdir", helperFiles);
+
+    const subdirPath = join(String(dir), "subdir");
+    const exeSuffix = process.platform === "win32" ? ".exe" : "";
+
+    // Use CLI: bun build --compile --outfile subdir/myapp --sourcemap=external
+    await using proc = Bun.spawn({
+      cmd: [
+        bunExe(),
+        "build",
+        "--compile",
+        join(String(dir), "app.js"),
+        "--outfile",
+        join(subdirPath, "myapp"),
+        "--sourcemap=external",
+      ],
+      env: bunEnv,
+      cwd: String(dir),
+      stdout: "pipe",
+      stderr: "pipe",
+    });
+
+    const [_stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(stderr).toBe("");
+    expect(exitCode).toBe(0);
+
+    // The executable should be at subdir/myapp (with .exe on Windows)
+    expect(await Bun.file(join(subdirPath, `myapp${exeSuffix}`)).exists()).toBe(true);
+
+    // The .map file should be in subdir/ (next to the executable)
+    const glob = new Bun.Glob("*.map");
+    const mapFiles = Array.from(glob.scanSync({ cwd: subdirPath }));
+    expect(mapFiles.length).toBe(1);
+
+    // Validate the sourcemap is valid JSON
+    const mapContent = JSON.parse(await Bun.file(join(subdirPath, mapFiles[0])).text());
+    expect(mapContent.version).toBe(3);
+    expect(mapContent.mappings).toBeString();
+
+    // Verify no .map was written into the doubled path subdir/subdir/
+    expect(await Bun.file(join(String(dir), "subdir", "subdir", "myapp.map")).exists()).toBe(false);
+  });
+
   test("compile with multiple source files", async () => {
     using dir = tempDir("build-compile-sourcemap-multiple-files", {
       "utils.js": `export function utilError() {
````
````diff
@@ -172,6 +172,35 @@ describe("bundler", () => {
       },
       stdout: "a b",
     },
+    {
+      // #27454: Named imports from packages with re-export index files.
+      // The package index imports from internal modules and re-exports them.
+      // When bundled, these internal imports are inlined, but the original AST
+      // import records may have unresolved source_index. The ModuleInfo must
+      // not record these phantom imports.
+      name: "PackageReExportNamedImport",
+      files: {
+        "/entry.ts": `
+          import { greet } from "fake-pkg";
+          console.log(greet("world"));
+        `,
+        "/node_modules/fake-pkg/package.json": `{"name":"fake-pkg","version":"1.0.0","type":"module","exports":{".":{"import":"./libesm/index.js"}}}`,
+        "/node_modules/fake-pkg/libesm/index.js": [
+          `import { greet } from './utils/greet.js';`,
+          `import { format } from './utils/format.js';`,
+          `import { upper } from './utils/upper.js';`,
+          `import { lower } from './utils/lower.js';`,
+          `import { trim } from './utils/trim.js';`,
+          `export { greet, format, upper, lower, trim };`,
+        ].join("\n"),
+        "/node_modules/fake-pkg/libesm/utils/greet.js": `import { format } from './format.js';\nexport function greet(name) { return format("Hello, " + name + "!"); }`,
+        "/node_modules/fake-pkg/libesm/utils/format.js": `export function format(str) { return "[" + str + "]"; }`,
+        "/node_modules/fake-pkg/libesm/utils/upper.js": `export function upper(str) { return str.toUpperCase(); }`,
+        "/node_modules/fake-pkg/libesm/utils/lower.js": `export function lower(str) { return str.toLowerCase(); }`,
+        "/node_modules/fake-pkg/libesm/utils/trim.js": `export function trim(str) { return str.trim(); }`,
+      },
+      stdout: "[Hello, world!]",
+    },
   ];
 
   for (const scenario of esmBytecodeScenarios) {
````