Mirror of https://github.com/oven-sh/bun (synced 2026-02-04 07:58:54 +00:00)

Compare commits: dylan/byte...jarred/imp (6 Commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 3fb34295b9 |  |
|  | 08018e8159 |  |
|  | 75bb5da560 |  |
|  | 7a421cb4bb |  |
|  | f0424c0665 |  |
|  | c60b0519f4 |  |
@@ -882,6 +882,7 @@ pub const JSBundler = struct {
        success: struct {
            source_code: []const u8 = "",
            loader: options.Loader = .file,
            sourcemap: ?[]const u8 = null,
        },
        pending,
        no_match,

@@ -892,6 +893,9 @@ pub const JSBundler = struct {
        switch (this.*) {
            .success => |success| {
                bun.default_allocator.free(success.source_code);
                if (success.sourcemap) |sm| {
                    bun.default_allocator.free(sm);
                }
            },
            .err => |*err| {
                err.deinit(bun.default_allocator);

@@ -970,6 +974,7 @@ pub const JSBundler = struct {
        _: *anyopaque,
        source_code_value: JSValue,
        loader_as_int: JSValue,
        sourcemap_value: JSValue,
    ) void {
        jsc.markBinding(@src());
        if (source_code_value.isEmptyOrUndefinedOrNull() or loader_as_int.isEmptyOrUndefinedOrNull()) {

@@ -994,10 +999,26 @@ pub const JSBundler = struct {

            @panic("Unexpected: source_code is not a string");
        };

        const sourcemap = if (!sourcemap_value.isEmptyOrUndefinedOrNull())
            bun.JSC.Node.StringOrBuffer.fromJSToOwnedSlice(global, sourcemap_value, bun.default_allocator) catch |err| {
                switch (err) {
                    error.OutOfMemory => {
                        bun.outOfMemory();
                    },
                    error.JSError => {},
                }

                @panic("Unexpected: sourcemap is not a string");
            }
        else
            null;

        this.value = .{
            .success = .{
                .loader = options.Loader.fromAPI(loader),
                .source_code = source_code,
                .sourcemap = sourcemap,
            },
        };
    }
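Taken together, these hunks thread an optional `sourcemap` through the plugin load result: it is read from JS alongside the source code, stored on the `success` payload, and freed with it. A minimal sketch of the plugin-facing usage this enables (mirroring the test file at the end of this diff; the `original.js` name and the mapping are illustrative):

```ts
import type { BunPlugin } from "bun";

// An onLoad hook returning transformed code plus the sourcemap that maps
// it back to the original source (the new optional `sourcemap` field).
const sourcemapPlugin: BunPlugin = {
  name: "sourcemap-test",
  setup(build) {
    build.onLoad({ filter: /\.transformed\.js$/ }, () => ({
      contents: `console.log("transformed");`,
      loader: "js",
      sourcemap: JSON.stringify({
        version: 3,
        sources: ["original.js"],
        sourcesContent: [`console.log("original");`],
        names: [],
        mappings: "AAAA", // (0,0) in generated -> (0,0) in source 0
      }),
    }));
  },
};
```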
@@ -46,7 +46,7 @@ extern "C" void OnBeforeParseResult__reset(OnBeforeParseResult* result);

/// These are callbacks defined in Zig and to be run after their associated JS version is run
extern "C" void JSBundlerPlugin__addError(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue);
extern "C" void JSBundlerPlugin__onLoadAsync(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue);
extern "C" void JSBundlerPlugin__onLoadAsync(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue, JSC::EncodedJSValue);
extern "C" void JSBundlerPlugin__onResolveAsync(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue, JSC::EncodedJSValue);
extern "C" void JSBundlerPlugin__onVirtualModulePlugin(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue, JSC::EncodedJSValue);
extern "C" JSC::EncodedJSValue JSBundlerPlugin__onDefer(void*, JSC::JSGlobalObject*);

@@ -111,7 +111,7 @@ bool BundlerPlugin::anyMatchesCrossThread(JSC::VM& vm, const BunString* namespac
static const HashTableValue JSBundlerPluginHashTable[] = {
    { "addFilter"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_addFilter, 3 } },
    { "addError"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_addError, 3 } },
    { "onLoadAsync"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_onLoadAsync, 3 } },
    { "onLoadAsync"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_onLoadAsync, 4 } },
    { "onResolveAsync"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_onResolveAsync, 4 } },
    { "onBeforeParse"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_onBeforeParse, 4 } },
    { "generateDeferPromise"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_generateDeferPromise, 0 } },

@@ -402,7 +402,8 @@ JSC_DEFINE_HOST_FUNCTION(jsBundlerPluginFunction_onLoadAsync, (JSC::JSGlobalObje
        UNWRAP_BUNDLER_PLUGIN(callFrame),
        thisObject->plugin.config,
        JSValue::encode(callFrame->argument(1)),
        JSValue::encode(callFrame->argument(2)));
        JSValue::encode(callFrame->argument(2)),
        JSValue::encode(callFrame->argument(3)));
    }

    return JSC::JSValue::encode(JSC::jsUndefined());
@@ -9,7 +9,7 @@
#include <JavaScriptCore/Yarr.h>

typedef void (*JSBundlerPluginAddErrorCallback)(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue);
typedef void (*JSBundlerPluginOnLoadAsyncCallback)(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue);
typedef void (*JSBundlerPluginOnLoadAsyncCallback)(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue, JSC::EncodedJSValue);
typedef void (*JSBundlerPluginOnResolveAsyncCallback)(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue, JSC::EncodedJSValue);
typedef void (*JSBundlerPluginNativeOnBeforeParseCallback)(const OnBeforeParseArguments*, OnBeforeParseResult*);
@@ -69,6 +69,10 @@ pub const InputFile = struct {
    unique_key_for_additional_file: string = "",
    content_hash_for_additional_file: u64 = 0,
    is_plugin_file: bool = false,
    sourcemap: ?*bun.sourcemap.ParsedSourceMap = null,
    /// Index to InputFile containing the original source content (for sourcemap support)
    /// 0 means no original source, valid indices start at 1
    original_source_index: Index.Int = 0,
};

pub inline fn pathToSourceIndexMap(this: *Graph, target: options.Target) *PathToSourceIndexMap {
@@ -1,12 +1,10 @@
pub const LinkerContext = struct {
    pub const debug = Output.scoped(.LinkerCtx, .visible);
    pub const debug = Output.scoped(.LinkerCtx, false);
    pub const CompileResult = bundler.CompileResult;

    pub const OutputFileListBuilder = @import("./linker_context/OutputFileListBuilder.zig");
    pub const StaticRouteVisitor = @import("./linker_context/StaticRouteVisitor.zig");

    parse_graph: *Graph = undefined,
    graph: LinkerGraph = undefined,
    allocator: std.mem.Allocator = undefined,
    log: *Logger.Log = undefined,

    resolver: *Resolver = undefined,

@@ -21,6 +19,10 @@ pub const LinkerContext = struct {

    options: LinkerOptions = .{},

    wait_group: ThreadPoolLib.WaitGroup = .{},

    ambiguous_result_pool: std.ArrayList(MatchImport) = undefined,

    loop: EventLoop,

    /// string buffer containing pre-formatted unique keys

@@ -44,12 +46,8 @@ pub const LinkerContext = struct {

    mangled_props: MangledProps = .{},

    pub fn allocator(this: *const LinkerContext) std.mem.Allocator {
        return this.graph.allocator;
    }

    pub fn pathWithPrettyInitialized(this: *LinkerContext, path: Fs.Path) !Fs.Path {
        return bundler.genericPathWithPrettyInitialized(path, this.options.target, this.resolver.fs.top_level_dir, this.allocator());
        return bundler.genericPathWithPrettyInitialized(path, this.options.target, this.resolver.fs.top_level_dir, this.graph.allocator);
    }

    pub const LinkerOptions = struct {

@@ -78,10 +76,10 @@ pub const LinkerContext = struct {
    };

    pub const SourceMapData = struct {
        line_offset_wait_group: sync.WaitGroup = .init(),
        line_offset_wait_group: sync.WaitGroup = .{},
        line_offset_tasks: []Task = &.{},

        quoted_contents_wait_group: sync.WaitGroup = .init(),
        quoted_contents_wait_group: sync.WaitGroup = .{},
        quoted_contents_tasks: []Task = &.{},

        pub const Task = struct {

@@ -115,16 +113,16 @@ pub const LinkerContext = struct {
                // was generated. This will be preserved so that remapping
                // stack traces can show the source code, even after incremental
                // rebuilds occur.
                const alloc = if (worker.ctx.transpiler.options.dev_server) |dev|
                    dev.allocator()
                const allocator = if (worker.ctx.transpiler.options.dev_server) |dev|
                    dev.allocator
                else
                    worker.allocator;

                SourceMapData.computeQuotedSourceContents(task.ctx, alloc, task.source_index);
                SourceMapData.computeQuotedSourceContents(task.ctx, allocator, task.source_index);
            }
        };

        pub fn computeLineOffsets(this: *LinkerContext, alloc: std.mem.Allocator, source_index: Index.Int) void {
        pub fn computeLineOffsets(this: *LinkerContext, allocator: std.mem.Allocator, source_index: Index.Int) void {
            debug("Computing LineOffsetTable: {d}", .{source_index});
            const line_offset_table: *bun.sourcemap.LineOffsetTable.List = &this.graph.files.items(.line_offset_table)[source_index];

@@ -140,7 +138,7 @@ pub const LinkerContext = struct {
            const approximate_line_count = this.graph.ast.items(.approximate_newline_count)[source_index];

            line_offset_table.* = bun.sourcemap.LineOffsetTable.generate(
                alloc,
                allocator,
                source.contents,

                // We don't support sourcemaps for source files with more than 2^31 lines

@@ -148,22 +146,24 @@ pub const LinkerContext = struct {
            );
        }

        pub fn computeQuotedSourceContents(this: *LinkerContext, _: std.mem.Allocator, source_index: Index.Int) void {
        pub fn computeQuotedSourceContents(this: *LinkerContext, allocator: std.mem.Allocator, source_index: Index.Int) void {
            debug("Computing Quoted Source Contents: {d}", .{source_index});
            const quoted_source_contents = &this.graph.files.items(.quoted_source_contents)[source_index];
            if (quoted_source_contents.take()) |old| {
                old.deinit();
            }

            const loader: options.Loader = this.parse_graph.input_files.items(.loader)[source_index];
            const quoted_source_contents: *string = &this.graph.files.items(.quoted_source_contents)[source_index];
            if (!loader.canHaveSourceMap()) {
                quoted_source_contents.* = "";
                return;
            }

            const source: *const Logger.Source = &this.parse_graph.input_files.items(.source)[source_index];
            var mutable = MutableString.initEmpty(bun.default_allocator);
            js_printer.quoteForJSON(source.contents, &mutable, false) catch bun.outOfMemory();
            quoted_source_contents.* = mutable.toDefaultOwned().toOptional();
            // Check if we have an original source from sourcemap
            const original_source_index = this.parse_graph.input_files.items(.original_source_index)[source_index];
            const source: *const Logger.Source = if (original_source_index != 0)
                &this.parse_graph.input_files.items(.source)[original_source_index]
            else
                &this.parse_graph.input_files.items(.source)[source_index];

            const mutable = MutableString.initEmpty(allocator);
            quoted_source_contents.* = (js_printer.quoteForJSON(source.contents, mutable, false) catch bun.outOfMemory()).list.items;
        }
    };
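`computeQuotedSourceContents` now resolves the source through `original_source_index` first, so when a plugin supplied a sourcemap with `sourcesContent`, the text embedded in the output map is the original source rather than the transformed one. The quoting step itself is JSON string escaping; a rough TypeScript equivalent (illustrative names, not Bun internals):

```ts
// "Quoted source contents" = the source text escaped for direct embedding
// in a sourcemap's `sourcesContent` array. JSON.stringify is the moral
// equivalent of js_printer.quoteForJSON here.
function quoteSourceContents(originalContents: string | null, fileContents: string): string {
  // Prefer the original source recovered from the plugin's input sourcemap.
  const contents = originalContents ?? fileContents;
  return JSON.stringify(contents); // result includes the surrounding quotes
}
```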
@@ -205,7 +205,7 @@ pub const LinkerContext = struct {
        this.log = bundle.transpiler.log;

        this.resolver = &bundle.transpiler.resolver;
        this.cycle_detector = std.ArrayList(ImportTracker).init(this.allocator());
        this.cycle_detector = std.ArrayList(ImportTracker).init(this.allocator);

        this.graph.reachable_files = reachable;

@@ -213,6 +213,8 @@ pub const LinkerContext = struct {

        try this.graph.load(entry_points, sources, server_component_boundaries, bundle.dynamic_import_entry_points.keys());
        bundle.dynamic_import_entry_points.deinit();
        this.wait_group.init();
        this.ambiguous_result_pool = std.ArrayList(MatchImport).init(this.allocator);

        var runtime_named_exports = &this.graph.ast.items(.named_exports)[Index.runtime.get()];

@@ -256,10 +258,12 @@ pub const LinkerContext = struct {
        reachable: []const Index.Int,
    ) void {
        bun.assert(this.options.source_maps != .none);
        this.source_maps.line_offset_wait_group = .initWithCount(reachable.len);
        this.source_maps.quoted_contents_wait_group = .initWithCount(reachable.len);
        this.source_maps.line_offset_tasks = this.allocator().alloc(SourceMapData.Task, reachable.len) catch unreachable;
        this.source_maps.quoted_contents_tasks = this.allocator().alloc(SourceMapData.Task, reachable.len) catch unreachable;
        this.source_maps.line_offset_wait_group.init();
        this.source_maps.quoted_contents_wait_group.init();
        this.source_maps.line_offset_wait_group.counter = @as(u32, @truncate(reachable.len));
        this.source_maps.quoted_contents_wait_group.counter = @as(u32, @truncate(reachable.len));
        this.source_maps.line_offset_tasks = this.allocator.alloc(SourceMapData.Task, reachable.len) catch unreachable;
        this.source_maps.quoted_contents_tasks = this.allocator.alloc(SourceMapData.Task, reachable.len) catch unreachable;

        var batch = ThreadPoolLib.Batch{};
        var second_batch = ThreadPoolLib.Batch{};

@@ -285,7 +289,7 @@ pub const LinkerContext = struct {
    }

    pub fn scheduleTasks(this: *LinkerContext, batch: ThreadPoolLib.Batch) void {
        _ = this.pending_task_count.fetchAdd(@as(u32, @intCast(batch.len)), .monotonic);
        _ = this.pending_task_count.fetchAdd(@as(u32, @truncate(batch.len)), .monotonic);
        this.parse_graph.pool.worker_pool.schedule(batch);
    }

@@ -308,7 +312,7 @@ pub const LinkerContext = struct {
            @panic("Assertion failed: HTML import file not found in pathToSourceIndexMap");
        };

        html_source_indices.push(this.allocator(), source_index) catch bun.outOfMemory();
        html_source_indices.push(this.graph.allocator, source_index) catch bun.outOfMemory();

        // S.LazyExport is a call to __jsonParse.
        const original_ref = parts[html_import]

@@ -361,36 +365,6 @@ pub const LinkerContext = struct {
            this.checkForMemoryCorruption();
        }

        // Validate top-level await for all files first, like esbuild does.
        // This must be done before scanImportsAndExports to ensure async
        // status is properly propagated through cyclic imports.
        {
            const import_records_list: []ImportRecord.List = this.graph.ast.items(.import_records);
            const tla_keywords = this.parse_graph.ast.items(.top_level_await_keyword);
            const tla_checks = this.parse_graph.ast.items(.tla_check);
            const input_files = this.parse_graph.input_files.items(.source);
            const flags: []JSMeta.Flags = this.graph.meta.items(.flags);
            const css_asts: []?*bun.css.BundlerStyleSheet = this.graph.ast.items(.css);

            // Process all files in source index order, like esbuild does
            var source_index: u32 = 0;
            while (source_index < this.graph.files.len) : (source_index += 1) {

                // Skip runtime
                if (source_index == Index.runtime.get()) continue;

                // Skip if not a JavaScript AST
                if (source_index >= import_records_list.len) continue;

                // Skip CSS files
                if (css_asts[source_index] != null) continue;

                const import_records = import_records_list[source_index].slice();

                _ = try this.validateTLA(source_index, tla_keywords, tla_checks, input_files, import_records, flags, import_records_list);
            }
        }

        try this.scanImportsAndExports();

        // Stop now if there were errors

@@ -442,7 +416,7 @@ pub const LinkerContext = struct {
        const ref = this.graph.generateNewSymbol(source_index, .other, name);
        const part_index = this.graph.addPartToFile(source_index, .{
            .declared_symbols = js_ast.DeclaredSymbol.List.fromSlice(
                this.allocator(),
                this.allocator,
                &[_]js_ast.DeclaredSymbol{
                    .{ .ref = ref, .is_top_level = true },
                },

@@ -452,13 +426,13 @@ pub const LinkerContext = struct {

        try this.graph.generateSymbolImportAndUse(source_index, part_index, module_ref, 1, Index.init(source_index));
        var top_level = &this.graph.meta.items(.top_level_symbol_to_parts_overlay)[source_index];
        var parts_list = this.allocator().alloc(u32, 1) catch unreachable;
        var parts_list = this.allocator.alloc(u32, 1) catch unreachable;
        parts_list[0] = part_index;

        top_level.put(this.allocator(), ref, BabyList(u32).init(parts_list)) catch unreachable;
        top_level.put(this.allocator, ref, BabyList(u32).init(parts_list)) catch unreachable;

        var resolved_exports = &this.graph.meta.items(.resolved_exports)[source_index];
        resolved_exports.put(this.allocator(), alias, ExportData{
        resolved_exports.put(this.allocator, alias, ExportData{
            .data = ImportTracker{
                .source_index = Index.init(source_index),
                .import_ref = ref,

@@ -494,7 +468,7 @@ pub const LinkerContext = struct {
        log.addErrorFmt(
            source,
            record.range.loc,
            this.allocator(),
            this.allocator,
            "Cannot import a \".{s}\" file into a CSS file",
            .{@tagName(loader)},
        ) catch bun.outOfMemory();

@@ -582,7 +556,7 @@ pub const LinkerContext = struct {
        // AutoBitSet needs to be initialized if it is dynamic
        if (AutoBitSet.needsDynamic(entry_points.len)) {
            for (file_entry_bits) |*bits| {
                bits.* = try AutoBitSet.initEmpty(c.allocator(), entry_points.len);
                bits.* = try AutoBitSet.initEmpty(c.allocator, entry_points.len);
            }
        } else if (file_entry_bits.len > 0) {
            // assert that the tag is correct

@@ -619,6 +593,7 @@ pub const LinkerContext = struct {
    pub const computeCrossChunkDependencies = @import("./linker_context/computeCrossChunkDependencies.zig").computeCrossChunkDependencies;

    pub const GenerateChunkCtx = struct {
        wg: *sync.WaitGroup,
        c: *LinkerContext,
        chunks: []Chunk,
        chunk: *Chunk,

@@ -628,6 +603,7 @@ pub const LinkerContext = struct {
    pub const postProcessCSSChunk = @import("./linker_context/postProcessCSSChunk.zig").postProcessCSSChunk;
    pub const postProcessHTMLChunk = @import("./linker_context/postProcessHTMLChunk.zig").postProcessHTMLChunk;
    pub fn generateChunk(ctx: GenerateChunkCtx, chunk: *Chunk, chunk_index: usize) void {
        defer ctx.wg.finish();
        const worker = ThreadPool.Worker.get(@fieldParentPtr("linker", ctx.c));
        defer worker.unget();
        switch (chunk.content) {

@@ -640,6 +616,7 @@ pub const LinkerContext = struct {
    pub const renameSymbolsInChunk = @import("./linker_context/renameSymbolsInChunk.zig").renameSymbolsInChunk;

    pub fn generateJSRenamer(ctx: GenerateChunkCtx, chunk: *Chunk, chunk_index: usize) void {
        defer ctx.wg.finish();
        var worker = ThreadPool.Worker.get(@fieldParentPtr("linker", ctx.c));
        defer worker.unget();
        switch (chunk.content) {

@@ -713,8 +690,8 @@ pub const LinkerContext = struct {
        }

        var quote_buf = try MutableString.init(worker.allocator, path.pretty.len + 2);
        try js_printer.quoteForJSON(path.pretty, &quote_buf, false);
        j.pushStatic(quote_buf.slice()); // freed by arena
        quote_buf = try js_printer.quoteForJSON(path.pretty, quote_buf, false);
        j.pushStatic(quote_buf.list.items); // freed by arena
    }

    var next_mapping_source_index: i32 = 1;

@@ -734,8 +711,8 @@ pub const LinkerContext = struct {

        var quote_buf = try MutableString.init(worker.allocator, path.pretty.len + ", ".len + 2);
        quote_buf.appendAssumeCapacity(", ");
        try js_printer.quoteForJSON(path.pretty, &quote_buf, false);
        j.pushStatic(quote_buf.slice()); // freed by arena
        quote_buf = try js_printer.quoteForJSON(path.pretty, quote_buf, false);
        j.pushStatic(quote_buf.list.items); // freed by arena
    }
}

@@ -745,15 +722,85 @@ pub const LinkerContext = struct {
    );

    const source_indices_for_contents = source_id_map.keys();
    const input_sourcemaps = c.parse_graph.input_files.items(.sourcemap);
    if (source_indices_for_contents.len > 0) {
        j.pushStatic("\n ");
        j.pushStatic(
            quoted_source_map_contents[source_indices_for_contents[0]].getConst() orelse "",
        );

        // Check if we have an input sourcemap with sources_content
        const first_index = source_indices_for_contents[0];

        // Try to find sourcemap - either at this index or at a related plugin file index
        var sourcemap_to_use: ?*bun.sourcemap.ParsedSourceMap = null;
        var sourcemap_index: u32 = first_index;

        if (input_sourcemaps[first_index]) |sm| {
            sourcemap_to_use = sm;
        } else {
            // If no sourcemap at this index, check if this might be an original source index
            // and find the corresponding plugin file index
            const original_source_indices = c.parse_graph.input_files.items(.original_source_index);
            for (original_source_indices, 0..) |orig_idx, plugin_idx| {
                if (orig_idx == first_index) {
                    // Found the plugin file that created this original source
                    if (input_sourcemaps[plugin_idx]) |sm| {
                        sourcemap_to_use = sm;
                        sourcemap_index = @intCast(plugin_idx);
                        break;
                    }
                }
            }
        }

        if (sourcemap_to_use) |input_sourcemap| {
            if (input_sourcemap.sources_content.len > 0) {
                // Use the original source content from the input sourcemap
                const original_content = input_sourcemap.sources_content[0];
                const mutable = MutableString.initEmpty(worker.allocator);
                const quoted = (js_printer.quoteForJSON(original_content, mutable, false) catch bun.outOfMemory()).list.items;
                j.pushStatic(quoted);
            } else {
                j.pushStatic(quoted_source_map_contents[first_index]);
            }
        } else {
            j.pushStatic(quoted_source_map_contents[first_index]);
        }

        for (source_indices_for_contents[1..]) |index| {
            j.pushStatic(",\n ");
            j.pushStatic(quoted_source_map_contents[index].getConst() orelse "");

            // Try to find sourcemap - either at this index or at a related plugin file index
            var loop_sourcemap_to_use: ?*bun.sourcemap.ParsedSourceMap = null;

            if (input_sourcemaps[index]) |sm| {
                loop_sourcemap_to_use = sm;
            } else {
                // If no sourcemap at this index, check if this might be an original source index
                // and find the corresponding plugin file index
                const original_source_indices = c.parse_graph.input_files.items(.original_source_index);
                for (original_source_indices, 0..) |orig_idx, plugin_idx| {
                    if (orig_idx == index) {
                        // Found the plugin file that created this original source
                        if (input_sourcemaps[plugin_idx]) |sm| {
                            loop_sourcemap_to_use = sm;
                            break;
                        }
                    }
                }
            }

            if (loop_sourcemap_to_use) |input_sourcemap| {
                if (input_sourcemap.sources_content.len > 0) {
                    // Use the original source content from the input sourcemap
                    const original_content = input_sourcemap.sources_content[0];
                    const mutable = MutableString.initEmpty(worker.allocator);
                    const quoted = (js_printer.quoteForJSON(original_content, mutable, false) catch bun.outOfMemory()).list.items;
                    j.pushStatic(quoted);
                } else {
                    j.pushStatic(quoted_source_map_contents[index]);
                }
            } else {
                j.pushStatic(quoted_source_map_contents[index]);
            }
        }
    }
    j.pushStatic(
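This block chooses, per source, what lands in the output map's `sourcesContent`: an input sourcemap attached to the file (or to the plugin file whose `original_source_index` points at it) wins, and the file's own quoted contents are the fallback. The same precedence in a compact sketch (illustrative names, not Bun internals):

```ts
interface FileView {
  sourcemapSourcesContent: string[] | null; // from a plugin-provided input sourcemap
  originalSourceIndex: number;              // 0 = no synthetic original source
}

// Resolve the JSON-quoted text to embed in sourcesContent for one source.
function sourcesContentFor(files: FileView[], index: number, quoted: string[]): string {
  // 1) A sourcemap attached directly to this file wins.
  const direct = files[index].sourcemapSourcesContent;
  if (direct && direct.length > 0) return JSON.stringify(direct[0]);
  // 2) Otherwise borrow the sourcemap of whichever plugin file points at
  //    this index as its original source.
  for (const f of files) {
    const sc = f.originalSourceIndex === index ? f.sourcemapSourcesContent : null;
    if (sc && sc.length > 0) return JSON.stringify(sc[0]);
  }
  // 3) Fall back to the file's own, already-quoted contents.
  return quoted[index];
}
```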
@@ -773,11 +820,11 @@ pub const LinkerContext = struct {

    var start_state = sourcemap.SourceMapState{
        .source_index = mapping_source_index,
        .generated_line = offset.lines.zeroBased(),
        .generated_column = offset.columns.zeroBased(),
        .generated_line = offset.lines,
        .generated_column = offset.columns,
    };

    if (offset.lines.zeroBased() == 0) {
    if (offset.lines == 0) {
        start_state.generated_column += prev_column_offset;
    }
@@ -966,7 +1013,7 @@ pub const LinkerContext = struct {

    // Require of a top-level await chain is forbidden
    if (record.kind == .require) {
        var notes = std.ArrayList(Logger.Data).init(c.allocator());
        var notes = std.ArrayList(Logger.Data).init(c.allocator);

        var tla_pretty_path: string = "";
        var other_source_index = record.source_index.get();

@@ -981,7 +1028,7 @@ pub const LinkerContext = struct {
        const source = &input_files[other_source_index];
        tla_pretty_path = source.path.pretty;
        notes.append(Logger.Data{
            .text = std.fmt.allocPrint(c.allocator(), "The top-level await in {s} is here:", .{tla_pretty_path}) catch bun.outOfMemory(),
            .text = std.fmt.allocPrint(c.allocator, "The top-level await in {s} is here:", .{tla_pretty_path}) catch bun.outOfMemory(),
            .location = .initOrNull(source, parent_result_tla_keyword),
        }) catch bun.outOfMemory();
        break;

@@ -997,7 +1044,7 @@ pub const LinkerContext = struct {
        other_source_index = parent_tla_check.parent;

        try notes.append(Logger.Data{
            .text = try std.fmt.allocPrint(c.allocator(), "The file {s} imports the file {s} here:", .{
            .text = try std.fmt.allocPrint(c.allocator, "The file {s} imports the file {s} here:", .{
                input_files[parent_source_index].path.pretty,
                input_files[other_source_index].path.pretty,
            }),

@@ -1008,9 +1055,9 @@ pub const LinkerContext = struct {
        const source: *const Logger.Source = &input_files[source_index];
        const imported_pretty_path = source.path.pretty;
        const text: string = if (strings.eql(imported_pretty_path, tla_pretty_path))
            try std.fmt.allocPrint(c.allocator(), "This require call is not allowed because the imported file \"{s}\" contains a top-level await", .{imported_pretty_path})
            try std.fmt.allocPrint(c.allocator, "This require call is not allowed because the imported file \"{s}\" contains a top-level await", .{imported_pretty_path})
        else
            try std.fmt.allocPrint(c.allocator(), "This require call is not allowed because the transitive dependency \"{s}\" contains a top-level await", .{tla_pretty_path});
            try std.fmt.allocPrint(c.allocator, "This require call is not allowed because the transitive dependency \"{s}\" contains a top-level await", .{tla_pretty_path});

        try c.log.addRangeErrorWithNotes(source, record.range, text, notes.items);
    }

@@ -1049,12 +1096,12 @@ pub const LinkerContext = struct {
        this.all_stmts.deinit();
    }

    pub fn init(alloc: std.mem.Allocator) StmtList {
    pub fn init(allocator: std.mem.Allocator) StmtList {
        return .{
            .inside_wrapper_prefix = std.ArrayList(Stmt).init(alloc),
            .outside_wrapper_prefix = std.ArrayList(Stmt).init(alloc),
            .inside_wrapper_suffix = std.ArrayList(Stmt).init(alloc),
            .all_stmts = std.ArrayList(Stmt).init(alloc),
            .inside_wrapper_prefix = std.ArrayList(Stmt).init(allocator),
            .outside_wrapper_prefix = std.ArrayList(Stmt).init(allocator),
            .inside_wrapper_suffix = std.ArrayList(Stmt).init(allocator),
            .all_stmts = std.ArrayList(Stmt).init(allocator),
        };
    }
};

@@ -1065,7 +1112,7 @@ pub const LinkerContext = struct {
    loc: Logger.Loc,
    namespace_ref: Ref,
    import_record_index: u32,
    alloc: std.mem.Allocator,
    allocator: std.mem.Allocator,
    ast: *const JSAst,
) !bool {
    const record = ast.import_records.at(import_record_index);

@@ -1082,11 +1129,11 @@ pub const LinkerContext = struct {
        S.Local,
        S.Local{
            .decls = G.Decl.List.fromSlice(
                alloc,
                allocator,
                &.{
                    .{
                        .binding = Binding.alloc(
                            alloc,
                            allocator,
                            B.Identifier{
                                .ref = namespace_ref,
                            },

@@ -1123,10 +1170,10 @@ pub const LinkerContext = struct {
    try stmts.inside_wrapper_prefix.append(
        Stmt.alloc(S.Local, .{
            .decls = try G.Decl.List.fromSlice(
                alloc,
                allocator,
                &.{
                    .{
                        .binding = Binding.alloc(alloc, B.Identifier{
                        .binding = Binding.alloc(allocator, B.Identifier{
                            .ref = namespace_ref,
                        }, loc),
                        .value = Expr.init(E.RequireString, .{

@@ -1195,7 +1242,7 @@ pub const LinkerContext = struct {
    pub fn printCodeForFileInChunkJS(
        c: *LinkerContext,
        r: renamer.Renamer,
        alloc: std.mem.Allocator,
        allocator: std.mem.Allocator,
        writer: *js_printer.BufferWriter,
        out_stmts: []Stmt,
        ast: *const js_ast.BundledAst,

@@ -1231,13 +1278,13 @@ pub const LinkerContext = struct {
        .print_dce_annotations = c.options.emit_dce_annotations,
        .has_run_symbol_renamer = true,

        .allocator = alloc,
        .allocator = allocator,
        .source_map_allocator = if (c.dev_server != null and
            c.parse_graph.input_files.items(.loader)[source_index.get()].isJavaScriptLike())
            // The loader check avoids globally allocating asset source maps
            writer.buffer.allocator
        else
            alloc,
            allocator,
        .to_esm_ref = to_esm_ref,
        .to_commonjs_ref = to_commonjs_ref,
        .require_ref = switch (c.options.output_format) {

@@ -1262,6 +1309,7 @@ pub const LinkerContext = struct {
        else
            null,
        .mangled_props = &c.mangled_props,
        .input_source_map = c.parse_graph.input_files.items(.sourcemap)[source_index.get()],
    };

    writer.buffer.reset();

@@ -1324,9 +1372,9 @@ pub const LinkerContext = struct {
    const all_sources: []Logger.Source = c.parse_graph.input_files.items(.source);

    // Collect all local css names
    var sfb = std.heap.stackFallback(512, c.allocator());
    const alloc = sfb.get();
    var local_css_names = std.AutoHashMap(bun.bundle_v2.Ref, void).init(alloc);
    var sfb = std.heap.stackFallback(512, c.allocator);
    const allocator = sfb.get();
    var local_css_names = std.AutoHashMap(bun.bundle_v2.Ref, void).init(allocator);
    defer local_css_names.deinit();

    for (all_css_asts, 0..) |maybe_css_ast, source_index| {

@@ -1353,15 +1401,15 @@ pub const LinkerContext = struct {

            const original_name = symbol.original_name;
            const path_hash = bun.css.css_modules.hash(
                alloc,
                allocator,
                "{s}",
                // use path relative to cwd for determinism
                .{source.path.pretty},
                false,
            );

            const final_generated_name = std.fmt.allocPrint(c.allocator(), "{s}_{s}", .{ original_name, path_hash }) catch bun.outOfMemory();
            c.mangled_props.put(c.allocator(), ref, final_generated_name) catch bun.outOfMemory();
            const final_generated_name = std.fmt.allocPrint(c.graph.allocator, "{s}_{s}", .{ original_name, path_hash }) catch bun.outOfMemory();
            c.mangled_props.put(c.allocator, ref, final_generated_name) catch bun.outOfMemory();
        }
    }
}

@@ -1732,7 +1780,7 @@ pub const LinkerContext = struct {
    defer c.cycle_detector.shrinkRetainingCapacity(cycle_detector_top);

    var tracker = init_tracker;
    var ambiguous_results = std.ArrayList(MatchImport).init(c.allocator());
    var ambiguous_results = std.ArrayList(MatchImport).init(c.allocator);
    defer ambiguous_results.clearAndFree();

    var result: MatchImport = MatchImport{};

@@ -1803,7 +1851,7 @@ pub const LinkerContext = struct {
    c.log.addRangeWarningFmt(
        source,
        source.rangeOfIdentifier(named_import.alias_loc.?),
        c.allocator(),
        c.allocator,
        "Import \"{s}\" will always be undefined because the file \"{s}\" has no exports",
        .{
            named_import.alias.?,

@@ -1866,11 +1914,11 @@ pub const LinkerContext = struct {
    // "undefined" instead of emitting an error.
    symbol.import_item_status = .missing;

    if (c.resolver.opts.target == .browser and jsc.ModuleLoader.HardcodedModule.Alias.has(next_source.path.pretty, .bun)) {
    if (c.resolver.opts.target == .browser and JSC.ModuleLoader.HardcodedModule.Alias.has(next_source.path.pretty, .bun)) {
        c.log.addRangeWarningFmtWithNote(
            source,
            r,
            c.allocator(),
            c.allocator,
            "Browser polyfill for module \"{s}\" doesn't have a matching export named \"{s}\"",
            .{
                next_source.path.pretty,

@@ -1884,7 +1932,7 @@ pub const LinkerContext = struct {
        c.log.addRangeWarningFmt(
            source,
            r,
            c.allocator(),
            c.allocator,
            "Import \"{s}\" will always be undefined because there is no matching export in \"{s}\"",
            .{
                named_import.alias.?,

@@ -1896,7 +1944,7 @@ pub const LinkerContext = struct {
        c.log.addRangeErrorFmtWithNote(
            source,
            r,
            c.allocator(),
            c.allocator,
            "Browser polyfill for module \"{s}\" doesn't have a matching export named \"{s}\"",
            .{
                next_source.path.pretty,

@@ -1910,7 +1958,7 @@ pub const LinkerContext = struct {
        c.log.addRangeErrorFmt(
            source,
            r,
            c.allocator(),
            c.allocator,
            "No matching export in \"{s}\" for import \"{s}\"",
            .{
                next_source.path.pretty,
@@ -2051,7 +2099,7 @@ pub const LinkerContext = struct {

    // Generate a dummy part that depends on the "__commonJS" symbol.
    const dependencies: []js_ast.Dependency = if (c.options.output_format != .internal_bake_dev) brk: {
        const dependencies = c.allocator().alloc(js_ast.Dependency, common_js_parts.len) catch bun.outOfMemory();
        const dependencies = c.allocator.alloc(js_ast.Dependency, common_js_parts.len) catch bun.outOfMemory();
        for (common_js_parts, dependencies) |part, *cjs| {
            cjs.* = .{
                .part_index = part,

@@ -2061,14 +2109,14 @@ pub const LinkerContext = struct {
        break :brk dependencies;
    } else &.{};
    var symbol_uses: Part.SymbolUseMap = .empty;
    symbol_uses.put(c.allocator(), wrapper_ref, .{ .count_estimate = 1 }) catch bun.outOfMemory();
    symbol_uses.put(c.allocator, wrapper_ref, .{ .count_estimate = 1 }) catch bun.outOfMemory();
    const part_index = c.graph.addPartToFile(
        source_index,
        .{
            .stmts = &.{},
            .symbol_uses = symbol_uses,
            .declared_symbols = js_ast.DeclaredSymbol.List.fromSlice(
                c.allocator(),
                c.allocator,
                &[_]js_ast.DeclaredSymbol{
                    .{ .ref = c.graph.ast.items(.exports_ref)[source_index], .is_top_level = true },
                    .{ .ref = c.graph.ast.items(.module_ref)[source_index], .is_top_level = true },

@@ -2110,7 +2158,7 @@ pub const LinkerContext = struct {
        &.{};

    // generate a dummy part that depends on the "__esm" symbol
    const dependencies = c.allocator().alloc(js_ast.Dependency, esm_parts.len) catch unreachable;
    const dependencies = c.allocator.alloc(js_ast.Dependency, esm_parts.len) catch unreachable;
    for (esm_parts, dependencies) |part, *esm| {
        esm.* = .{
            .part_index = part,

@@ -2119,12 +2167,12 @@ pub const LinkerContext = struct {
    }

    var symbol_uses: Part.SymbolUseMap = .empty;
    symbol_uses.put(c.allocator(), wrapper_ref, .{ .count_estimate = 1 }) catch bun.outOfMemory();
    symbol_uses.put(c.allocator, wrapper_ref, .{ .count_estimate = 1 }) catch bun.outOfMemory();
    const part_index = c.graph.addPartToFile(
        source_index,
        .{
            .symbol_uses = symbol_uses,
            .declared_symbols = js_ast.DeclaredSymbol.List.fromSlice(c.allocator(), &[_]js_ast.DeclaredSymbol{
            .declared_symbols = js_ast.DeclaredSymbol.List.fromSlice(c.allocator, &[_]js_ast.DeclaredSymbol{
                .{ .ref = wrapper_ref, .is_top_level = true },
            }) catch unreachable,
            .dependencies = Dependency.List.init(dependencies),

@@ -2280,7 +2328,7 @@ pub const LinkerContext = struct {
    imports_to_bind: *RefImportData,
    source_index: Index.Int,
) void {
    var named_imports = named_imports_ptr.clone(c.allocator()) catch bun.outOfMemory();
    var named_imports = named_imports_ptr.clone(c.allocator) catch bun.outOfMemory();
    defer named_imports_ptr.* = named_imports;

    const Sorter = struct {

@@ -2304,7 +2352,7 @@ pub const LinkerContext = struct {

    const import_ref = ref;

    var re_exports = std.ArrayList(js_ast.Dependency).init(c.allocator());
    var re_exports = std.ArrayList(js_ast.Dependency).init(c.allocator);
    const result = c.matchImportWithExport(.{
        .source_index = Index.source(source_index),
        .import_ref = import_ref,

@@ -2313,7 +2361,7 @@ pub const LinkerContext = struct {
    switch (result.kind) {
        .normal => {
            imports_to_bind.put(
                c.allocator(),
                c.allocator,
                import_ref,
                .{
                    .re_exports = bun.BabyList(js_ast.Dependency).init(re_exports.items),

@@ -2332,7 +2380,7 @@ pub const LinkerContext = struct {
        },
        .normal_and_namespace => {
            imports_to_bind.put(
                c.allocator(),
                c.allocator,
                import_ref,
                .{
                    .re_exports = bun.BabyList(js_ast.Dependency).init(re_exports.items),

@@ -2354,7 +2402,7 @@ pub const LinkerContext = struct {
            c.log.addRangeErrorFmt(
                source,
                r,
                c.allocator(),
                c.allocator,
                "Detected cycle while resolving import \"{s}\"",
                .{
                    named_import.alias.?,

@@ -2363,7 +2411,7 @@ pub const LinkerContext = struct {
        },
        .probably_typescript_type => {
            c.graph.meta.items(.probably_typescript_type)[source_index].put(
                c.allocator(),
                c.allocator,
                import_ref,
                {},
            ) catch unreachable;

@@ -2381,7 +2429,7 @@ pub const LinkerContext = struct {
            c.log.addRangeWarningFmt(
                source,
                r,
                c.allocator(),
                c.allocator,
                "Import \"{s}\" will always be undefined because there are multiple matching exports",
                .{
                    named_import.alias.?,

@@ -2391,7 +2439,7 @@ pub const LinkerContext = struct {
            c.log.addRangeErrorFmt(
                source,
                r,
                c.allocator(),
                c.allocator,
                "Ambiguous import \"{s}\" has multiple matching exports",
                .{
                    named_import.alias.?,

@@ -2406,7 +2454,7 @@ pub const LinkerContext = struct {

    pub fn breakOutputIntoPieces(
        c: *LinkerContext,
        alloc: std.mem.Allocator,
        allocator: std.mem.Allocator,
        j: *StringJoiner,
        count: u32,
    ) !Chunk.IntermediateOutput {

@@ -2423,12 +2471,8 @@ pub const LinkerContext = struct {
        // 4. externals
        return .{ .joiner = j.* };

        var pieces = brk: {
            errdefer j.deinit();
            break :brk try std.ArrayList(OutputPiece).initCapacity(alloc, count);
        };
        errdefer pieces.deinit();
        const complete_output = try j.done(alloc);
        var pieces = try std.ArrayList(OutputPiece).initCapacity(allocator, count);
        const complete_output = try j.done(allocator);
        var output = complete_output;

        const prefix = c.unique_key_prefix;

@@ -2503,25 +2547,37 @@ pub const LinkerContext = struct {
    }
};

pub const Ref = bun.ast.Ref;
pub const ThreadPoolLib = bun.ThreadPool;
pub const Ref = @import("../ast/base.zig").Ref;
pub const ThreadPoolLib = @import("../threading.zig");
pub const Fs = @import("../fs.zig");

pub const Index = bun.ast.Index;
const debugTreeShake = Output.scoped(.TreeShake, .hidden);
pub const Index = @import("../ast/base.zig").Index;
const debugTreeShake = Output.scoped(.TreeShake, true);

pub const DeferredBatchTask = bun.bundle_v2.DeferredBatchTask;
pub const ThreadPool = bun.bundle_v2.ThreadPool;
pub const ParseTask = bun.bundle_v2.ParseTask;

const string = []const u8;

const NodeFallbackModules = @import("../node_fallbacks.zig");
const js_printer = @import("../js_printer.zig");
const lex = @import("../js_lexer.zig");
const linker = @import("../linker.zig");
const runtime = @import("../runtime.zig");
const std = @import("std");
const BabyList = @import("../collections/baby_list.zig").BabyList;

const js_ast = @import("../ast.zig");
const B = js_ast.B;
const Binding = js_ast.Binding;
const Dependency = js_ast.Dependency;
const E = js_ast.E;
const Expr = js_ast.Expr;
const G = js_ast.G;
const JSAst = js_ast.BundledAst;
const Part = js_ast.Part;
const S = js_ast.S;
const Stmt = js_ast.Stmt;
const Symbol = js_ast.Symbol;

const Logger = @import("../logger.zig");
const Loc = Logger.Loc;

@@ -2544,23 +2600,13 @@ const bake = bun.bake;
const base64 = bun.base64;
const renamer = bun.renamer;
const sourcemap = bun.sourcemap;
const string = bun.string;
const strings = bun.strings;
const sync = bun.threading;
const sync = bun.ThreadPool;
const AutoBitSet = bun.bit_set.AutoBitSet;
const BabyList = bun.collections.BabyList;

const js_ast = bun.ast;
const B = js_ast.B;
const Binding = js_ast.Binding;
const Dependency = js_ast.Dependency;
const E = js_ast.E;
const Expr = js_ast.Expr;
const G = js_ast.G;
const JSAst = js_ast.BundledAst;
const Part = js_ast.Part;
const S = js_ast.S;
const Stmt = js_ast.Stmt;
const Symbol = js_ast.Symbol;
const JSC = bun.JSC;
const EventLoop = bun.JSC.AnyEventLoop;

const bundler = bun.bundle_v2;
const AdditionalFile = bundler.AdditionalFile;

@@ -2582,6 +2628,3 @@ const StableRef = bundler.StableRef;
const WrapKind = bundler.WrapKind;
const genericPathWithPrettyInitialized = bundler.genericPathWithPrettyInitialized;
const logPartDependencyTree = bundler.logPartDependencyTree;

const jsc = bun.jsc;
const EventLoop = bun.jsc.AnyEventLoop;
@@ -2079,6 +2079,53 @@ pub const BundleV2 = struct {
        this.graph.input_files.items(.loader)[load.source_index.get()] = code.loader;
        this.graph.input_files.items(.source)[load.source_index.get()].contents = code.source_code;
        this.graph.input_files.items(.is_plugin_file)[load.source_index.get()] = true;

        // Parse and store the sourcemap if provided
        if (code.sourcemap) |sourcemap_str| {
            if (bun.sourcemap.parseJSON(
                bun.default_allocator,
                this.graph.allocator,
                sourcemap_str,
                .mappings_and_source_content,
            )) |parsed| {
                if (parsed.map) |map| {
                    this.graph.input_files.items(.sourcemap)[load.source_index.get()] = map;

                    // If we have sources_content, create a new InputFile for the original source
                    if (map.sources_content.len > 0 and map.sources_content[0].len > 0) {
                        const original_source_index = @as(u32, @intCast(this.graph.input_files.len));

                        // Copy the current source but with the original content
                        const current_source = &this.graph.input_files.items(.source)[load.source_index.get()];

                        // Create a new InputFile for the original source
                        this.graph.input_files.append(this.graph.allocator, .{
                            .source = Logger.Source{
                                .path = current_source.path,
                                .contents = map.sources_content[0],
                                .index = Index.init(original_source_index),
                            },
                            .loader = this.graph.input_files.items(.loader)[load.source_index.get()],
                            .side_effects = this.graph.input_files.items(.side_effects)[load.source_index.get()],
                            .allocator = this.graph.allocator,
                            .is_plugin_file = true,
                        }) catch bun.outOfMemory();

                        // Also append an empty AST for this input file
                        this.graph.ast.append(this.graph.allocator, JSAst.empty) catch bun.outOfMemory();

                        // Set the original_source_index on the current file
                        this.graph.input_files.items(.original_source_index)[load.source_index.get()] = original_source_index;
                    }
                }
            } else |err| {
                log.addWarningFmt(&this.graph.input_files.items(.source)[load.source_index.get()], Logger.Loc.Empty, bun.default_allocator, "Failed to parse sourcemap from plugin: {s}", .{@errorName(err)}) catch {};
                this.graph.input_files.items(.sourcemap)[load.source_index.get()] = null;
            }
            // Free the sourcemap string since we've parsed it
            if (!should_copy_for_bundling) this.free_list.append(sourcemap_str) catch unreachable;
        }

        var parse_task = load.parse_task;
        parse_task.loader = code.loader;
        if (!should_copy_for_bundling) this.free_list.append(code.source_code) catch unreachable;
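The BundleV2 hunk above parses the plugin-supplied sourcemap and, when it carries non-empty `sourcesContent`, appends a synthetic InputFile holding the original text, recording its index in `original_source_index` (0 is the "no original source" sentinel). A sketch of that sentinel convention (illustrative TypeScript, not Bun internals; the assumption that index 0 is always occupied mirrors Bun's runtime file at index 0):

```ts
interface InputFileTable {
  contents: string[];            // assumption: index 0 is already taken (runtime file)
  originalSourceIndex: number[]; // parallel array; 0 = "no original source" sentinel
}

// Append a synthetic file holding the original text recovered from a plugin
// sourcemap, and point the plugin file at it. Returns the new index (>= 1).
function attachOriginalSource(t: InputFileTable, pluginFile: number, originalText: string): number {
  const idx = t.contents.length; // next free slot; valid original indices start at 1
  t.contents.push(originalText);
  t.originalSourceIndex.push(0); // the synthetic file has no original of its own
  t.originalSourceIndex[pluginFile] = idx;
  return idx;
}
```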
@@ -12,6 +12,7 @@ interface BundlerPlugin {
    internalID,
    sourceCode: string | Uint8Array | ArrayBuffer | DataView | null,
    loaderKey: number | null,
    sourcemap?: string | Uint8Array | ArrayBuffer | DataView | null,
  ): void;
  /** Binding to `JSBundlerPlugin__onResolveAsync` */
  onResolveAsync(internalID, a, b, c): void;

@@ -490,7 +491,7 @@ export function runOnLoadPlugins(
        continue;
      }

      var { contents, loader = defaultLoader } = result as any;
      var { contents, loader = defaultLoader, sourcemap } = result as any;
      if ((loader as any) === "object") {
        if (!("exports" in result)) {
          throw new TypeError('onLoad plugin returning loader: "object" must have "exports" property');

@@ -511,12 +512,19 @@ export function runOnLoadPlugins(
        throw new TypeError('onLoad plugins must return an object with "loader" as a string');
      }

      // Validate sourcemap if provided
      if (sourcemap !== undefined && sourcemap !== null) {
        if (!(typeof sourcemap === "string") && !$isTypedArrayView(sourcemap)) {
          throw new TypeError('onLoad plugin "sourcemap" must be a string or Uint8Array');
        }
      }

      const chosenLoader = LOADERS_MAP[loader];
      if (chosenLoader === undefined) {
        throw new TypeError(`Loader ${loader} is not supported.`);
      }

      this.onLoadAsync(internalID, contents as any, chosenLoader);
      this.onLoadAsync(internalID, contents as any, chosenLoader, sourcemap);
      return null;
    }
  }
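On the JS side, `runOnLoadPlugins` now destructures and validates the optional `sourcemap` before forwarding it to the native `onLoadAsync` binding. `$isTypedArrayView` is a JSC builtin intrinsic; in plain TypeScript the equivalent check would look roughly like this (a sketch; `assertValidSourcemap` is a hypothetical name and `ArrayBuffer.isView` is a public approximation of the intrinsic):

```ts
// Mirrors the builtin's validation using public APIs: a sourcemap returned
// from onLoad must be a string or a typed-array view (e.g. Uint8Array).
function assertValidSourcemap(sourcemap: unknown): void {
  if (sourcemap !== undefined && sourcemap !== null) {
    if (typeof sourcemap !== "string" && !ArrayBuffer.isView(sourcemap)) {
      throw new TypeError('onLoad plugin "sourcemap" must be a string or Uint8Array');
    }
  }
}
```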
@@ -428,6 +428,7 @@ pub const Options = struct {
    line_offset_tables: ?SourceMap.LineOffsetTable.List = null,

    mangled_props: ?*const bun.bundle_v2.MangledProps,
    input_source_map: ?*bun.sourcemap.ParsedSourceMap = null,

    // Default indentation is 2 spaces
    pub const Indentation = struct {

@@ -5627,6 +5628,7 @@ pub fn getSourceMapBuilder(
    opts: Options,
    source: *const logger.Source,
    tree: *const Ast,
    input_source_map: ?*bun.sourcemap.ParsedSourceMap,
) SourceMap.Chunk.Builder {
    if (comptime generate_source_map == .disable)
        return undefined;

@@ -5639,6 +5641,7 @@ pub fn getSourceMapBuilder(
        .cover_lines_without_mappings = true,
        .approximate_input_line_count = tree.approximate_newline_count,
        .prepend_count = is_bun_platform and generate_source_map == .lazy,
        .input_source_map = input_source_map,
        .line_offset_tables = opts.line_offset_tables orelse brk: {
            if (generate_source_map == .lazy) break :brk SourceMap.LineOffsetTable.generate(
                opts.source_map_allocator orelse opts.allocator,

@@ -5753,7 +5756,7 @@ pub fn printAst(
        tree.import_records.slice(),
        opts,
        renamer,
        getSourceMapBuilder(if (generate_source_map) .lazy else .disable, ascii_only, opts, source, &tree),
        getSourceMapBuilder(if (generate_source_map) .lazy else .disable, ascii_only, opts, source, &tree, opts.input_source_map),
    );
    defer {
        if (comptime generate_source_map) {

@@ -5952,7 +5955,7 @@ pub fn printWithWriterAndPlatform(
        import_records,
        opts,
        renamer,
        getSourceMapBuilder(if (generate_source_maps) .eager else .disable, is_bun_platform, opts, source, &ast),
        getSourceMapBuilder(if (generate_source_maps) .eager else .disable, is_bun_platform, opts, source, &ast, opts.input_source_map),
    );
    printer.was_lazy_export = ast.has_lazy_export;
    var bin_stack_heap = std.heap.stackFallback(1024, bun.default_allocator);

@@ -6035,7 +6038,7 @@ pub fn printCommonJS(
        tree.import_records.slice(),
        opts,
        renamer.toRenamer(),
        getSourceMapBuilder(if (generate_source_map) .lazy else .disable, false, opts, source, &tree),
        getSourceMapBuilder(if (generate_source_map) .lazy else .disable, false, opts, source, &tree, opts.input_source_map),
    );
    var bin_stack_heap = std.heap.stackFallback(1024, bun.default_allocator);
    printer.binary_expression_stack = std.ArrayList(PrinterType.BinaryExpressionVisitor).init(bin_stack_heap.get());
@@ -35,6 +35,8 @@ pub const ParseUrlResultHint = union(enum) {
        column: i32,
        include_names: bool = false,
    },
    /// Parse mappings and also store sources_content in the ParsedSourceMap
    mappings_and_source_content,
};

pub const ParseUrl = struct {

@@ -210,6 +212,18 @@ pub fn parseJSON(

        const ptr = bun.new(ParsedSourceMap, map_data);
        ptr.external_source_names = source_paths_slice.?;
        // Store sources_content if requested
        if (hint == .mappings_and_source_content) {
            const content_slice = alloc.alloc([]const u8, sources_content.items.len) catch bun.outOfMemory();
            for (sources_content.items.slice(), 0..) |item, idx| {
                if (item.data != .e_string) {
                    content_slice[idx] = "";
                } else {
                    content_slice[idx] = alloc.dupe(u8, item.data.e_string.string(alloc) catch "") catch bun.outOfMemory();
                }
            }
            ptr.sources_content = content_slice;
        }

        break :map ptr;
    } else null;

@@ -223,9 +237,10 @@ pub fn parseJSON(
            break :brk .{ mapping, std.math.cast(u32, mapping.source_index) };
        },
        .mappings_only => .{ null, null },
        .mappings_and_source_content => .{ null, null },
    };

    const content_slice: ?[]const u8 = if (hint != .mappings_only and
    const content_slice: ?[]const u8 = if (hint != .mappings_only and hint != .mappings_and_source_content and
        source_index != null and
        source_index.? < sources_content.items.len)
    content: {

@@ -876,6 +891,9 @@ pub const ParsedSourceMap = struct {
    /// loaded without transpilation but with external sources. This array
    /// maps `source_index` to the correct filename.
    external_source_names: []const []const u8 = &.{},
    /// Sources content from the original sourcemap, indexed the same as external_source_names
    /// Only populated when parsing sourcemaps from onLoad plugins
    sources_content: []const []const u8 = &.{},
    /// In order to load source contents from a source-map after the fact,
    /// a handle to the underlying source provider is stored. Within this pointer,
    /// a flag is stored if it is known to be an inline or external source map.

@@ -951,6 +969,12 @@ pub const ParsedSourceMap = struct {
            allocator.free(this.external_source_names);
        }

        if (this.sources_content.len > 0) {
            for (this.sources_content) |content|
                allocator.free(content);
            allocator.free(this.sources_content);
        }

        bun.destroy(this);
    }

@@ -1764,6 +1788,7 @@ pub const Chunk = struct {
    pub fn NewBuilder(comptime SourceMapFormatType: type) type {
        return struct {
            const ThisBuilder = @This();
            input_source_map: ?*ParsedSourceMap = null,
            source_map: SourceMapper,
            line_offset_tables: LineOffsetTable.List = .{},
            prev_state: SourceMapState = SourceMapState{},

@@ -1877,7 +1902,17 @@ pub const Chunk = struct {
                b.last_generated_update = @as(u32, @truncate(output.len));
            }

            pub fn appendMapping(b: *ThisBuilder, current_state: SourceMapState) void {
            pub fn appendMapping(b: *ThisBuilder, current_state_: SourceMapState) void {
                var current_state = current_state_;
                // If the input file had a source map, map all the way back to the original
                if (b.input_source_map) |input| {
                    if (Mapping.find(input.mappings, current_state.original_line, current_state.original_column)) |mapping| {
                        current_state.source_index = mapping.source_index;
                        current_state.original_line = mapping.original.lines;
                        current_state.original_column = mapping.original.columns;
                    }
                }

                b.appendMappingWithoutRemapping(current_state);
            }
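`appendMapping` is where the composition actually happens: each mapping the printer emits is first looked up in the plugin-provided input map, so the final sourcemap points at the original source rather than the intermediate transformed code. The same composition, sketched with the `source-map` npm package the test file below already imports:

```ts
import { SourceMapConsumer } from "source-map";

// Compose: a position in the transformed (intermediate) code is remapped
// through the plugin's input sourcemap back to the original source.
async function remapThroughInputMap(
  inputMapJSON: string,
  intermediate: { line: number; column: number }, // 1-based line, 0-based column
) {
  const consumer = await new SourceMapConsumer(JSON.parse(inputMapJSON));
  const original = consumer.originalPositionFor(intermediate);
  consumer.destroy();
  return original; // { source, line, column, name } or nulls if unmapped
}
```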

test/bundler/bundler-plugin-sourcemap.test.ts (new file, 381 lines)
@@ -0,0 +1,381 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { SourceMapConsumer } from "source-map";
|
||||
import { itBundled } from "./expectBundled";
|
||||
|
||||
// Direct test to verify implementation works
|
||||
test("onLoad plugin sourcemap support - basic", async () => {
|
||||
const result = await Bun.build({
|
||||
entrypoints: [import.meta.dir + "/test-entry.js"],
|
||||
outdir: import.meta.dir + "/out",
|
||||
sourcemap: "external",
|
||||
plugins: [
|
||||
{
|
||||
name: "sourcemap-test",
|
||||
setup(build) {
|
||||
build.onResolve({ filter: /\.transformed\.js$/ }, args => {
|
||||
return {
|
||||
path: args.path,
|
||||
namespace: "transformed",
|
||||
};
|
||||
});
|
||||
|
||||
build.onLoad({ filter: /.*/, namespace: "transformed" }, () => {
|
||||
const code = `console.log("transformed");`;
|
||||
// Create a more complete sourcemap with actual mappings
|
||||
const sourcemap = JSON.stringify({
|
||||
version: 3,
|
||||
sources: ["original.js"],
|
||||
sourcesContent: [`console.log("original");`],
|
||||
names: ["console", "log"],
|
||||
// This mapping says: first segment at (0,0) in generated maps to (0,0) in source 0
|
||||
mappings: "AAAA",
|
||||
});
|
||||
return {
|
||||
contents: code,
|
||||
loader: "js",
|
||||
sourcemap,
|
||||
};
|
||||
});
|
||||
},
|
||||
},
|
||||
],
|
||||
root: import.meta.dir,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.outputs.length).toBeGreaterThan(0);
|
||||
|
||||
// Check for sourcemap output
|
||||
const sourcemapOutput = result.outputs.find(o => o.path.endsWith(".map"));
|
||||
expect(sourcemapOutput).toBeDefined();
|
||||
});

// Test with TypeScript-like transformation
test("onLoad plugin sourcemap support - typescript", async () => {
  const result = await Bun.build({
    entrypoints: [import.meta.dir + "/test-entry.js"],
    outdir: import.meta.dir + "/out2",
    sourcemap: "external",
    minify: false,
    plugins: [
      {
        name: "typescript-transform",
        setup(build) {
          build.onResolve({ filter: /\.transformed\.js$/ }, args => {
            return {
              path: "virtual.ts",
              namespace: "typescript",
            };
          });

          build.onLoad({ filter: /.*/, namespace: "typescript" }, () => {
            // Simulate TypeScript source
            const originalCode = `function greet(name: string): void {
  console.log("Hello, " + name);
}
greet("World");`;

            // Transpiled JavaScript
            const transpiledCode = `function greet(name) {
  console.log("Hello, " + name);
}
greet("World");`;

            // A sourcemap for this transformation
            const sourcemap = JSON.stringify({
              version: 3,
              sources: ["virtual.ts"],
              sourcesContent: [originalCode],
              names: ["greet", "name", "console", "log"],
              // Simple mapping like the working test
              mappings: "AAAA",
            });

            return {
              contents: transpiledCode,
              loader: "js",
              sourcemap,
            };
          });
        },
      },
    ],
    root: import.meta.dir,
  });

  expect(result.success).toBe(true);

  // Check the generated sourcemap
  const sourcemapOutput = result.outputs.find(o => o.path.endsWith(".map"));
  expect(sourcemapOutput).toBeDefined();

  const sourcemapText = await sourcemapOutput!.text();
  const sourcemap = JSON.parse(sourcemapText);

  // Should preserve the TypeScript source (with namespace prefix)
  expect(sourcemap.sources[0]).toBe("typescript:virtual.ts");
  expect(sourcemap.sourcesContent).toBeDefined();

  // Verify the original TypeScript source is preserved
  expect(sourcemap.sourcesContent[0]).toContain("function greet(name: string): void");
  expect(sourcemap.version).toBe(3);
  expect(sourcemap.mappings).toBeDefined();
  expect(sourcemap.mappings.length).toBeGreaterThan(0);
});
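
// Note: sources loaded from a plugin namespace other than "file" come out
// prefixed as "namespace:path" (here "typescript:virtual.ts"), which is what
// the sources[0] assertion above relies on.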

// Test that verifies sourcemap mappings are working
test("onLoad plugin sourcemap remapping", async () => {
  const result = await Bun.build({
    entrypoints: [import.meta.dir + "/test-entry.js"],
    outdir: import.meta.dir + "/out3",
    sourcemap: "external",
    minify: false,
    plugins: [
      {
        name: "sourcemap-remap-test",
        setup(build) {
          build.onResolve({ filter: /\.transformed\.js$/ }, args => {
            return {
              path: "code.ts",
              namespace: "transform",
            };
          });

          build.onLoad({ filter: /.*/, namespace: "transform" }, () => {
            // Original TypeScript-like code
            const originalCode = `// Original comment
function add(a: number, b: number): number {
  return a + b;
}
console.log(add(1, 2));`;

            // Transpiled JavaScript (simulating TypeScript output)
            const transpiledCode = `// Original comment
function add(a, b) {
  return a + b;
}
console.log(add(1, 2));`;

            // This sourcemap maps the transpiled code back to the original:
            // line 1 (comment) maps to line 1, line 2 (function) to line 2, etc.
            const sourcemap = JSON.stringify({
              version: 3,
              sources: ["code.ts"],
              sourcesContent: [originalCode],
              names: ["add", "a", "b", "console", "log"],
              // Simple 1:1 line mapping (see the note after this test)
              mappings: "AAAA;AACA;AACA;AACA;AACA",
            });

            return {
              contents: transpiledCode,
              loader: "js",
              sourcemap,
            };
          });
        },
      },
    ],
    root: import.meta.dir,
  });

  expect(result.success).toBe(true);

  const sourcemapOutput = result.outputs.find(o => o.path.endsWith(".map"));
  expect(sourcemapOutput).toBeDefined();

  const sourcemapText = await sourcemapOutput!.text();
  const sourcemap = JSON.parse(sourcemapText);

  // Use the source-map library to verify the mappings work
  const consumer = await new SourceMapConsumer(sourcemap);

  // Check that we can map from a generated position back to the original.
  // The function "add" should be on line 2 in both files due to our simple mapping.
  const originalPos = consumer.originalPositionFor({
    line: 2,
    column: 9, // "add" in "function add"
  });

  // Should map back to the TypeScript file
  expect(originalPos.source).toContain("code.ts");
  expect(originalPos.line).toBe(2);

  consumer.destroy();
});
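
// Note on the mapping above: a ";" starts a new generated line, and the
// source fields of each segment are deltas from the previous segment.
// "AAAA" decodes (via decodeVLQSegment earlier in this file) to [0, 0, 0, 0],
// and each "AACA" to [0, 0, 1, 0]: generated column 0, same source, next
// original line, original column 0. That is what produces the 1:1 line
// correspondence the assertions rely on.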

describe("bundler", () => {
  describe("onLoad sourcemap", () => {
    itBundled("plugin/SourcemapString", {
      files: {
        "index.js": `import "./test.transformed.js";`,
      },
      plugins(builder) {
        builder.onLoad({ filter: /\.transformed\.js$/ }, () => {
          // Simulate a TypeScript-like transformation
          const originalCode = `function greet(name: string) {
  console.log("Hello, " + name);
}
greet("World");`;

          const transformedCode = `function greet(name) {
  console.log("Hello, " + name);
}
greet("World");`;

          // A sourcemap that maps the transformed code back to the original
          const sourcemap = JSON.stringify({
            version: 3,
            sources: ["transformed.ts"],
            sourcesContent: [originalCode],
            names: ["greet", "name", "console", "log"],
            mappings: "AAAA,SAASA,MAAMC,MACbC,QAAQC,IAAI,UAAYD,MAE1BF,MAAM",
          });

          return {
            contents: transformedCode,
            loader: "js",
            sourcemap,
          };
        });
      },
      outdir: "/out",
      sourcemap: "external",
      onAfterBundle(api) {
        // Check that the sourcemap was generated
        const sourcemapFile = api.outputs.find(f => f.path.endsWith(".js.map"));
        if (!sourcemapFile) {
          throw new Error("Expected sourcemap file to be generated");
        }

        const sourcemap = JSON.parse(sourcemapFile.text);
        if (sourcemap.version !== 3) {
          throw new Error("Expected sourcemap version 3");
        }
        if (!sourcemap.sources.includes("transformed.ts")) {
          throw new Error("Expected sourcemap to contain transformed.ts source");
        }
        if (!sourcemap.sourcesContent?.[0]?.includes("function greet(name: string)")) {
          throw new Error("Expected sourcemap to contain original TypeScript source");
        }
      },
    });

    itBundled("plugin/SourcemapTypedArray", {
      files: {
        "index.js": `import "./test.transformed.js";`,
      },
      plugins(builder) {
        builder.onLoad({ filter: /\.transformed\.js$/ }, () => {
          const code = `console.log("transformed");`;
          const sourcemap = new TextEncoder().encode(
            JSON.stringify({
              version: 3,
              sources: ["original.js"],
              sourcesContent: [`console.log("original");`],
              names: ["console", "log"],
              mappings: "AAAA",
            }),
          );

          return {
            contents: code,
            loader: "js",
            sourcemap: new Uint8Array(sourcemap),
          };
        });
      },
      sourcemap: "inline",
      onAfterBundle(api) {
        const output = api.outputs[0];
        // Check for an inline sourcemap
        if (!output.text.includes("//# sourceMappingURL=data:")) {
          throw new Error("Expected inline sourcemap");
        }
      },
    });
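
    // Together with plugin/SourcemapString above, this covers both returned
    // shapes for the sourcemap: a JSON string and a typed array holding the
    // UTF-8 encoded JSON.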

    itBundled("plugin/SourcemapInvalid", {
      files: {
        "index.js": `import "./test.transformed.js";`,
      },
      plugins(builder) {
        builder.onLoad({ filter: /\.transformed\.js$/ }, () => {
          return {
            contents: `console.log("test");`,
            loader: "js",
            sourcemap: "not a valid sourcemap",
          };
        });
      },
      sourcemap: "external",
      bundleWarnings: {
        "/test.transformed.js": ["Failed to parse sourcemap from plugin: InvalidJSON"],
      },
    });
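
    // A malformed sourcemap is expected to downgrade to a bundle warning
    // rather than fail the build; the bundleWarnings expectation above pins
    // the exact message.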

    itBundled("plugin/SourcemapPreservesOriginal", {
      files: {
        "index.js": `import "./user.ts";`,
      },
      plugins(builder) {
        // First transformation: TypeScript -> JavaScript
        builder.onLoad({ filter: /\.ts$/ }, () => {
          const tsCode = `interface User {
  name: string;
  age: number;
}

function greet(user: User): void {
  console.log(\`Hello, \${user.name}! You are \${user.age} years old.\`);
}

const john: User = { name: "John", age: 30 };
greet(john);`;

          const jsCode = `function greet(user) {
  console.log(\`Hello, \${user.name}! You are \${user.age} years old.\`);
}

const john = { name: "John", age: 30 };
greet(john);`;

          // Simplified sourcemap for the transformation
          const sourcemap = JSON.stringify({
            version: 3,
            sources: ["user.ts"],
            sourcesContent: [tsCode],
            names: ["greet", "user", "console", "log", "name", "age", "john"],
            mappings:
              "AAIA,SAASA,MAAMC,OACbC,QAAQC,IAAI,WAAWF,KAAKG,aAAaH,KAAKI,eAGhD,MAAMC,MAAQ,CAAEF,KAAM,OAAQC,IAAK,IACnCL,MAAMM",
          });

          return {
            contents: jsCode,
            loader: "js",
            sourcemap,
          };
        });
      },
      outdir: "/out",
      sourcemap: "external",
      onAfterBundle(api) {
        const sourcemapFile = api.outputs.find(f => f.path.endsWith(".js.map"));
        if (!sourcemapFile) {
          throw new Error("Expected sourcemap file to be generated");
        }

        const sourcemap = JSON.parse(sourcemapFile.text);
        // Should preserve the original TypeScript source
        if (!sourcemap.sources.includes("user.ts")) {
          throw new Error("Expected sourcemap to contain user.ts");
        }
        if (!sourcemap.sourcesContent?.[0]?.includes("interface User")) {
          throw new Error("Expected sourcemap to contain original TypeScript source");
        }
      },
    });
  });
});
1	test/bundler/test-entry.js	Normal file
@@ -0,0 +1 @@
import "./test.transformed.js";