[autofix.ci] apply automated fixes

autofix-ci[bot]
2025-08-23 06:45:54 +00:00
committed by GitHub
parent 7a421cb4bb
commit 75bb5da560
5 changed files with 248 additions and 232 deletions

View File

@@ -999,7 +999,7 @@ pub const JSBundler = struct {
@panic("Unexpected: source_code is not a string");
};
const sourcemap = if (!sourcemap_value.isEmptyOrUndefinedOrNull())
bun.JSC.Node.StringOrBuffer.fromJSToOwnedSlice(global, sourcemap_value, bun.default_allocator) catch |err| {
switch (err) {
@@ -1013,7 +1013,7 @@ pub const JSBundler = struct {
}
else
null;
this.value = .{
.success = .{
.loader = options.Loader.fromAPI(loader),

View File

@@ -161,7 +161,7 @@ pub const LinkerContext = struct {
&this.parse_graph.input_files.items(.source)[original_source_index]
else
&this.parse_graph.input_files.items(.source)[source_index];
const mutable = MutableString.initEmpty(allocator);
quoted_source_contents.* = (js_printer.quoteForJSON(source.contents, mutable, false) catch bun.outOfMemory()).list.items;
}
@@ -405,12 +405,12 @@ pub const LinkerContext = struct {
this.parse_graph.heap.helpCatchMemoryIssues();
}
-pub const computeChunks = @import("linker_context/computeChunks.zig").computeChunks;
+pub const computeChunks = @import("./linker_context/computeChunks.zig").computeChunks;
-pub const findAllImportedPartsInJSOrder = @import("linker_context/findAllImportedPartsInJSOrder.zig").findAllImportedPartsInJSOrder;
-pub const findImportedPartsInJSOrder = @import("linker_context/findAllImportedPartsInJSOrder.zig").findImportedPartsInJSOrder;
-pub const findImportedFilesInCSSOrder = @import("linker_context/findImportedFilesInCSSOrder.zig").findImportedFilesInCSSOrder;
-pub const findImportedCSSFilesInJSOrder = @import("linker_context/findImportedCSSFilesInJSOrder.zig").findImportedCSSFilesInJSOrder;
+pub const findAllImportedPartsInJSOrder = @import("./linker_context/findAllImportedPartsInJSOrder.zig").findAllImportedPartsInJSOrder;
+pub const findImportedPartsInJSOrder = @import("./linker_context/findAllImportedPartsInJSOrder.zig").findImportedPartsInJSOrder;
+pub const findImportedFilesInCSSOrder = @import("./linker_context/findImportedFilesInCSSOrder.zig").findImportedFilesInCSSOrder;
+pub const findImportedCSSFilesInJSOrder = @import("./linker_context/findImportedCSSFilesInJSOrder.zig").findImportedCSSFilesInJSOrder;
pub fn generateNamedExportInFile(this: *LinkerContext, source_index: Index.Int, module_ref: Ref, name: []const u8, alias: []const u8) !struct { Ref, u32 } {
const ref = this.graph.generateNewSymbol(source_index, .other, name);
@@ -441,10 +441,10 @@ pub const LinkerContext = struct {
return .{ ref, part_index };
}
-pub const generateCodeForLazyExport = @import("linker_context/generateCodeForLazyExport.zig").generateCodeForLazyExport;
-pub const scanImportsAndExports = @import("linker_context/scanImportsAndExports.zig").scanImportsAndExports;
-pub const doStep5 = @import("linker_context/doStep5.zig").doStep5;
-pub const createExportsForFile = @import("linker_context/doStep5.zig").createExportsForFile;
+pub const generateCodeForLazyExport = @import("./linker_context/generateCodeForLazyExport.zig").generateCodeForLazyExport;
+pub const scanImportsAndExports = @import("./linker_context/scanImportsAndExports.zig").scanImportsAndExports;
+pub const doStep5 = @import("./linker_context/doStep5.zig").doStep5;
+pub const createExportsForFile = @import("./linker_context/doStep5.zig").createExportsForFile;
pub fn scanCSSImports(
this: *LinkerContext,
@@ -590,7 +590,7 @@ pub const LinkerContext = struct {
pub const Map = std.AutoArrayHashMap(Ref, void);
};
-pub const computeCrossChunkDependencies = @import("linker_context/computeCrossChunkDependencies.zig").computeCrossChunkDependencies;
+pub const computeCrossChunkDependencies = @import("./linker_context/computeCrossChunkDependencies.zig").computeCrossChunkDependencies;
pub const GenerateChunkCtx = struct {
wg: *sync.WaitGroup,
@@ -599,9 +599,9 @@ pub const LinkerContext = struct {
chunk: *Chunk,
};
-pub const postProcessJSChunk = @import("linker_context/postProcessJSChunk.zig").postProcessJSChunk;
-pub const postProcessCSSChunk = @import("linker_context/postProcessCSSChunk.zig").postProcessCSSChunk;
-pub const postProcessHTMLChunk = @import("linker_context/postProcessHTMLChunk.zig").postProcessHTMLChunk;
+pub const postProcessJSChunk = @import("./linker_context/postProcessJSChunk.zig").postProcessJSChunk;
+pub const postProcessCSSChunk = @import("./linker_context/postProcessCSSChunk.zig").postProcessCSSChunk;
+pub const postProcessHTMLChunk = @import("./linker_context/postProcessHTMLChunk.zig").postProcessHTMLChunk;
pub fn generateChunk(ctx: GenerateChunkCtx, chunk: *Chunk, chunk_index: usize) void {
defer ctx.wg.finish();
const worker = ThreadPool.Worker.get(@fieldParentPtr("linker", ctx.c));
@@ -613,7 +613,7 @@ pub const LinkerContext = struct {
}
}
-pub const renameSymbolsInChunk = @import("linker_context/renameSymbolsInChunk.zig").renameSymbolsInChunk;
+pub const renameSymbolsInChunk = @import("./linker_context/renameSymbolsInChunk.zig").renameSymbolsInChunk;
pub fn generateJSRenamer(ctx: GenerateChunkCtx, chunk: *Chunk, chunk_index: usize) void {
defer ctx.wg.finish();
@@ -635,13 +635,13 @@ pub const LinkerContext = struct {
) catch @panic("TODO: handle error");
}
-pub const generateChunksInParallel = @import("linker_context/generateChunksInParallel.zig").generateChunksInParallel;
-pub const generateCompileResultForJSChunk = @import("linker_context/generateCompileResultForJSChunk.zig").generateCompileResultForJSChunk;
-pub const generateCompileResultForCssChunk = @import("linker_context/generateCompileResultForCssChunk.zig").generateCompileResultForCssChunk;
-pub const generateCompileResultForHtmlChunk = @import("linker_context/generateCompileResultForHtmlChunk.zig").generateCompileResultForHtmlChunk;
+pub const generateChunksInParallel = @import("./linker_context/generateChunksInParallel.zig").generateChunksInParallel;
+pub const generateCompileResultForJSChunk = @import("./linker_context/generateCompileResultForJSChunk.zig").generateCompileResultForJSChunk;
+pub const generateCompileResultForCssChunk = @import("./linker_context/generateCompileResultForCssChunk.zig").generateCompileResultForCssChunk;
+pub const generateCompileResultForHtmlChunk = @import("./linker_context/generateCompileResultForHtmlChunk.zig").generateCompileResultForHtmlChunk;
-pub const prepareCssAstsForChunk = @import("linker_context/prepareCssAstsForChunk.zig").prepareCssAstsForChunk;
-pub const PrepareCssAstTask = @import("linker_context/prepareCssAstsForChunk.zig").PrepareCssAstTask;
+pub const prepareCssAstsForChunk = @import("./linker_context/prepareCssAstsForChunk.zig").prepareCssAstsForChunk;
+pub const PrepareCssAstTask = @import("./linker_context/prepareCssAstsForChunk.zig").PrepareCssAstTask;
pub fn generateSourceMapForChunk(
c: *LinkerContext,
@@ -725,14 +725,14 @@ pub const LinkerContext = struct {
const input_sourcemaps = c.parse_graph.input_files.items(.sourcemap);
if (source_indices_for_contents.len > 0) {
j.pushStatic("\n ");
// Check if we have an input sourcemap with sources_content
const first_index = source_indices_for_contents[0];
// Try to find sourcemap - either at this index or at a related plugin file index
var sourcemap_to_use: ?*bun.sourcemap.ParsedSourceMap = null;
var sourcemap_index: u32 = first_index;
if (input_sourcemaps[first_index]) |sm| {
sourcemap_to_use = sm;
} else {
@@ -750,7 +750,7 @@ pub const LinkerContext = struct {
}
}
}
if (sourcemap_to_use) |input_sourcemap| {
if (input_sourcemap.sources_content.len > 0) {
// Use the original source content from the input sourcemap
@@ -767,10 +767,10 @@ pub const LinkerContext = struct {
for (source_indices_for_contents[1..]) |index| {
j.pushStatic(",\n ");
// Try to find sourcemap - either at this index or at a related plugin file index
var loop_sourcemap_to_use: ?*bun.sourcemap.ParsedSourceMap = null;
if (input_sourcemaps[index]) |sm| {
loop_sourcemap_to_use = sm;
} else {
@@ -787,7 +787,7 @@ pub const LinkerContext = struct {
}
}
}
if (loop_sourcemap_to_use) |input_sourcemap| {
if (input_sourcemap.sources_content.len > 0) {
// Use the original source content from the input sourcemap
@@ -1230,14 +1230,14 @@ pub const LinkerContext = struct {
return true;
}
-pub const convertStmtsForChunk = @import("linker_context/convertStmtsForChunk.zig").convertStmtsForChunk;
-pub const convertStmtsForChunkForDevServer = @import("linker_context/convertStmtsForChunkForDevServer.zig").convertStmtsForChunkForDevServer;
+pub const convertStmtsForChunk = @import("./linker_context/convertStmtsForChunk.zig").convertStmtsForChunk;
+pub const convertStmtsForChunkForDevServer = @import("./linker_context/convertStmtsForChunkForDevServer.zig").convertStmtsForChunkForDevServer;
pub fn runtimeFunction(c: *LinkerContext, name: []const u8) Ref {
return c.graph.runtimeFunction(name);
}
-pub const generateCodeForFileInChunkJS = @import("linker_context/generateCodeForFileInChunkJS.zig").generateCodeForFileInChunkJS;
+pub const generateCodeForFileInChunkJS = @import("./linker_context/generateCodeForFileInChunkJS.zig").generateCodeForFileInChunkJS;
pub fn printCodeForFileInChunkJS(
c: *LinkerContext,
@@ -1470,7 +1470,7 @@ pub const LinkerContext = struct {
hash.write(std.mem.asBytes(&chunk.isolated_hash));
}
-pub const writeOutputFilesToDisk = @import("linker_context/writeOutputFilesToDisk.zig").writeOutputFilesToDisk;
+pub const writeOutputFilesToDisk = @import("./linker_context/writeOutputFilesToDisk.zig").writeOutputFilesToDisk;
// Sort cross-chunk exports by chunk name for determinism
pub fn sortedCrossChunkExportItems(
@@ -2547,77 +2547,84 @@ pub const LinkerContext = struct {
}
};
-const bun = @import("bun");
-const string = bun.string;
-const Output = bun.Output;
-const Environment = bun.Environment;
-const strings = bun.strings;
-const MutableString = bun.MutableString;
-const FeatureFlags = bun.FeatureFlags;
-const std = @import("std");
-const lex = @import("../js_lexer.zig");
-const Logger = @import("../logger.zig");
-const options = @import("../options.zig");
-const Part = js_ast.Part;
-const js_printer = @import("../js_printer.zig");
-const js_ast = @import("../ast.zig");
-const linker = @import("../linker.zig");
-const sourcemap = bun.sourcemap;
-const StringJoiner = bun.StringJoiner;
-const base64 = bun.base64;
-pub const Ref = @import("../ast/base.zig").Ref;
-pub const ThreadPoolLib = @import("../threading.zig");
-const BabyList = @import("../collections/baby_list.zig").BabyList;
-pub const Fs = @import("../fs.zig");
-const _resolver = @import("../resolver/resolver.zig");
-const sync = bun.ThreadPool;
-const ImportRecord = bun.ImportRecord;
-const runtime = @import("../runtime.zig");
-const NodeFallbackModules = @import("../node_fallbacks.zig");
-const Resolver = _resolver.Resolver;
-const Dependency = js_ast.Dependency;
-const JSAst = js_ast.BundledAst;
-const Loader = options.Loader;
-pub const Index = @import("../ast/base.zig").Index;
-const Symbol = js_ast.Symbol;
-const EventLoop = bun.JSC.AnyEventLoop;
-const MultiArrayList = bun.MultiArrayList;
-const Stmt = js_ast.Stmt;
-const Expr = js_ast.Expr;
-const E = js_ast.E;
-const S = js_ast.S;
-const G = js_ast.G;
-const B = js_ast.B;
-const Binding = js_ast.Binding;
-const AutoBitSet = bun.bit_set.AutoBitSet;
-const renamer = bun.renamer;
-const JSC = bun.JSC;
-const debugTreeShake = Output.scoped(.TreeShake, true);
-const Loc = Logger.Loc;
-const bake = bun.bake;
-const bundler = bun.bundle_v2;
-const BundleV2 = bundler.BundleV2;
-const Graph = bundler.Graph;
-const LinkerGraph = bundler.LinkerGraph;
-pub const DeferredBatchTask = bun.bundle_v2.DeferredBatchTask;
-pub const ThreadPool = bun.bundle_v2.ThreadPool;
-pub const ParseTask = bun.bundle_v2.ParseTask;
-const ImportTracker = bundler.ImportTracker;
-const MangledProps = bundler.MangledProps;
+const NodeFallbackModules = @import("../node_fallbacks.zig");
+const js_printer = @import("../js_printer.zig");
+const lex = @import("../js_lexer.zig");
+const linker = @import("../linker.zig");
+const runtime = @import("../runtime.zig");
+const std = @import("std");
+const BabyList = @import("../collections/baby_list.zig").BabyList;
+const js_ast = @import("../ast.zig");
+const B = js_ast.B;
+const Binding = js_ast.Binding;
+const Dependency = js_ast.Dependency;
+const E = js_ast.E;
+const Expr = js_ast.Expr;
+const G = js_ast.G;
+const JSAst = js_ast.BundledAst;
+const Part = js_ast.Part;
+const S = js_ast.S;
+const Stmt = js_ast.Stmt;
+const Symbol = js_ast.Symbol;
+const Logger = @import("../logger.zig");
+const Loc = Logger.Loc;
+const options = @import("../options.zig");
+const Loader = options.Loader;
+const _resolver = @import("../resolver/resolver.zig");
+const Resolver = _resolver.Resolver;
+const bun = @import("bun");
+const Environment = bun.Environment;
+const FeatureFlags = bun.FeatureFlags;
+const ImportRecord = bun.ImportRecord;
+const MultiArrayList = bun.MultiArrayList;
+const MutableString = bun.MutableString;
+const Output = bun.Output;
+const StringJoiner = bun.StringJoiner;
+const bake = bun.bake;
+const base64 = bun.base64;
+const renamer = bun.renamer;
+const sourcemap = bun.sourcemap;
+const string = bun.string;
+const strings = bun.strings;
+const sync = bun.ThreadPool;
+const AutoBitSet = bun.bit_set.AutoBitSet;
+const JSC = bun.JSC;
+const EventLoop = bun.JSC.AnyEventLoop;
+const bundler = bun.bundle_v2;
+const AdditionalFile = bundler.AdditionalFile;
+const BundleV2 = bundler.BundleV2;
+const Chunk = bundler.Chunk;
-const ServerComponentBoundary = bundler.ServerComponentBoundary;
-const PartRange = bundler.PartRange;
-const JSMeta = bundler.JSMeta;
-const ExportData = bundler.ExportData;
-const EntryPoint = bundler.EntryPoint;
-const RefImportData = bundler.RefImportData;
-const StableRef = bundler.StableRef;
+const CompileResultForSourceMap = bundler.CompileResultForSourceMap;
+const ContentHasher = bundler.ContentHasher;
+const EntryPoint = bundler.EntryPoint;
+const ExportData = bundler.ExportData;
+const Graph = bundler.Graph;
+const ImportTracker = bundler.ImportTracker;
+const JSMeta = bundler.JSMeta;
+const LinkerGraph = bundler.LinkerGraph;
+const MangledProps = bundler.MangledProps;
+const PartRange = bundler.PartRange;
+const RefImportData = bundler.RefImportData;
+const ServerComponentBoundary = bundler.ServerComponentBoundary;
+const StableRef = bundler.StableRef;
+const WrapKind = bundler.WrapKind;
+const genericPathWithPrettyInitialized = bundler.genericPathWithPrettyInitialized;
-const AdditionalFile = bundler.AdditionalFile;
+const logPartDependencyTree = bundler.logPartDependencyTree;
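Note: besides the import-path and import-order cleanup, the substantive hunks above sit in generateSourceMapForChunk: sourcesContent now prefers a plugin-supplied input sourcemap, looked up first at the file's own index and then at a related plugin file index, and only falls back to quoting the file's contents via js_printer.quoteForJSON. A rough TypeScript mirror of that precedence (a sketch, not the Zig implementation):

// Sketch of the fallback: original content recorded in an input sourcemap
// wins; otherwise the bundled file's own contents are quoted for JSON.
type InputMap = { sourcesContent: string[] } | null;
function sourcesContentFor(inputMap: InputMap, fileContents: string): string {
  if (inputMap && inputMap.sourcesContent.length > 0) {
    return JSON.stringify(inputMap.sourcesContent[0]); // original source wins
  }
  return JSON.stringify(fileContents); // mirrors js_printer.quoteForJSON
}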

View File

@@ -2079,7 +2079,7 @@ pub const BundleV2 = struct {
this.graph.input_files.items(.loader)[load.source_index.get()] = code.loader;
this.graph.input_files.items(.source)[load.source_index.get()].contents = code.source_code;
this.graph.input_files.items(.is_plugin_file)[load.source_index.get()] = true;
// Parse and store the sourcemap if provided
if (code.sourcemap) |sourcemap_str| {
if (bun.sourcemap.parseJSON(
@@ -2090,14 +2090,14 @@ pub const BundleV2 = struct {
)) |parsed| {
if (parsed.map) |map| {
this.graph.input_files.items(.sourcemap)[load.source_index.get()] = map;
// If we have sources_content, create a new InputFile for the original source
if (map.sources_content.len > 0 and map.sources_content[0].len > 0) {
const original_source_index = @as(u32, @intCast(this.graph.input_files.len));
// Copy the current source but with the original content
const current_source = &this.graph.input_files.items(.source)[load.source_index.get()];
// Create a new InputFile for the original source
this.graph.input_files.append(this.graph.allocator, .{
.source = Logger.Source{
@@ -2110,10 +2110,10 @@ pub const BundleV2 = struct {
.allocator = this.graph.allocator,
.is_plugin_file = true,
}) catch bun.outOfMemory();
// Also append an empty AST for this input file
this.graph.ast.append(this.graph.allocator, JSAst.empty) catch bun.outOfMemory();
// Set the original_source_index on the current file
this.graph.input_files.items(.original_source_index)[load.source_index.get()] = original_source_index;
}
@@ -2125,7 +2125,7 @@ pub const BundleV2 = struct {
// Free the sourcemap string since we've parsed it
if (!should_copy_for_bundling) this.free_list.append(sourcemap_str) catch unreachable;
}
var parse_task = load.parse_task;
parse_task.loader = code.loader;
if (!should_copy_for_bundling) this.free_list.append(code.source_code) catch unreachable;

View File

@@ -224,7 +224,7 @@ pub fn parseJSON(
}
ptr.sources_content = content_slice;
}
break :map ptr;
} else null;
errdefer if (map) |m| m.deref();

View File

@@ -1,6 +1,6 @@
-import { describe, test, expect } from "bun:test";
-import { itBundled } from "./expectBundled";
+import { describe, expect, test } from "bun:test";
import { SourceMapConsumer } from "source-map";
+import { itBundled } from "./expectBundled";
// Direct test to verify implementation works
test("onLoad plugin sourcemap support - basic", async () => {
@@ -8,41 +8,43 @@ test("onLoad plugin sourcemap support - basic", async () => {
entrypoints: [import.meta.dir + "/test-entry.js"],
outdir: import.meta.dir + "/out",
sourcemap: "external",
-plugins: [{
-name: "sourcemap-test",
-setup(build) {
-build.onResolve({ filter: /\.transformed\.js$/ }, (args) => {
-return {
-path: args.path,
-namespace: "transformed",
-};
-});
-build.onLoad({ filter: /.*/, namespace: "transformed" }, () => {
-const code = `console.log("transformed");`;
-// Create a more complete sourcemap with actual mappings
-const sourcemap = JSON.stringify({
-version: 3,
-sources: ["original.js"],
-sourcesContent: [`console.log("original");`],
-names: ["console", "log"],
-// This mapping says: first segment at (0,0) in generated maps to (0,0) in source 0
-mappings: "AAAA",
+plugins: [
+{
+name: "sourcemap-test",
+setup(build) {
+build.onResolve({ filter: /\.transformed\.js$/ }, args => {
+return {
+path: args.path,
+namespace: "transformed",
+};
+});
-return {
-contents: code,
-loader: "js",
-sourcemap,
-};
-});
-}
-}],
build.onLoad({ filter: /.*/, namespace: "transformed" }, () => {
const code = `console.log("transformed");`;
// Create a more complete sourcemap with actual mappings
const sourcemap = JSON.stringify({
version: 3,
sources: ["original.js"],
sourcesContent: [`console.log("original");`],
names: ["console", "log"],
// This mapping says: first segment at (0,0) in generated maps to (0,0) in source 0
mappings: "AAAA",
});
return {
contents: code,
loader: "js",
sourcemap,
};
});
},
},
],
root: import.meta.dir,
});
expect(result.success).toBe(true);
expect(result.outputs.length).toBeGreaterThan(0);
// Check for sourcemap output
const sourcemapOutput = result.outputs.find(o => o.path.endsWith(".map"));
expect(sourcemapOutput).toBeDefined();
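Note: the mappings: "AAAA" used in this test is the smallest useful mapping. It decodes to the four-field VLQ segment [0, 0, 0, 0], i.e. generated line 0, column 0 maps to source 0, line 0, column 0 — exactly what the comment in the test claims. A small standalone decoder sketch for sanity-checking that:

// Base64 VLQ decoder sketch: bit 5 of each digit is the continuation
// flag, and the low bit of the assembled value is the sign bit.
const B64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
function decodeVLQ(segment: string): number[] {
  const out: number[] = [];
  let value = 0, shift = 0;
  for (const ch of segment) {
    const digit = B64.indexOf(ch);
    value |= (digit & 31) << shift;
    if (digit & 32) { shift += 5; continue; } // continuation bit set, keep reading
    out.push(value & 1 ? -(value >>> 1) : value >>> 1); // low bit is the sign
    value = 0; shift = 0;
  }
  return out;
}
console.log(decodeVLQ("AAAA")); // [0, 0, 0, 0]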
@@ -53,65 +55,67 @@ test("onLoad plugin sourcemap support - typescript", async () => {
const result = await Bun.build({
entrypoints: [import.meta.dir + "/test-entry.js"],
outdir: import.meta.dir + "/out2",
sourcemap: "external",
sourcemap: "external",
minify: false,
-plugins: [{
-name: "typescript-transform",
-setup(build) {
-build.onResolve({ filter: /\.transformed\.js$/ }, (args) => {
-return {
-path: "virtual.ts",
-namespace: "typescript",
-};
-});
-build.onLoad({ filter: /.*/, namespace: "typescript" }, () => {
-// Simulate TypeScript source
-const originalCode = `function greet(name: string): void {
-console.log("Hello, " + name);
-}
-greet("World");`;
-// Transpiled JavaScript
-const transpiledCode = `function greet(name) {
-console.log("Hello, " + name);
-}
-greet("World");`;
-// A proper sourcemap for this transformation
-const sourcemap = JSON.stringify({
-version: 3,
-sources: ["virtual.ts"],
-sourcesContent: [originalCode],
-names: ["greet", "name", "console", "log"],
-// Simple mapping like the working test
-mappings: "AAAA",
+plugins: [
+{
+name: "typescript-transform",
+setup(build) {
+build.onResolve({ filter: /\.transformed\.js$/ }, args => {
+return {
+path: "virtual.ts",
+namespace: "typescript",
+};
+});
-return {
-contents: transpiledCode,
-loader: "js",
-sourcemap,
-};
-});
-}
-}],
build.onLoad({ filter: /.*/, namespace: "typescript" }, () => {
// Simulate TypeScript source
const originalCode = `function greet(name: string): void {
console.log("Hello, " + name);
}
greet("World");`;
// Transpiled JavaScript
const transpiledCode = `function greet(name) {
console.log("Hello, " + name);
}
greet("World");`;
// A proper sourcemap for this transformation
const sourcemap = JSON.stringify({
version: 3,
sources: ["virtual.ts"],
sourcesContent: [originalCode],
names: ["greet", "name", "console", "log"],
// Simple mapping like the working test
mappings: "AAAA",
});
return {
contents: transpiledCode,
loader: "js",
sourcemap,
};
});
},
},
],
root: import.meta.dir,
});
expect(result.success).toBe(true);
// Check the generated sourcemap
const sourcemapOutput = result.outputs.find(o => o.path.endsWith(".map"));
expect(sourcemapOutput).toBeDefined();
const sourcemapText = await sourcemapOutput!.text();
const sourcemap = JSON.parse(sourcemapText);
// Should preserve the TypeScript source (with namespace prefix)
expect(sourcemap.sources[0]).toBe("typescript:virtual.ts");
expect(sourcemap.sourcesContent).toBeDefined();
// Verify the original TypeScript source is preserved
expect(sourcemap.sourcesContent[0]).toContain("function greet(name: string): void");
expect(sourcemap.version).toBe(3);
@@ -126,77 +130,79 @@ test("onLoad plugin sourcemap remapping", async () => {
outdir: import.meta.dir + "/out3",
sourcemap: "external",
minify: false,
-plugins: [{
-name: "sourcemap-remap-test",
-setup(build) {
-build.onResolve({ filter: /\.transformed\.js$/ }, (args) => {
-return {
-path: "code.ts",
-namespace: "transform",
-};
-});
-build.onLoad({ filter: /.*/, namespace: "transform" }, () => {
-// Original TypeScript-like code
-const originalCode = `// Original comment
+plugins: [
+{
+name: "sourcemap-remap-test",
+setup(build) {
+build.onResolve({ filter: /\.transformed\.js$/ }, args => {
+return {
+path: "code.ts",
+namespace: "transform",
+};
+});
+build.onLoad({ filter: /.*/, namespace: "transform" }, () => {
+// Original TypeScript-like code
+const originalCode = `// Original comment
function add(a: number, b: number): number {
return a + b;
}
console.log(add(1, 2));`;
-// Transpiled JavaScript (simulating TypeScript output)
-const transpiledCode = `// Original comment
+// Transpiled JavaScript (simulating TypeScript output)
+const transpiledCode = `// Original comment
function add(a, b) {
return a + b;
}
console.log(add(1, 2));`;
-// This sourcemap maps the transpiled code back to the original
-// Line 1 (comment) maps to line 1
-// Line 2 (function) maps to line 2
-// etc.
-const sourcemap = JSON.stringify({
-version: 3,
-sources: ["code.ts"],
-sourcesContent: [originalCode],
-names: ["add", "a", "b", "console", "log"],
-// Simple 1:1 line mapping
-mappings: "AAAA;AACA;AACA;AACA;AACA",
+// This sourcemap maps the transpiled code back to the original
+// Line 1 (comment) maps to line 1
+// Line 2 (function) maps to line 2
+// etc.
+const sourcemap = JSON.stringify({
+version: 3,
+sources: ["code.ts"],
+sourcesContent: [originalCode],
+names: ["add", "a", "b", "console", "log"],
+// Simple 1:1 line mapping
+mappings: "AAAA;AACA;AACA;AACA;AACA",
+});
-return {
-contents: transpiledCode,
-loader: "js",
-sourcemap,
-};
-});
+return {
+contents: transpiledCode,
+loader: "js",
+sourcemap,
+};
+});
-}
-}],
+},
+},
+],
root: import.meta.dir,
});
expect(result.success).toBe(true);
const sourcemapOutput = result.outputs.find(o => o.path.endsWith(".map"));
expect(sourcemapOutput).toBeDefined();
const sourcemapText = await sourcemapOutput!.text();
const sourcemap = JSON.parse(sourcemapText);
// Use source-map library to verify mappings work
const consumer = await new SourceMapConsumer(sourcemap);
// Check that we can map from generated position back to original
// The function "add" should be on line 2 in both files due to our simple mapping
const originalPos = consumer.originalPositionFor({
line: 2,
column: 9, // "add" in "function add"
});
// Should map back to the TypeScript file
expect(originalPos.source).toContain("code.ts");
expect(originalPos.line).toBe(2);
consumer.destroy();
});
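Note: the "AAAA;AACA;AACA;AACA;AACA" string in this test is a 1:1 line mapping: each ";" starts a new generated line, and "AACA" decodes to the deltas [0, 0, +1, 0] — column 0, same source, next source line, column 0 — which is why SourceMapConsumer resolves line 2 back to line 2. A sketch that builds such an identity mapping for n lines:

// "AAAA" is the absolute first segment; every following generated line
// advances the source line by one ("AACA": the +1 delta encodes as "C").
function identityMappings(lines: number): string {
  return ["AAAA", ...Array.from({ length: lines - 1 }, () => "AACA")].join(";");
}
console.log(identityMappings(5)); // "AAAA;AACA;AACA;AACA;AACA"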
@@ -243,7 +249,7 @@ greet("World");`;
if (!sourcemapFile) {
throw new Error("Expected sourcemap file to be generated");
}
const sourcemap = JSON.parse(sourcemapFile.text);
if (sourcemap.version !== 3) {
throw new Error("Expected sourcemap version 3");
@@ -264,13 +270,15 @@ greet("World");`;
plugins(builder) {
builder.onLoad({ filter: /\.transformed\.js$/ }, () => {
const code = `console.log("transformed");`;
-const sourcemap = new TextEncoder().encode(JSON.stringify({
-version: 3,
-sources: ["original.js"],
-sourcesContent: [`console.log("original");`],
-names: ["console", "log"],
-mappings: "AAAA",
-}));
+const sourcemap = new TextEncoder().encode(
+JSON.stringify({
+version: 3,
+sources: ["original.js"],
+sourcesContent: [`console.log("original");`],
+names: ["console", "log"],
+mappings: "AAAA",
+}),
+);
return {
contents: code,
@@ -340,7 +348,8 @@ greet(john);`;
sources: ["user.ts"],
sourcesContent: [tsCode],
names: ["greet", "user", "console", "log", "name", "age", "john"],
mappings: "AAIA,SAASA,MAAMC,OACbC,QAAQC,IAAI,WAAWF,KAAKG,aAAaH,KAAKI,eAGhD,MAAMC,MAAQ,CAAEF,KAAM,OAAQC,IAAK,IACnCL,MAAMM",
mappings:
"AAIA,SAASA,MAAMC,OACbC,QAAQC,IAAI,WAAWF,KAAKG,aAAaH,KAAKI,eAGhD,MAAMC,MAAQ,CAAEF,KAAM,OAAQC,IAAK,IACnCL,MAAMM",
});
return {
@@ -357,7 +366,7 @@ greet(john);`;
if (!sourcemapFile) {
throw new Error("Expected sourcemap file to be generated");
}
const sourcemap = JSON.parse(sourcemapFile.text);
// Should preserve the original TypeScript source
if (!sourcemap.sources.includes("user.ts")) {
@@ -369,4 +378,4 @@ greet(john);`;
},
});
});
-});
+});