mirror of https://github.com/oven-sh/bun
synced 2026-02-03 07:28:53 +00:00

Compare commits: dylan/pyth... -> jarred/ena...
1 commit: a7c7070d5c
@@ -70,6 +70,16 @@ ts_enums: TsEnumsMap = .{},
 has_commonjs_export_names: bool = false,
 import_meta_ref: Ref = Ref.None,

+/// `#sourceURL="foo"`
+///
+/// This is a slice into the source.contents.
+sourceURL: js_ast.Span = .{},
+
+/// `#sourceMappingURL=bar.map`
+///
+/// This is a slice into the source.contents.
+sourceMappingURL: js_ast.Span = .{},
+
 pub const CommonJSNamedExport = struct {
     loc_ref: LocRef,
     needs_decl: bool = true,
@@ -3127,6 +3127,21 @@ pub const Data = union(Tag) {
 pub threadlocal var memory_allocator: ?*ASTMemoryAllocator = null;
+pub threadlocal var disable_reset = false;

+pub const DisableResetScope = struct {
+    no_op: bool,
+
+    pub fn exit(self: DisableResetScope) void {
+        if (!self.no_op)
+            disable_reset = false;
+    }
+};
+
+pub fn disableResetIfNecessary() DisableResetScope {
+    const no_op = !disable_reset and memory_allocator == null;
+    disable_reset = if (!no_op) true else false;
+    return .{ .no_op = no_op };
+}

 pub fn create() void {
     if (instance != null or memory_allocator != null) {
         return;
@@ -334,6 +334,26 @@ pub const Data = union(Tag) {
 pub threadlocal var memory_allocator: ?*ASTMemoryAllocator = null;
+pub threadlocal var disable_reset = false;

+pub const DisableResetScope = struct {
+    no_op: bool,
+
+    pub fn exit(self: DisableResetScope) void {
+        if (!self.no_op)
+            disable_reset = false;
+    }
+};
+
+/// Prevent a call to .reset() from doing anything
+///
+/// This is useful for when you may add additional AST nodes immediately after parsing.
+pub fn disableResetIfNecessary() DisableResetScope {
+    // If we were using ASTMemoryAllocator, or if we were already disabled, then we shouldn't do anything.
+    const no_op = !disable_reset and memory_allocator == null;
+
+    disable_reset = if (!no_op) true else false;
+    return .{ .no_op = no_op };
+}

 pub fn create() void {
     if (instance != null or memory_allocator != null) {
         return;
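The two hunks above add the same scope-guard pattern to both AST data stores. A minimal standalone sketch of the idea, with the store-specific condition simplified (this is not the bun API, just the shape of the guard):

    const std = @import("std");

    threadlocal var disable_reset = false;

    const DisableResetScope = struct {
        no_op: bool,

        pub fn exit(self: DisableResetScope) void {
            // Only the scope that actually set the flag clears it,
            // so nested scopes compose correctly.
            if (!self.no_op)
                disable_reset = false;
        }
    };

    fn disableResetIfNecessary() DisableResetScope {
        const no_op = disable_reset; // already disabled: do nothing on exit
        if (!no_op) disable_reset = true;
        return .{ .no_op = no_op };
    }

    fn reset() void {
        if (disable_reset) return;
        // ... free/recycle the AST node store here ...
    }

    pub fn main() void {
        const scope = disableResetIfNecessary();
        defer scope.exit();
        reset(); // suppressed while the scope is alive
        std.debug.print("disable_reset = {}\n", .{disable_reset});
    }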
@@ -743,7 +743,7 @@ pub const AsyncModule = struct {
 var mapper = jsc_vm.sourceMapHandler(&printer);
 defer VirtualMachine.source_code_printer.?.* = printer;
 _ = try jsc_vm.transpiler.printWithSourceMap(
-    parse_result,
+    &parse_result,
     @TypeOf(&printer),
     &printer,
     .esm_ascii,

@@ -981,6 +981,7 @@ pub fn transpileSourceCode(
     setBreakPointOnFirstLine(),
     .runtime_transpiler_cache = if (!disable_transpilying and !JSC.RuntimeTranspilerCache.is_disabled) &cache else null,
     .remove_cjs_module_wrapper = is_main and jsc_vm.module_loader.eval_source != null,
+    .parse_source_map = virtual_source != null or jsc_vm.enable_user_source_maps,
 };
 defer {
     if (should_close_input_file_fd and input_file_fd != bun.invalid_fd) {

@@ -1030,6 +1031,12 @@ pub fn transpileSourceCode(
     },
 };

+defer {
+    if (parse_result.source_map) |source_map| {
+        source_map.deref();
+    }
+}
+
 const source = &parse_result.source;

 if (parse_result.loader == .wasm) {
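The defer block above releases the parse result's reference to its parsed source map once transpilation finishes. A minimal sketch of the same scope-based release discipline, assuming a simplified ParsedSourceMap whose deref takes the allocator explicitly (bun's does not):

    const std = @import("std");

    const ParsedSourceMap = struct {
        ref_count: u32 = 1,

        fn deref(self: *ParsedSourceMap, allocator: std.mem.Allocator) void {
            self.ref_count -= 1;
            // The last reference out frees the map.
            if (self.ref_count == 0) allocator.destroy(self);
        }
    };

    pub fn main() !void {
        const allocator = std.heap.page_allocator;
        const map = try allocator.create(ParsedSourceMap);
        map.* = .{};
        defer map.deref(allocator); // mirrors the defer in the hunk above
        // ... use the source map while printing ...
    }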
@@ -1247,7 +1254,7 @@ pub fn transpileSourceCode(
 var mapper = jsc_vm.sourceMapHandler(&printer);

 break :brk try jsc_vm.transpiler.printWithSourceMap(
-    parse_result,
+    &parse_result,
     @TypeOf(&printer),
     &printer,
     .esm_ascii,

@@ -2423,6 +2430,7 @@ pub const RuntimeTranspilerStore = struct {
     .remove_cjs_module_wrapper = is_main and vm.module_loader.eval_source != null,
     .module_type = module_type,
     .allow_bytecode_cache = true,
+    .parse_source_map = vm.enable_user_source_maps,
 };

 defer {

@@ -2468,6 +2476,12 @@ pub const RuntimeTranspilerStore = struct {
     return;
 };

+defer {
+    if (parse_result.source_map) |source_map| {
+        source_map.deref();
+    }
+}
+
 if (vm.isWatcherEnabled()) {
     if (input_file_fd.isValid()) {
         if (!is_node_override and

@@ -2581,7 +2595,7 @@ pub const RuntimeTranspilerStore = struct {
 var mapper = vm.sourceMapHandler(&printer);
 defer source_code_printer.?.* = printer;
 _ = transpiler.printWithSourceMap(
-    parse_result,
+    &parse_result,
     @TypeOf(&printer),
     &printer,
     .esm_ascii,
@@ -44,6 +44,7 @@ standalone_module_graph: ?*bun.StandaloneModuleGraph = null,
 smol: bool = false,
 dns_result_order: DNSResolver.Order = .verbatim,
 counters: Counters = .{},
+enable_user_source_maps: bool = false,

 hot_reload: bun.CLI.Command.HotReload = .none,
 jsc: *VM = undefined,

@@ -972,6 +973,7 @@ pub fn initWithModuleGraph(
     .standalone_module_graph = opts.graph.?,
     .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(),
     .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit,
+    .enable_user_source_maps = opts.enable_user_source_maps,
 };
 vm.source_mappings.init(&vm.saved_source_map_table);
 vm.regular_event_loop.tasks = EventLoop.Queue.init(

@@ -1042,6 +1044,7 @@ pub const Options = struct {
 /// Worker VMs are always destroyed on exit, regardless of this setting. Setting this to
 /// true may expose bugs that would otherwise only occur using Workers.
 destruct_main_thread_on_exit: bool = false,
+enable_user_source_maps: bool = false,
 };

 pub var is_smol_mode = false;

@@ -1093,6 +1096,7 @@ pub fn init(opts: Options) !*VirtualMachine {
     .ref_strings_mutex = .{},
     .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(),
     .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit,
+    .enable_user_source_maps = opts.enable_user_source_maps,
 };
 vm.source_mappings.init(&vm.saved_source_map_table);
 vm.regular_event_loop.tasks = EventLoop.Queue.init(

@@ -1252,6 +1256,7 @@ pub fn initWorker(
     .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(),
     // This option is irrelevant for Workers
     .destruct_main_thread_on_exit = false,
+    .enable_user_source_maps = opts.enable_user_source_maps,
 };
 vm.source_mappings.init(&vm.saved_source_map_table);
 vm.regular_event_loop.tasks = EventLoop.Queue.init(

@@ -1341,6 +1346,7 @@ pub fn initBake(opts: Options) anyerror!*VirtualMachine {
     .ref_strings_mutex = .{},
     .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(),
     .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit,
+    .enable_user_source_maps = opts.enable_user_source_maps,
 };
 vm.source_mappings.init(&vm.saved_source_map_table);
 vm.regular_event_loop.tasks = EventLoop.Queue.init(
@@ -42,7 +42,7 @@ pub fn parse(
     &writer,
     parse_result,
     source,
-    .{
+    &.{
         .mangled_props = null,
     },
 ) catch {

@@ -1880,7 +1880,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d
     &writer,
     bun.Global.BunInfo.generate(*Transpiler, &JSC.VirtualMachine.get().transpiler, allocator) catch unreachable,
     source,
-    .{ .mangled_props = null },
+    &.{ .mangled_props = null },
 ) catch unreachable;

 resp.writeStatus("200 OK");
@@ -56,6 +56,7 @@ pub const Run = struct {
     .graph = graph_ptr,
     .is_main_thread = true,
     .destruct_main_thread_on_exit = bun.getRuntimeFeatureFlag(.BUN_DESTRUCT_VM_ON_EXIT),
+    .enable_user_source_maps = ctx.runtime_options.enable_user_source_maps,
 }),
 .arena = arena,
 .ctx = ctx,

@@ -194,6 +195,7 @@ pub const Run = struct {
     .dns_result_order = DNSResolver.Order.fromStringOrDie(ctx.runtime_options.dns_result_order),
     .is_main_thread = true,
     .destruct_main_thread_on_exit = bun.getRuntimeFeatureFlag(.BUN_DESTRUCT_VM_ON_EXIT),
+    .enable_user_source_maps = ctx.runtime_options.enable_user_source_maps,
 },
 ),
 .arena = arena,
@@ -1179,7 +1179,7 @@ pub const LinkerContext = struct {
     .{ .stmts = out_stmts },
 };

-const print_options = js_printer.Options{
+const print_options = &js_printer.Options{
     .bundling = true,
     // TODO: IIFE
     .indent = .{},

@@ -26,7 +26,7 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu
 const runtimeRequireRef = if (c.options.output_format == .cjs) null else c.graph.symbols.follow(runtime_members.get("__require").?.ref);

 {
-    const print_options = js_printer.Options{
+    const print_options = &js_printer.Options{
         .bundling = true,
         .indent = .{},
         .has_run_symbol_renamer = true,

@@ -820,7 +820,7 @@ pub fn generateEntryPointTailJS(
     };
 }

-const print_options = js_printer.Options{
+const print_options = &js_printer.Options{
     // TODO: IIFE indent
     .indent = .{},
     .has_run_symbol_renamer = true,
@@ -388,6 +388,7 @@ pub const Command = struct {
 /// compatibility.
 expose_gc: bool = false,
 preserve_symlinks_main: bool = false,
+enable_user_source_maps: bool = false,
 };

 var global_cli_ctx: Context = undefined;

@@ -108,6 +108,7 @@ pub const runtime_params_ = [_]ParamType{
 clap.parseParam("--redis-preconnect            Preconnect to $REDIS_URL at startup") catch unreachable,
 clap.parseParam("--no-addons                   Throw an error if process.dlopen is called, and disable export condition \"node-addons\"") catch unreachable,
 clap.parseParam("--unhandled-rejections <STR>  One of \"strict\", \"throw\", \"warn\", \"none\", or \"warn-with-error-code\"") catch unreachable,
+clap.parseParam("--enable-source-maps          Enable input source maps for all files") catch unreachable,
 };

 pub const auto_or_run_params = [_]ParamType{
@@ -667,6 +668,7 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
 ctx.runtime_options.smol = args.flag("--smol");
 ctx.runtime_options.preconnect = args.options("--fetch-preconnect");
 ctx.runtime_options.expose_gc = args.flag("--expose-gc");
+ctx.runtime_options.enable_user_source_maps = args.flag("--enable-source-maps");

 if (args.option("--dns-result-order")) |order| {
     ctx.runtime_options.dns_result_order = order;
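Taken together, the CLI hunks thread the new flag from argument parsing into runtime_options and from there into VirtualMachine.Options. So an invocation along the lines of "bun --enable-source-maps app.js" (file name illustrative) should set enable_user_source_maps on the VM, which in turn enables parse_source_map when user files are transpiled.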
@@ -1430,7 +1430,7 @@ pub const CreateCommand = struct {
     &package_json_writer,
     package_json_expr,
     source,
-    .{ .mangled_props = null },
+    &.{ .mangled_props = null },
 ) catch |err| {
     Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)});
     package_json_file = null;

@@ -779,7 +779,7 @@ pub const InitCommand = struct {
     &package_json_writer,
     js_ast.Expr{ .data = .{ .e_object = fields.object }, .loc = logger.Loc.Empty },
     &logger.Source.initEmptyFile("package.json"),
-    .{ .mangled_props = null },
+    &.{ .mangled_props = null },
 ) catch |err| {
     Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)});
     package_json_file = null;

@@ -2183,7 +2183,7 @@ pub const PackCommand = struct {

     // shouldn't be used
     &json.source,
-    .{
+    &.{
         .indent = json.indentation,
         .mangled_props = null,
     },

@@ -421,7 +421,7 @@ pub const TrustCommand = struct {
 buffer_writer.append_newline = package_json_contents.len > 0 and package_json_contents[package_json_contents.len - 1] == '\n';
 var package_json_writer = bun.js_printer.BufferPrinter.init(buffer_writer);

-_ = bun.js_printer.printJSON(@TypeOf(&package_json_writer), &package_json_writer, package_json, &package_json_source, .{ .mangled_props = null }) catch |err| {
+_ = bun.js_printer.printJSON(@TypeOf(&package_json_writer), &package_json_writer, package_json, &package_json_source, &.{ .mangled_props = null }) catch |err| {
     Output.errGeneric("failed to print package.json: {s}", .{@errorName(err)});
     Global.crash();
 };

@@ -242,7 +242,7 @@ pub fn view(allocator: std.mem.Allocator, manager: *PackageManager, spec_: strin
     &package_json_writer,
     value,
     source,
-    .{
+    &.{
         .mangled_props = null,
     },
 );

@@ -278,7 +278,7 @@ pub fn view(allocator: std.mem.Allocator, manager: *PackageManager, spec_: strin
     &package_json_writer,
     manifest,
     source,
-    .{
+    &.{
         .mangled_props = null,
         .indent = .{
             .count = 2,

@@ -967,7 +967,7 @@ pub const PublishCommand = struct {
     &writer,
     json.*,
     json_source,
-    .{
+    &.{
         .minify_whitespace = true,
         .mangled_props = null,
     },

@@ -243,7 +243,7 @@ fn updatePackageJSONAndInstallWithManagerWithUpdates(
     &package_json_writer,
     current_package_json.root,
     &current_package_json.source,
-    .{
+    &.{
         .indent = current_package_json_indent,
         .mangled_props = null,
     },

@@ -320,7 +320,7 @@ fn updatePackageJSONAndInstallWithManagerWithUpdates(
     &package_json_writer2,
     root_package_json.root,
     &root_package_json.source,
-    .{
+    &.{
         .indent = root_package_json.indentation,
         .mangled_props = null,
     },

@@ -384,7 +384,7 @@ fn updatePackageJSONAndInstallWithManagerWithUpdates(
     &package_json_writer_two,
     new_package_json,
     source,
-    .{
+    &.{
         .indent = current_package_json_indent,
         .mangled_props = null,
     },
@@ -149,7 +149,7 @@ fn NewLexer_(
 code_point: CodePoint = -1,
 identifier: []const u8 = "",
 jsx_pragma: JSXPragma = .{},
-source_mapping_url: ?js_ast.Span = null,
+
 number: f64 = 0.0,
 rescan_close_brace_as_template_token: bool = false,
 prev_error_loc: logger.Loc = logger.Loc.Empty,

@@ -169,6 +169,9 @@ fn NewLexer_(
 track_comments: bool = false,
 all_comments: std.ArrayList(logger.Range),

+sourceMappingURL: js_ast.Span = .{},
+sourceURL: js_ast.Span = .{},
+
 indent_info: if (json_options.guess_indentation)
     struct {
         guess: Indentation = .{},

@@ -2031,7 +2034,13 @@ fn NewLexer_(
             if (span.range.len > 0) @as(usize, @intCast(span.range.len)) else 0;
     }
 } else if (chunk.len >= " sourceMappingURL=".len + 1 and strings.hasPrefixComptime(chunk, " sourceMappingURL=")) { // Check includes space for prefix
-    return PragmaArg.scanSourceMappingURLValue(lexer.start, offset_for_errors, chunk, &lexer.source_mapping_url);
+    return PragmaArg.scanSourceMappingURLValue(lexer.start, offset_for_errors, chunk, &lexer.sourceMappingURL);
+} else if (chunk.len >= " sourceURL=".len + 1 and strings.hasPrefixComptime(chunk, " sourceURL=")) { // Check includes space for prefix
+    if (PragmaArg.scan(.skip_space_first, lexer.start + offset_for_errors, "sourceURL", chunk, allow_newline)) |span| {
+        lexer.sourceURL = span;
+        return "sourceURL".len +
+            if (span.range.len > 0) @as(usize, @intCast(span.range.len)) else 0;
+    }
 }

 return 0;
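The lexer hunks above record both pragmas into js_ast.Span fields that the parser later copies onto the Ast. A rough standalone sketch of scanning such a pragma out of a comment chunk (std-only; bun's PragmaArg additionally handles error offsets and SIMD scanning, which this omits):

    const std = @import("std");

    /// Returns the value following "# sourceMappingURL=", if present.
    fn scanSourceMappingURL(chunk: []const u8) ?[]const u8 {
        const prefix = "# sourceMappingURL=";
        if (!std.mem.startsWith(u8, chunk, prefix)) return null;
        const rest = chunk[prefix.len..];
        // The value runs until the first whitespace or the end of the comment.
        const end = std.mem.indexOfAny(u8, rest, " \t\r\n") orelse rest.len;
        if (end == 0) return null;
        return rest[0..end];
    }

    pub fn main() void {
        const comment = "# sourceMappingURL=app.js.map";
        if (scanSourceMappingURL(comment)) |url| {
            std.debug.print("found: {s}\n", .{url});
        }
    }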
@@ -3258,7 +3267,7 @@ pub const PragmaArg = enum {
 // These can be extremely long, so we use SIMD.
 /// "//# sourceMappingURL=data:/adspaoksdpkz"
 ///     ^^^^^^^^^^^^^^^^^^
-pub fn scanSourceMappingURLValue(start: usize, offset_for_errors: usize, chunk: string, result: *?js_ast.Span) usize {
+pub fn scanSourceMappingURLValue(start: usize, offset_for_errors: usize, chunk: string, result: *js_ast.Span) usize {
     const prefix: u32 = " sourceMappingURL=".len;
     const url_and_rest_of_code = chunk[prefix..]; // Slice containing only the potential argument

@@ -23922,6 +23922,9 @@ fn NewParser_(
     .ts_enums = try p.computeTsEnumsMap(allocator),

     .import_meta_ref = p.import_meta_ref,
+
+    .sourceURL = p.lexer.sourceURL,
+    .sourceMappingURL = p.lexer.sourceMappingURL,
 };
 }
@@ -456,6 +456,7 @@ pub const Options = struct {
 minify_identifiers: bool = false,
 minify_syntax: bool = false,
 print_dce_annotations: bool = true,
+input_source_map: ?*bun.sourcemap.ParsedSourceMap = null,

 transform_only: bool = false,
 inline_require_and_import_errors: bool = true,

@@ -5225,13 +5226,13 @@ fn NewPrinter(
 pub fn init(
     writer: Writer,
     import_records: []const ImportRecord,
-    opts: Options,
+    opts: *const Options,
     renamer: bun.renamer.Renamer,
     source_map_builder: SourceMap.Chunk.Builder,
 ) Printer {
     var printer = Printer{
         .import_records = import_records,
-        .options = opts,
+        .options = opts.*,
         .writer = writer,
         .renamer = renamer,
         .source_map_builder = source_map_builder,
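From this point on, the printer entry points take js_printer.Options by const pointer instead of by value, which is why the call sites elsewhere in the diff change from ".{ ... }" to "&.{ ... }". A toy illustration of the pattern (field names invented):

    const std = @import("std");

    const Options = struct {
        bundling: bool = false,
        minify: bool = false,
        // ...imagine dozens more fields, as in js_printer.Options
    };

    // One pointer is copied instead of the whole struct; `&.{ ... }` takes
    // the address of a temporary literal, which Zig permits for const pointers.
    fn print(opts: *const Options) void {
        std.debug.print("bundling={}\n", .{opts.bundling});
    }

    pub fn main() void {
        print(&.{ .bundling = true });
    }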
@@ -5676,7 +5677,7 @@ const GenerateSourceMap = enum {
 pub fn getSourceMapBuilder(
     comptime generate_source_map: GenerateSourceMap,
     comptime is_bun_platform: bool,
-    opts: Options,
+    opts: *const Options,
     source: *const logger.Source,
     tree: *const Ast,
 ) SourceMap.Chunk.Builder {

@@ -5702,6 +5703,7 @@ pub fn getSourceMapBuilder(
         );
         break :brk .empty;
     },
+    .input_source_map = opts.input_source_map,
 };
 }

@@ -5712,7 +5714,7 @@ pub fn printAst(
     symbols: js_ast.Symbol.Map,
     source: *const logger.Source,
     comptime ascii_only: bool,
-    opts: Options,
+    opts: *const Options,
     comptime generate_source_map: bool,
 ) !usize {
     var renamer: rename.Renamer = undefined;

@@ -5836,7 +5838,7 @@ pub fn printAst(
     printer.print("var {require}=import.meta;");
 }

-for (tree.parts.slice()) |part| {
+for (tree.parts.slice()) |*part| {
     for (part.stmts) |stmt| {
         try printer.printStmt(stmt);
         if (printer.writer.getError()) {} else |err| {

@@ -5875,7 +5877,7 @@ pub fn printJSON(
     _writer: Writer,
     expr: Expr,
     source: *const logger.Source,
-    opts: Options,
+    opts: *const Options,
 ) !usize {
     const PrinterType = NewPrinter(false, Writer, false, false, true, false);
     const writer = _writer;

@@ -5915,7 +5917,7 @@ pub fn print(
     target: options.Target,
     ast: Ast,
     source: *const logger.Source,
-    opts: Options,
+    opts: *const Options,
     import_records: []const ImportRecord,
     parts: []const js_ast.Part,
     renamer: bun.renamer.Renamer,

@@ -5947,7 +5949,7 @@ pub fn printWithWriter(
     target: options.Target,
     ast: Ast,
     source: *const logger.Source,
-    opts: Options,
+    opts: *const Options,
     import_records: []const ImportRecord,
     parts: []const js_ast.Part,
     renamer: bun.renamer.Renamer,

@@ -5976,7 +5978,7 @@ pub fn printWithWriterAndPlatform(
     comptime is_bun_platform: bool,
     ast: Ast,
     source: *const logger.Source,
-    opts: Options,
+    opts: *const Options,
     import_records: []const ImportRecord,
     parts: []const js_ast.Part,
     renamer: bun.renamer.Renamer,

@@ -6066,7 +6068,7 @@ pub fn printCommonJS(
     symbols: js_ast.Symbol.Map,
     source: *const logger.Source,
     comptime ascii_only: bool,
-    opts: Options,
+    opts: *const Options,
     comptime generate_source_map: bool,
 ) !usize {
     const prev_action = bun.crash_handler.current_action;

@@ -6087,7 +6089,7 @@ pub fn printCommonJS(
 printer.binary_expression_stack = std.ArrayList(PrinterType.BinaryExpressionVisitor).init(bin_stack_heap.get());
 defer printer.binary_expression_stack.clearAndFree();

-for (tree.parts.slice()) |part| {
+for (tree.parts.slice()) |*part| {
     for (part.stmts) |stmt| {
         try printer.printStmt(stmt);
         if (printer.writer.getError()) {} else |err| {
@@ -1014,7 +1014,7 @@ fn expectPrintedJSON(_contents: string, expected: string) !void {

 const buffer_writer = js_printer.BufferWriter.init(default_allocator);
 var writer = js_printer.BufferPrinter.init(buffer_writer);
-const written = try js_printer.printJSON(@TypeOf(&writer), &writer, expr, source, .{
+const written = try js_printer.printJSON(@TypeOf(&writer), &writer, expr, source, &.{
     .mangled_props = null,
 });
 var js = writer.ctx.buffer.list.items.ptr[0 .. written + 1];
@@ -63,7 +63,7 @@ pub const ParseUrl = struct {
 /// The mappings are owned by the `alloc` allocator.
 /// Temporary allocations are made to the `arena` allocator, which
 /// should be an arena allocator (caller is assumed to call `deinit`).
-pub fn parseUrl(
+pub fn parseInlineSourceMap(
     alloc: std.mem.Allocator,
     arena: std.mem.Allocator,
     source: []const u8,
@@ -99,6 +99,85 @@ pub fn parseUrl(
     return parseJSON(alloc, arena, json_bytes, hint);
 }

+pub fn parseDataURL(
+    alloc: std.mem.Allocator,
+    arena: std.mem.Allocator,
+    source: []const u8,
+    hint: ParseUrlResultHint,
+) !ParseUrl {
+    const json_bytes = json_bytes: {
+        const data_prefix = "data:application/json";
+
+        try_data_url: {
+            debug("parse (data url, {d} bytes)", .{source.len});
+            switch (source[data_prefix.len]) {
+                ';' => {
+                    const encoding = bun.sliceTo(source[data_prefix.len + 1 ..], ',');
+                    if (!bun.strings.eqlComptime(encoding, "base64")) break :try_data_url;
+                    const base64_data = source[data_prefix.len + ";base64,".len ..];
+
+                    const len = bun.base64.decodeLen(base64_data);
+                    const bytes = arena.alloc(u8, len) catch bun.outOfMemory();
+                    const decoded = bun.base64.decode(bytes, base64_data);
+                    if (!decoded.isSuccessful()) {
+                        return error.InvalidBase64;
+                    }
+                    break :json_bytes bytes[0..decoded.count];
+                },
+                ',' => break :json_bytes source[data_prefix.len + 1 ..],
+                else => break :try_data_url,
+            }
+        }
+
+        return error.UnsupportedFormat;
+    };
+
+    return parseJSON(alloc, arena, json_bytes, hint);
+}
+
+/// Parse a sourceMappingURL comment from either a data: URI, a local file path
+/// or an absolute file path.
+pub fn parseSourceMappingURL(
+    alloc: std.mem.Allocator,
+    arena: std.mem.Allocator,
+    relative_to_source: *const bun.logger.Source,
+    path_or_data_url: []const u8,
+    hint: ParseUrlResultHint,
+) !ParseUrl {
+    const data_prefix = "data:application/json";
+
+    if (bun.strings.hasPrefixComptime(path_or_data_url, data_prefix) and path_or_data_url.len > (data_prefix.len + 1)) {
+        return try parseDataURL(alloc, arena, path_or_data_url, hint);
+    }
+
+    if (!relative_to_source.path.isFile()) {
+        return error.UnsupportedFormat;
+    }
+
+    const path_buffer = bun.PathBufferPool.get();
+    defer bun.PathBufferPool.put(path_buffer);
+
+    const path = bun.path.joinAbsStringBufZ(
+        bun.fs.FileSystem.instance.top_level_dir,
+        path_buffer,
+        &[_][]const u8{ relative_to_source.path.sourceDir(), path_or_data_url },
+        .loose,
+    );
+
+    const json_source = try bun.sys.File.toSourceAt(
+        bun.FD.cwd(),
+        path,
+        arena,
+        .{ .convert_bom = true },
+    ).unwrap();
+    defer arena.free(json_source.contents);
+
+    var log = bun.logger.Log.init(arena);
+    defer log.deinit();
+
+    return try parseJSONFromSource(alloc, arena, &json_source, &log, hint);
+}
+
 /// Parses a JSON source-map
 ///
 /// `source` must be in UTF-8 and can be freed after this call.
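parseDataURL above accepts both the "data:application/json;base64,<payload>" and the plain "data:application/json,<payload>" forms. A std-only sketch of that split, assuming the standard base64 alphabet (bun's decoder and error handling differ):

    const std = @import("std");

    fn decodeSourceMapDataURL(allocator: std.mem.Allocator, url: []const u8) ![]u8 {
        const prefix = "data:application/json";
        if (!std.mem.startsWith(u8, url, prefix)) return error.UnsupportedFormat;
        const rest = url[prefix.len..];
        if (std.mem.startsWith(u8, rest, ";base64,")) {
            const payload = rest[";base64,".len..];
            const decoder = std.base64.standard.Decoder;
            const out = try allocator.alloc(u8, try decoder.calcSizeForSlice(payload));
            try decoder.decode(out, payload);
            return out;
        }
        // Unencoded form: everything after the comma is the JSON itself.
        if (rest.len > 0 and rest[0] == ',') return allocator.dupe(u8, rest[1..]);
        return error.UnsupportedFormat;
    }

    pub fn main() !void {
        const allocator = std.heap.page_allocator;
        const json = try decodeSourceMapDataURL(allocator, "data:application/json,{\"version\":3}");
        defer allocator.free(json);
        std.debug.print("{s}\n", .{json});
    }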
@@ -111,10 +190,20 @@ pub fn parseJSON(
     source: []const u8,
     hint: ParseUrlResultHint,
 ) !ParseUrl {
-    const json_src = bun.logger.Source.initPathString("sourcemap.json", source);
     var log = bun.logger.Log.init(arena);
     defer log.deinit();

+    const json_src = &bun.logger.Source.initPathString("sourcemap.json", source);
+    return parseJSONFromSource(alloc, arena, json_src, &log, hint);
+}
+
+pub fn parseJSONFromSource(
+    alloc: std.mem.Allocator,
+    arena: std.mem.Allocator,
+    json_source: *const bun.logger.Source,
+    log: *bun.logger.Log,
+    hint: ParseUrlResultHint,
+) !ParseUrl {
     // the allocator given to the JS parser is not respected for all parts
     // of the parse, so we need to remember to reset the ast store
     bun.JSAst.Expr.Data.Store.reset();

@@ -125,8 +214,8 @@ pub fn parseJSON(
     bun.JSAst.Expr.Data.Store.reset();
     bun.JSAst.Stmt.Data.Store.reset();
 }
-debug("parse (JSON, {d} bytes)", .{source.len});
-var json = bun.JSON.parse(&json_src, &log, arena, false) catch {
+debug("parse (JSON, {d} bytes)", .{json_source.contents.len});
+var json = bun.JSON.parse(json_source, log, arena, false) catch {
     return error.InvalidJSON;
 };
@@ -822,7 +911,7 @@ pub fn getSourceMapImpl(

 break :parsed .{
     .is_inline_map,
-    parseUrl(
+    parseInlineSourceMap(
         bun.default_allocator,
         allocator,
         found_url.slice(),

@@ -1455,7 +1544,7 @@ pub const Chunk = struct {
 pub fn NewBuilder(comptime SourceMapFormatType: type) type {
     return struct {
         const ThisBuilder = @This();
-        input_source_map: ?*SourceMap = null,
+        input_source_map: ?*ParsedSourceMap = null,
         source_map: SourceMapper,
         line_offset_tables: LineOffsetTable.List = .{},
         prev_state: SourceMapState = SourceMapState{},
@@ -1572,7 +1661,7 @@ pub const Chunk = struct {
 var current_state = current_state_;
 // If the input file had a source map, map all the way back to the original
 if (b.input_source_map) |input| {
-    if (input.find(current_state.original_line, current_state.original_column)) |mapping| {
+    if (Mapping.find(input.mappings, current_state.original_line, current_state.original_column)) |mapping| {
         current_state.source_index = mapping.sourceIndex();
         current_state.original_line = mapping.originalLine();
         current_state.original_column = mapping.originalColumn();
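This is the remap step: before emitting a mapping for bundler output, the builder chains the position through the input file's own source map so the final map points at the true original source. A sketch with a simplified Mapping type (bun stores VLQ-decoded mappings in a struct-of-arrays, not a slice like this):

    const std = @import("std");

    const Mapping = struct {
        generated_line: i32,
        generated_column: i32,
        source_index: i32,
        original_line: i32,
        original_column: i32,
    };

    /// Last mapping at or before (line, column); assumes `mappings` is
    /// sorted by generated position. A real implementation would binary search.
    fn find(mappings: []const Mapping, line: i32, column: i32) ?Mapping {
        var best: ?Mapping = null;
        for (mappings) |m| {
            if (m.generated_line < line or
                (m.generated_line == line and m.generated_column <= column))
            {
                best = m;
            } else break;
        }
        return best;
    }

    pub fn main() void {
        const maps = [_]Mapping{
            .{ .generated_line = 0, .generated_column = 0, .source_index = 0, .original_line = 10, .original_column = 2 },
        };
        if (find(&maps, 0, 5)) |m| {
            std.debug.print("remaps to {d}:{d}\n", .{ m.original_line, m.original_column });
        }
    }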
@@ -52,6 +52,7 @@ pub const ParseResult = struct {
 input_fd: ?StoredFileDescriptorType = null,
 empty: bool = false,
 pending_imports: _resolver.PendingResolution.List = .{},
+source_map: ?*bun.sourcemap.ParsedSourceMap = null,

 runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache = null,

@@ -808,6 +809,7 @@ pub const Transpiler = struct {
     comptime enable_source_map: bool,
     source_map_context: ?js_printer.SourceMapHandler,
     runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache,
+    input_source_map: ?*bun.sourcemap.ParsedSourceMap,
 ) !usize {
     const tracer = if (enable_source_map)
         bun.perf.trace("JSPrinter.printWithSourceMap")
@@ -825,7 +827,7 @@ pub const Transpiler = struct {
     js_ast.Symbol.Map.initList(symbols),
     source,
     false,
-    .{
+    &.{
         .bundling = false,
         .runtime_imports = ast.runtime_imports,
         .require_ref = ast.require_ref,

@@ -838,6 +840,7 @@ pub const Transpiler = struct {
         .runtime_transpiler_cache = runtime_transpiler_cache,
         .print_dce_annotations = transpiler.options.emit_dce_annotations,
         .hmr_ref = ast.wrapper_ref,
+        .input_source_map = input_source_map,
     },
     enable_source_map,
 ),

@@ -849,7 +852,7 @@ pub const Transpiler = struct {
     js_ast.Symbol.Map.initList(symbols),
     source,
     false,
-    .{
+    &.{
         .bundling = false,
         .runtime_imports = ast.runtime_imports,
         .require_ref = ast.require_ref,

@@ -863,6 +866,7 @@ pub const Transpiler = struct {
         .runtime_transpiler_cache = runtime_transpiler_cache,
         .print_dce_annotations = transpiler.options.emit_dce_annotations,
         .hmr_ref = ast.wrapper_ref,
+        .input_source_map = input_source_map,
         .mangled_props = null,
     },
     enable_source_map,

@@ -875,7 +879,7 @@ pub const Transpiler = struct {
     js_ast.Symbol.Map.initList(symbols),
     source,
     is_bun,
-    .{
+    &.{
         .bundling = false,
         .runtime_imports = ast.runtime_imports,
         .require_ref = ast.require_ref,

@@ -899,6 +903,7 @@ pub const Transpiler = struct {
     .target = transpiler.options.target,
     .print_dce_annotations = transpiler.options.emit_dce_annotations,
     .hmr_ref = ast.wrapper_ref,
+    .input_source_map = input_source_map,
     .mangled_props = null,
 },
 enable_source_map,

@@ -924,12 +929,13 @@ pub const Transpiler = struct {
     false,
     null,
     null,
+    null,
 );
 }

 pub fn printWithSourceMap(
     transpiler: *Transpiler,
-    result: ParseResult,
+    result: *const ParseResult,
     comptime Writer: type,
     writer: Writer,
     comptime format: js_printer.Format,

@@ -945,6 +951,7 @@ pub const Transpiler = struct {
     false,
     handler,
     result.runtime_transpiler_cache,
+    result.source_map,
 );
 }
 return transpiler.printWithSourceMapMaybe(

@@ -956,6 +963,7 @@ pub const Transpiler = struct {
     true,
     handler,
     result.runtime_transpiler_cache,
+    result.source_map,
 );
 }

@@ -993,6 +1001,7 @@ pub const Transpiler = struct {

 keep_json_and_toml_as_one_statement: bool = false,
 allow_bytecode_cache: bool = false,
+parse_source_map: bool = false,
 };

 pub fn parse(
@@ -1175,6 +1184,43 @@ pub const Transpiler = struct {
 .loader = loader,
 .input_fd = input_fd,
 .runtime_transpiler_cache = this_parse.runtime_transpiler_cache,
+.source_map = if (this_parse.parse_source_map and value.sourceMappingURL.text.len > 0) brk: {
+    // Deliberately use default_allocator instead of this_parse.allocator:
+    // this_parse.allocator may or may not be an ArenaAllocator, depending on
+    // the caller, and nested arena allocators cause the allocation to be
+    // freed even later, so it's just wasteful.
+    var arena = bun.ArenaAllocator.init(
+        bun.default_allocator,
+    );
+    defer arena.deinit();
+
+    const reset_scope = js_ast.Expr.Data.Store.disableResetIfNecessary();
+    const reset_scope2 = js_ast.Stmt.Data.Store.disableResetIfNecessary();
+    defer reset_scope.exit();
+    defer reset_scope2.exit();
+
+    if (bun.sourcemap.parseSourceMappingURL(
+        allocator,
+        arena.allocator(),
+        source,
+        value.sourceMappingURL.text,
+        .{
+            .all = .{
+                .line = 0,
+                .column = 0,
+            },
+        },
+    )) |*source_map| {
+        break :brk source_map.map;
+    } else |_| {
+        // TODO: failing to parse the source map shouldn't cause the build
+        // to fail, so for now we just ignore the error.
+    }
+
+    break :brk null;
+} else null,
 },
 .cached => .{
     .ast = undefined,
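The new .source_map branch above leans on a short-lived arena: everything the source-map parser allocates as scratch dies with the arena, while the map that escapes is allocated from default_allocator so it can outlive the parse. A minimal sketch of that split (allocators simplified; not bun's types):

    const std = @import("std");

    pub fn main() !void {
        const long_lived = std.heap.page_allocator; // stands in for bun.default_allocator

        var arena = std.heap.ArenaAllocator.init(long_lived);
        defer arena.deinit(); // frees every temporary in one shot

        // Scratch work happens in the arena...
        const scratch = try arena.allocator().alloc(u8, 4096);
        @memset(scratch, 0);

        // ...but the value that escapes is allocated from the long-lived allocator.
        const kept = try long_lived.dupe(u8, "parsed mappings");
        defer long_lived.free(kept);
        std.debug.print("{s}\n", .{kept});
    }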
@@ -1209,8 +1255,7 @@ pub const Transpiler = struct {
     },
 };
 },
 // TODO: use lazy export AST
-inline .toml, .json, .jsonc => |kind| {
+.toml, .json, .jsonc => |kind| {
     var expr = if (kind == .jsonc)
         // We allow importing tsconfig.*.json or jsconfig.*.json with comments
         // These files implicitly become JSONC files, which aligns with the behavior of text editors.

@@ -1220,7 +1265,7 @@ pub const Transpiler = struct {
     else if (kind == .toml)
         TOML.parse(source, transpiler.log, allocator, false) catch return null
     else
-        @compileError("unreachable");
+        unreachable;

 var symbols: []js_ast.Symbol = &.{};