Mirror of https://github.com/oven-sh/bun (synced 2026-02-02 23:18:47 +00:00)

Compare commits: ciro/fix-a ... cursor/add

16 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 4c5d1e066b | |
| | 427d633615 | |
| | dcc333e670 | |
| | 002cd39fb2 | |
| | 04df72a51b | |
| | 9a3f80e04c | |
| | 60a643cfc1 | |
| | 8290c67744 | |
| | 5b68849e73 | |
| | 7f0ff89609 | |
| | 4c976b492e | |
| | c2d9b9a2f9 | |
| | c680490be7 | |
| | 3a08090ca8 | |
| | 422f1812cd | |
| | 9ff17e5660 | |
19 packages/bun-types/bun.d.ts vendored
@@ -794,7 +794,6 @@ declare module "bun" {
path?: string | undefined;
syscall?: string | undefined;
}

/**
* Concatenate an array of typed arrays into a single `ArrayBuffer`. This is a fast path.
*

@@ -1419,7 +1418,6 @@ declare module "bun" {
* @param sql Function to execute SQL queries within the savepoint
*/
type SQLSavepointContextCallback = (sql: SavepointSQL) => Promise<any> | Array<SQLQuery>;

/**
* Main SQL client interface providing connection and transaction management
*/

@@ -2129,7 +2127,6 @@ declare module "bun" {
path: string;
kind: ImportKind;
}

/**
* @see [Bun.build API docs](https://bun.sh/docs/bundler#api)
*/

@@ -2853,7 +2850,6 @@ declare module "bun" {
* @link https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/readyState
*/
type WebSocketReadyState = 0 | 1 | 2 | 3;

/**
* A fast WebSocket designed for servers.
*

@@ -3654,7 +3650,6 @@ declare module "bun" {
errno?: number;
syscall?: string;
}

/**
* Options for TLS connections
*/

@@ -4437,7 +4432,6 @@ declare module "bun" {
* This can be 3.5x faster than `new Uint8Array(size)`, but if you send uninitialized memory to your users (even unintentionally), it can potentially leak anything recently in memory.
*/
function allocUnsafe(size: number): Uint8Array;

/**
* Options for `Bun.inspect`
*/

@@ -5222,7 +5216,6 @@ declare module "bun" {
* Internally, it calls [nanosleep(2)](https://man7.org/linux/man-pages/man2/nanosleep.2.html)
*/
function sleepSync(ms: number): void;

/**
* Hash `input` using [SHA-2 512/256](https://en.wikipedia.org/wiki/SHA-2#Comparison_of_SHA_functions)
*

@@ -5549,6 +5542,10 @@ declare module "bun" {
* The source code of the module
*/
contents: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer;
/**
* Optional sourcemap (as a string) for the generated contents.
*/
sourcemap?: string;
/**
* The loader to use for this file
*

@@ -5883,7 +5880,6 @@ declare module "bun" {
interface HTMLBundle {
index: string;
}

/**
* Represents a TCP or TLS socket connection used for network communication.
* This interface provides methods for reading, writing, managing the connection state,

@@ -6625,7 +6621,6 @@ declare module "bun" {
* @category HTTP & Networking
*/
function listen<Data = undefined>(options: UnixSocketOptions<Data>): UnixSocketListener<Data>;

/**
* @category HTTP & Networking
*/

@@ -7247,7 +7242,7 @@ declare module "bun" {
*
* @example
* ```ts
* const subprocess = Bun.spawn(["echo", "hello"]);
* const subprocess = Bun.spawn({ cmd: ["echo", "hello"] });
* ```
*/
cmd: string[]; // to support dynamically constructed commands

@@ -7365,7 +7360,7 @@ declare module "bun" {
type WritableSubprocess = Subprocess<"pipe", any, any>;
/** Utility type for any process from {@link Bun.spawn()} with stdin, stdout, stderr all set to `"pipe"`. A combination of {@link ReadableSubprocess} and {@link WritableSubprocess} */
type PipedSubprocess = Subprocess<"pipe", "pipe", "pipe">;
/** Utility type for any process from {@link Bun.spawn()} with stdin, stdout, stderr all set to `null` or similar. */
/** Utility type for any process from {@link Bun.spawn()} with stdin, stdout, stderr all set to `null` or similar */
type NullSubprocess = Subprocess<
"ignore" | "inherit" | null | undefined,
"ignore" | "inherit" | null | undefined,

@@ -8042,4 +8037,4 @@ declare module "bun" {
*/
[Symbol.iterator](): IterableIterator<[string, string]>;
}
}
}
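The @@ -5549 hunk above adds an optional `sourcemap?: string` field next to `contents` in the onLoad result type. Below is a minimal sketch of an onLoad plugin that uses it, assuming the types on this branch; the `.foo` extension and the transform are illustrative, not taken from the diff.

```ts
import type { BunPlugin } from "bun";

// Sketch only: a loader for a hypothetical ".foo" extension that returns both
// the generated JS and a plugin-provided sourcemap string (the new field).
const fooPlugin: BunPlugin = {
  name: "foo-loader",
  setup(build) {
    build.onLoad({ filter: /\.foo$/ }, async args => {
      const text = await Bun.file(args.path).text();
      const contents = `export const msg = ${JSON.stringify(text)};`;
      const sourcemap = JSON.stringify({
        version: 3,
        sources: [args.path],
        names: [],
        mappings: "AAAA", // maps generated line 0 back to original line 0
        sourcesContent: [text],
      });
      return { contents, loader: "js", sourcemap };
    });
  },
};
```

Everything else about the plugin works as before; only the extra `sourcemap` string is new.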
@@ -26,6 +26,8 @@ exports_ref: Ref = Ref.None,
module_ref: Ref = Ref.None,
wrapper_ref: Ref = Ref.None,
require_ref: Ref = Ref.None,
/// Parsed input sourcemap from onLoad plugin, if provided
input_sourcemap: ?*bun.sourcemap.ParsedSourceMap = null,
top_level_await_keyword: logger.Range,
tla_check: TlaCheck = .{},
@@ -729,6 +729,7 @@ pub const JSBundler = struct {
success: struct {
source_code: []const u8 = "",
loader: options.Loader = .file,
sourcemap: ?[]const u8 = null,
},
pending,
no_match,

@@ -739,6 +740,7 @@ pub const JSBundler = struct {
switch (this.*) {
.success => |success| {
bun.default_allocator.free(success.source_code);
// sourcemap memory is managed later after parsing
},
.err => |*err| {
err.deinit(bun.default_allocator);

@@ -817,6 +819,7 @@ pub const JSBundler = struct {
_: *anyopaque,
source_code_value: JSValue,
loader_as_int: JSValue,
sourcemap_value: JSValue,
) void {
JSC.markBinding(@src());
if (source_code_value.isEmptyOrUndefinedOrNull() or loader_as_int.isEmptyOrUndefinedOrNull()) {

@@ -841,10 +844,18 @@ pub const JSBundler = struct {
@panic("Unexpected: source_code is not a string");
};

var maybe_sourcemap: ?[]const u8 = null;
if (!sourcemap_value.isEmptyOrUndefinedOrNull()) {
if (sourcemap_value.isString()) {
maybe_sourcemap = sourcemap_value.toSliceCloneWithAllocator(global, bun.default_allocator);
}
}
this.value = .{
.success = .{
.loader = options.Loader.fromAPI(loader),
.source_code = source_code,
.sourcemap = maybe_sourcemap,
},
};
}
@@ -46,7 +46,7 @@ extern "C" void OnBeforeParseResult__reset(OnBeforeParseResult* result);

/// These are callbacks defined in Zig and to be run after their associated JS version is run
extern "C" void JSBundlerPlugin__addError(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue);
extern "C" void JSBundlerPlugin__onLoadAsync(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue);
extern "C" void JSBundlerPlugin__onLoadAsync(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue, JSC::EncodedJSValue);
extern "C" void JSBundlerPlugin__onResolveAsync(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue, JSC::EncodedJSValue);
extern "C" void JSBundlerPlugin__onVirtualModulePlugin(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue, JSC::EncodedJSValue);
extern "C" JSC::EncodedJSValue JSBundlerPlugin__onDefer(void*, JSC::JSGlobalObject*);

@@ -111,7 +111,7 @@ bool BundlerPlugin::anyMatchesCrossThread(JSC::VM& vm, const BunString* namespac
static const HashTableValue JSBundlerPluginHashTable[] = {
{ "addFilter"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_addFilter, 3 } },
{ "addError"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_addError, 3 } },
{ "onLoadAsync"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_onLoadAsync, 3 } },
{ "onLoadAsync"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_onLoadAsync, 4 } },
{ "onResolveAsync"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_onResolveAsync, 4 } },
{ "onBeforeParse"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_onBeforeParse, 4 } },
{ "generateDeferPromise"_s, static_cast<unsigned>(JSC::PropertyAttribute::Function | JSC::PropertyAttribute::ReadOnly | JSC::PropertyAttribute::DontDelete), NoIntrinsic, { HashTableValue::NativeFunctionType, jsBundlerPluginFunction_generateDeferPromise, 0 } },

@@ -402,7 +402,8 @@ JSC_DEFINE_HOST_FUNCTION(jsBundlerPluginFunction_onLoadAsync, (JSC::JSGlobalObje
UNWRAP_BUNDLER_PLUGIN(callFrame),
thisObject->plugin.config,
JSValue::encode(callFrame->argument(1)),
JSValue::encode(callFrame->argument(2)));
JSValue::encode(callFrame->argument(2)),
JSValue::encode(callFrame->argument(3)));
}

return JSC::JSValue::encode(JSC::jsUndefined());
@@ -9,7 +9,7 @@
#include <JavaScriptCore/Yarr.h>

typedef void (*JSBundlerPluginAddErrorCallback)(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue);
typedef void (*JSBundlerPluginOnLoadAsyncCallback)(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue);
typedef void (*JSBundlerPluginOnLoadAsyncCallback)(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue, JSC::EncodedJSValue);
typedef void (*JSBundlerPluginOnResolveAsyncCallback)(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue, JSC::EncodedJSValue);
typedef void (*JSBundlerPluginNativeOnBeforeParseCallback)(const OnBeforeParseArguments*, OnBeforeParseResult*);
@@ -652,19 +652,16 @@ pub const LinkerContext = struct {

const sources = c.parse_graph.input_files.items(.source);
const quoted_source_map_contents = c.graph.files.items(.quoted_source_contents);
const input_source_maps = c.parse_graph.ast.items(.input_sourcemap);

// Entries in `results` do not 1:1 map to source files, the mapping
// is actually many to one, where a source file can have multiple chunks
// in the sourcemap.
//
// This hashmap is going to map:
// `source_index` (per compilation) in a chunk
// -->
// Which source index in the generated sourcemap, referred to
// as the "mapping source index" within this function to be distinct.
// Map from "intermediate" source_index -> base index in the final sources array
var source_id_map = std.AutoArrayHashMap(u32, i32).init(worker.allocator);
defer source_id_map.deinit();

// Collect the JSON-encoded strings we will emit in "sourcesContent"
var sources_content_values = std.ArrayList([]const u8).init(worker.allocator);
defer sources_content_values.deinit();

const source_indices = results.items(.source_index);

j.pushStatic(

@@ -672,41 +669,71 @@ pub const LinkerContext = struct {

\\ "version": 3,
\\ "sources": [
);
if (source_indices.len > 0) {
{
const index = source_indices[0];
var path = sources[index].path;
try source_id_map.putNoClobber(index, 0);

var next_mapping_source_index: i32 = 0;
var is_first_source = true;

for (source_indices) |src_index| {
const gop = try source_id_map.getOrPut(src_index);
if (gop.found_existing) continue; // already processed this file

// Determine base index for this file in the aggregated sources array
gop.value_ptr.* = next_mapping_source_index;

// Check if the AST for this file had an input sourcemap
if (input_source_maps[src_index]) |sm_ptr| {
const sm = sm_ptr;

// Resolve the list of source names from the nested sourcemap
if (sm.external_source_names.len > 0) {
// Use every external source name verbatim
for (sm.external_source_names) |name| {
if (!is_first_source) j.pushStatic(", ");
var qb = try MutableString.init(worker.allocator, name.len + 2);
qb = try js_printer.quoteForJSON(name, qb, false);
j.pushStatic(qb.list.items);

try sources_content_values.append("null");

next_mapping_source_index += 1;
is_first_source = false;
}
} else {
// No external names recorded – fall back to the original file name
var path = sources[src_index].path;
if (path.isFile()) {
const rel = try std.fs.path.relative(worker.allocator, chunk_abs_dir, path.text);
path.pretty = rel;
}

if (!is_first_source) j.pushStatic(", ");
var qb = try MutableString.init(worker.allocator, path.pretty.len + 2);
qb = try js_printer.quoteForJSON(path.pretty, qb, false);
j.pushStatic(qb.list.items);

try sources_content_values.append("null");

next_mapping_source_index += 1;
is_first_source = false;
}
} else {
// No nested map – use the file's own path
var path = sources[src_index].path;
if (path.isFile()) {
const rel_path = try std.fs.path.relative(worker.allocator, chunk_abs_dir, path.text);
path.pretty = rel_path;
const rel = try std.fs.path.relative(worker.allocator, chunk_abs_dir, path.text);
path.pretty = rel;
}

var quote_buf = try MutableString.init(worker.allocator, path.pretty.len + 2);
quote_buf = try js_printer.quoteForJSON(path.pretty, quote_buf, false);
j.pushStatic(quote_buf.list.items); // freed by arena
}
if (!is_first_source) j.pushStatic(", ");
var qb = try MutableString.init(worker.allocator, path.pretty.len + 2);
qb = try js_printer.quoteForJSON(path.pretty, qb, false);
j.pushStatic(qb.list.items);

var next_mapping_source_index: i32 = 1;
for (source_indices[1..]) |index| {
const gop = try source_id_map.getOrPut(index);
if (gop.found_existing) continue;
// Re-use the already-quoted contents we have for this source file
try sources_content_values.append(quoted_source_map_contents[src_index]);

gop.value_ptr.* = next_mapping_source_index;
next_mapping_source_index += 1;

var path = sources[index].path;

if (path.isFile()) {
const rel_path = try std.fs.path.relative(worker.allocator, chunk_abs_dir, path.text);
path.pretty = rel_path;
}

var quote_buf = try MutableString.init(worker.allocator, path.pretty.len + ", ".len + 2);
quote_buf.appendAssumeCapacity(", ");
quote_buf = try js_printer.quoteForJSON(path.pretty, quote_buf, false);
j.pushStatic(quote_buf.list.items); // freed by arena
is_first_source = false;
}
}

@@ -715,16 +742,16 @@ pub const LinkerContext = struct {
\\ "sourcesContent": [
);

const source_indices_for_contents = source_id_map.keys();
if (source_indices_for_contents.len > 0) {
if (sources_content_values.items.len > 0) {
j.pushStatic("\n ");
j.pushStatic(quoted_source_map_contents[source_indices_for_contents[0]]);
j.pushStatic(sources_content_values.items[0]);

for (source_indices_for_contents[1..]) |index| {
for (sources_content_values.items[1..]) |val| {
j.pushStatic(",\n ");
j.pushStatic(quoted_source_map_contents[index]);
j.pushStatic(val);
}
}

j.pushStatic(
\\
\\ ],
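The hunks above replace the old "first source, then the rest" loop with a single pass that deduplicates compilation source indices and, when a file carries a plugin-provided (nested) sourcemap, emits that map's external source names instead of the file path. A minimal TypeScript sketch of the mapping strategy, with hypothetical field names standing in for the Zig data structures:

```ts
// Sketch only: mirrors the dedupe + nested-map handling described above.
type FileEntry = {
  sourceIndex: number;            // per-compilation source_index
  path: string;                   // relative path used when no nested map exists
  externalSourceNames?: string[]; // names from a plugin-provided input sourcemap
};

function buildSources(files: FileEntry[]) {
  // compilation source_index -> base index in the output "sources" array
  const sourceIdMap = new Map<number, number>();
  const sources: string[] = [];
  const sourcesContent: (string | null)[] = [];

  for (const f of files) {
    // A file can appear in several chunks; only emit it once.
    if (sourceIdMap.has(f.sourceIndex)) continue;
    sourceIdMap.set(f.sourceIndex, sources.length);

    if (f.externalSourceNames?.length) {
      // A nested map contributes each of its own source names verbatim.
      for (const name of f.externalSourceNames) {
        sources.push(name);
        sourcesContent.push(null); // nested contents are not re-embedded here
      }
    } else {
      sources.push(f.path);
      sourcesContent.push(null); // filled from quoted source contents in the real code
    }
  }
  return { sources, sourcesContent, sourceIdMap };
}
```

The point of the map is that `sources` indices in the emitted sourcemap no longer line up 1:1 with compilation source indices once nested maps contribute extra entries.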
@@ -13,6 +13,8 @@ pub const ParseTask = @This();
path: Fs.Path,
secondary_path_for_commonjs_interop: ?Fs.Path = null,
contents_or_fd: ContentsOrFd,
/// Optional raw sourcemap string provided by an onLoad plugin
input_sourcemap: ?[]const u8 = null,
external_free_function: CacheEntry.ExternalFreeFunction = .none,
side_effects: _resolver.SideEffects,
loader: ?Loader = null,

@@ -118,6 +120,7 @@ pub fn init(resolve_result: *const _resolver.Result, source_index: Index, ctx: *
},
},
.side_effects = resolve_result.primary_side_effects_data,
.input_sourcemap = null,
.jsx = resolve_result.jsx,
.source_index = source_index,
.module_type = resolve_result.module_type,

@@ -258,6 +261,7 @@ fn getRuntimeSourceComptime(comptime target: options.Target) RuntimeSource {
.contents_or_fd = .{
.contents = runtime_code,
},
.input_sourcemap = null,
.source_index = Index.runtime,
.loader = .js,
.known_target = target,

@@ -1230,6 +1234,24 @@ fn runWithSourceCode(
),
};

// Parse and attach plugin-provided sourcemap, if present
if (task.input_sourcemap) |smap_bytes| {
var arena = std.heap.ArenaAllocator.init(allocator);
defer arena.deinit();
const parse_result = bun.sourcemap.parseJSON(
allocator,
arena.allocator(),
smap_bytes,
.{ .mappings_only = {} },
) catch null;

if (parse_result) |pr| {
if (pr.map) |map_ptr| {
ast.input_sourcemap = map_ptr;
}
}
}

ast.target = target;
if (ast.parts.len <= 1 and ast.css == null and (task.loader == null or task.loader.? != .html)) {
task.side_effects = .no_side_effects__empty_ast;
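The @@ -1230 hunk parses the raw string from the plugin as standard Source Map v3 JSON (mappings only) and attaches the result to the AST. For reference, a hedged example of the smallest useful payload a plugin could hand over; the file name and contents are placeholders, and the fields are the standard spec fields:

```ts
// Hypothetical minimal Source Map v3 payload a plugin might return as `sourcemap`.
const inputSourcemap = JSON.stringify({
  version: 3,
  sources: ["hello.foo"],            // original file the generated code came from
  names: [],
  mappings: "AAAA",                  // generated line 0, col 0 -> source 0, line 0, col 0
  sourcesContent: ["Hello, World!"], // optional, lets devtools show the original text
});
```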
@@ -1974,6 +1974,7 @@ pub const BundleV2 = struct {
this.graph.input_files.items(.is_plugin_file)[load.source_index.get()] = true;
var parse_task = load.parse_task;
parse_task.loader = code.loader;
parse_task.input_sourcemap = code.sourcemap;
if (!should_copy_for_bundling) this.free_list.append(code.source_code) catch unreachable;
parse_task.contents_or_fd = .{
.contents = code.source_code,

@@ -2132,6 +2133,7 @@ pub const BundleV2 = struct {
.file = bun.invalid_fd,
},
},
.input_sourcemap = null,
.side_effects = .has_side_effects,
.jsx = this.transpilerForTarget(resolve.import_record.original_target).options.jsx,
.source_index = source_index,
@@ -1,6 +1,49 @@
import type { BuildConfig, BunPlugin, OnLoadCallback, OnResolveCallback, PluginBuilder, PluginConstraints } from "bun";
/// <reference types="bun-types" />

/*
* bun-types publishes only global declarations. Importing the package as a
* module produces "is not a module" errors, so we just reference it above to
* pull the ambient names into scope.
*/

// Globals used by this built-in that aren't part of the DOM lib
// but are provided at runtime by Bun. Declaring them here silences TS.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
declare var process: any;
declare function require(path: string): any;

import type {
BuildConfig,
BunPlugin,
PluginBuilder,
PluginConstraints,
OnLoadCallback,
OnResolveCallback,
} from "bun";

// Globals provided at runtime (not in lib.dom)
// eslint-disable-next-line @typescript-eslint/no-explicit-any
declare var process: any;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
declare function require(path: string): any;

type AnyFunction = (...args: any[]) => any;

interface BuildConfigExt extends BuildConfig {
/** project root passed from Bun */
root?: string;
/** compilation target */
target?: "browser" | "bun" | "node";
/** plugin array is never null inside bundler */
plugins: BunPlugin[];
/** esbuild-style alias */
entryPoints?: string[];
/** legacy lowercase alias */
entrypoints?: string[];
/** minify options forwarded from build api */
minify?: BuildConfig["minify"];
}

/**
* @see `JSBundlerPlugin.h`
*/

@@ -12,6 +55,7 @@ interface BundlerPlugin {
internalID,
sourceCode: string | Uint8Array | ArrayBuffer | DataView | null,
loaderKey: number | null,
sourcemap: string | null,
): void;
/** Binding to `JSBundlerPlugin__onResolveAsync` */
onResolveAsync(internalID, a, b, c): void;

@@ -27,13 +71,12 @@ interface BundlerPlugin {

// Extra types
type Setup = BunPlugin["setup"];
type MinifyObj = Exclude<BuildConfig["minify"], boolean>;
interface BuildConfigExt extends BuildConfig {
// we support esbuild-style 'entryPoints' capitalization
entryPoints?: string[];
// plugins is guaranteed to not be null
plugins: BunPlugin[];
}
type MinifyObj = {
identifiers?: boolean;
whitespace?: boolean;
syntax?: boolean;
};

interface PluginBuilderExt extends PluginBuilder {
resolve: AnyFunction;
onEnd: AnyFunction;

@@ -55,11 +98,10 @@ export function loadAndResolvePluginsForServe(
runSetupFn: typeof runSetupFunction,
) {
// Same config as created in HTMLBundle.init
let config: BuildConfigExt = {
experimentalCss: true,
experimentalHtml: true,
target: "browser",
const config: BuildConfigExt = {
root: bunfig_folder,
target: "browser",
plugins: [],
};

class InvalidBundlerPluginError extends TypeError {

@@ -129,7 +171,13 @@ export function runSetupFunction(
[RegExp, napiModule: unknown, symbol: string, external?: undefined | unknown][]
>();

function validate(filterObject: PluginConstraints, callback, map, symbol, external) {
function validate(
filterObject: PluginConstraints,
callback: OnLoadCallback | OnResolveCallback | unknown,
map: Map<string, any>,
symbol?: string,
external?: unknown,
): void {
if (!filterObject || !$isObject(filterObject)) {
throw new TypeError('Expected an object with "filter" RegExp');
}

@@ -458,7 +506,7 @@ export function runOnLoadPlugins(
var promiseResult = (async (internalID, path, namespace, isServerSide, defaultLoader, generateDefer) => {
var results = this.onLoad.$get(namespace);
if (!results) {
this.onLoadAsync(internalID, null, null);
this.onLoadAsync(internalID, null, null, null);
return null;
}

@@ -490,7 +538,7 @@ export function runOnLoadPlugins(
continue;
}

var { contents, loader = defaultLoader } = result as any;
var { contents, loader = defaultLoader, sourcemap } = result as any;
if ((loader as any) === "object") {
if (!("exports" in result)) {
throw new TypeError('onLoad plugin returning loader: "object" must have "exports" property');

@@ -516,12 +564,12 @@ export function runOnLoadPlugins(
throw new TypeError(`Loader ${loader} is not supported.`);
}

this.onLoadAsync(internalID, contents as any, chosenLoader);
this.onLoadAsync(internalID, contents as any, chosenLoader, sourcemap ?? null);
return null;
}
}

this.onLoadAsync(internalID, null, null);
this.onLoadAsync(internalID, null, null, null);
return null;
})(internalID, path, namespace, isServerSide, loaderName, generateDefer);
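Per the @@ -490 and @@ -516 hunks, the builtin now destructures an optional `sourcemap` from each onLoad result and forwards it as a fourth argument to `onLoadAsync`. A hedged sketch of the result object that path consumes; the values are illustrative:

```ts
// Shape consumed by runOnLoadPlugins after this change; only `sourcemap` is new.
const onLoadResult = {
  contents: 'export const msg = "hi";',
  loader: "js",
  sourcemap: JSON.stringify({ version: 3, sources: ["msg.foo"], names: [], mappings: "AAAA" }),
};
// Internally this ends up as:
//   onLoadAsync(internalID, contents, chosenLoader, sourcemap ?? null)
```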
@@ -674,7 +674,7 @@ fn NewPrinter(

has_printed_bundled_import_statement: bool = false,

renamer: rename.Renamer,
renamer: bun.renamer.Renamer,
prev_stmt_tag: Stmt.Tag = .s_empty,
source_map_builder: SourceMap.Chunk.Builder = undefined,

@@ -5807,6 +5807,8 @@ pub fn printAst(
renamer,
getSourceMapBuilder(if (generate_source_map) .lazy else .disable, ascii_only, opts, source, &tree),
);
printer.source_map_builder.input_source_map = ast.input_sourcemap;

defer {
if (comptime generate_source_map) {
printer.source_map_builder.line_offset_tables.deinit(opts.allocator);

@@ -6002,6 +6004,7 @@ pub fn printWithWriterAndPlatform(
renamer,
getSourceMapBuilder(if (generate_source_maps) .eager else .disable, is_bun_platform, opts, source, &ast),
);
printer.source_map_builder.input_source_map = ast.input_sourcemap;
printer.was_lazy_export = ast.has_lazy_export;
var bin_stack_heap = std.heap.stackFallback(1024, bun.default_allocator);
printer.binary_expression_stack = std.ArrayList(PrinterType.BinaryExpressionVisitor).init(bin_stack_heap.get());

@@ -6083,6 +6086,7 @@ pub fn printCommonJS(
renamer.toRenamer(),
getSourceMapBuilder(if (generate_source_map) .lazy else .disable, false, opts, source, &tree),
);
printer.source_map_builder.input_source_map = tree.input_sourcemap;
var bin_stack_heap = std.heap.stackFallback(1024, bun.default_allocator);
printer.binary_expression_stack = std.ArrayList(PrinterType.BinaryExpressionVisitor).init(bin_stack_heap.get());
defer printer.binary_expression_stack.clearAndFree();
78 test/bundler/plugin_sourcemap.test.ts Normal file
@@ -0,0 +1,78 @@
import { describe } from "bun:test";
import { itBundled } from "./expectBundled";

function makePlugin() {
  return function (builder: any) {
    builder.onLoad({ filter: /\.foo$/ }, async (args: any) => {
      const text = await Bun.file(args.path).text();
      const js = `export const msg = ${JSON.stringify(text)};`;

      // Very small identity sourcemap (each line maps to the same line/column in the original file)
      const lineCount = js.split("\n").length;
      const mappings = new Array(lineCount).fill("AAAA").join(";");
      const sourcemap = JSON.stringify({
        version: 3,
        sources: [args.path],
        names: [],
        mappings,
        sourcesContent: [text],
      });

      return {
        contents: js,
        loader: "js",
        sourcemap,
      };
    });
  };
}

describe("bundler – plugin sourcemap", () => {
  itBundled("plugin/SourcemapBasic", {
    files: {
      "index.ts": /* ts */ `
        import { msg } from "./hello.foo";
        console.log(msg);
      `,
      "hello.foo": `Hello, World!`,
    },
    plugins: makePlugin(),
    sourceMap: "external",
    run: {
      stdout: "Hello, World!",
    },
  });

  // Error reporting should point back to original .foo line
  itBundled("plugin/SourcemapErrorLine", {
    files: {
      "index.ts": /* ts */ `import './boom.foo'`,
      "boom.foo": `
        // line 1 (comment)
        throw new Error('bad things'); // line 2
      `,
    },
    plugins: makePlugin(),
    sourceMap: "external",
    run: {
      error: "Error: bad things",
      errorLineMatch: /throw new Error\('bad things'\)/,
    },
  });

  // Ensure minified builds still respect plugin sourcemaps
  itBundled("plugin/SourcemapMinified", {
    files: {
      "index.ts": `import { msg } from './msg.foo'; console.log(msg);`,
      "msg.foo": `minified!`,
    },
    plugins: makePlugin(),
    sourceMap: "external",
    minifySyntax: true,
    minifyIdentifiers: true,
    minifyWhitespace: true,
    run: {
      stdout: "minified!",
    },
  });
});
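Outside the test harness, the same plugin could be driven through `Bun.build`. A hedged usage sketch; the entrypoint path, output directory, and the wrapper object around `makePlugin()` are placeholders, and a module context with top-level await is assumed:

```ts
// Wrap the setup function from makePlugin() above as a BunPlugin object.
const fooPlugin = { name: "foo-loader", setup: makePlugin() };

const result = await Bun.build({
  entrypoints: ["./index.ts"],
  outdir: "./out",
  sourcemap: "external", // emit .js.map files next to the bundles
  plugins: [fooPlugin],
});

// List emitted artifacts (bundle + external sourcemap).
console.log(result.outputs.map(artifact => artifact.path));
```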