Refactor: Extract ModuleLoader components into separate files (#24083)

## Summary

Split `ModuleLoader.zig` into smaller, more focused modules for better
code organization and maintainability:

- `AsyncModule` → `src/bun.js/AsyncModule.zig` (lines 69-806)
- `RuntimeTranspilerStore` → `src/bun.js/RuntimeTranspilerStore.zig`
(lines 2028-2606)
- `HardcodedModule` → `src/bun.js/HardcodedModule.zig` (lines 2618-3040)

## Changes

- Extracted three large components from `ModuleLoader.zig` into separate
files
- Updated imports in all affected files
- Made necessary functions/constants public (`dumpSource`,
`dumpSourceString`, `setBreakPointOnFirstLine`, `bun_aliases`)
- Updated `ModuleLoader.zig` to import the new modules

## Testing

- Build passes successfully (`bun bd`)
- Basic module loading verified with smoke tests
- Existing resolve tests continue to pass

🤖 Generated with [Claude Code](https://claude.com/claude-code)

---------

Co-authored-by: Claude Bot <claude-bot@bun.sh>
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
This commit is contained in:
robobun
2025-10-25 20:43:02 -07:00
committed by GitHub
parent a2b262ed69
commit 3367fa6ae3
4 changed files with 1846 additions and 1751 deletions

781
src/bun.js/AsyncModule.zig Normal file
View File

@@ -0,0 +1,781 @@
const debug = Output.scoped(.AsyncModule, .hidden);
const string = []const u8;
/// A module whose load was suspended mid-parse because it imports npm
/// packages that still need to be resolved and/or downloaded by the package
/// manager. The parse state is frozen here together with a strong JS promise;
/// once every pending import is installed, `resumeLoadingModule` finishes
/// linking/printing and the promise is fulfilled (or rejected on failure).
pub const AsyncModule = struct {
    // This is all the state used by the printer to print the module
    parse_result: ParseResult,
    /// Strong reference to the internal JS promise returned to the module
    /// loader; settled in `onDone`, `resolveError`, or `downloadError`.
    promise: jsc.Strong.Optional = .empty,
    path: Fs.Path,
    specifier: string = "",
    referrer: string = "",
    /// Single backing allocation for `specifier`, `referrer`, and `path.text`
    /// (built in `init`, freed in `deinit`).
    string_buf: []u8 = &[_]u8{},
    fd: ?StoredFileDescriptorType = null,
    package_json: ?*PackageJSON = null,
    loader: api.Loader,
    /// Hash handed to the file watcher in `resumeLoadingModule`;
    /// maxInt(u32) is the "unset" default.
    hash: u32 = std.math.maxInt(u32),
    globalThis: *JSGlobalObject = undefined,
    /// Arena backing the parse result; destroyed (and its pointer freed) in `deinit`.
    arena: *bun.ArenaAllocator,

    // This is the specific state for making it async
    /// Keeps the event loop alive while this module waits on the package manager.
    poll_ref: Async.KeepAlive = .{},
    any_task: jsc.AnyTask = undefined,

    pub const Id = u32;

    /// Error details for a failed package tarball download (see `downloadError`).
    const PackageDownloadError = struct {
        name: []const u8,
        resolution: Install.Resolution,
        err: anyerror,
        url: []const u8,
    };

    /// Error details for a failed package manifest/version resolution
    /// (see `resolveError`).
    const PackageResolveError = struct {
        name: []const u8,
        err: anyerror,
        url: []const u8,
        version: Dependency.Version,
    };

    /// VirtualMachine-owned list of suspended modules. The package manager
    /// wakes this queue as tasks complete; `pollModules` re-checks each
    /// module's pending imports and finishes the ones that are fully installed.
    pub const Queue = struct {
        map: Map = .{},
        /// Count of AsyncModule completion tasks currently scheduled on the JS
        /// event loop; when it drops back to 0 the progress bar is ended.
        scheduled: u32 = 0,
        concurrent_task_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0),

        const DeferredDependencyError = struct {
            dependency: Dependency,
            root_dependency_id: Install.DependencyID,
            err: anyerror,
        };

        pub const Map = std.ArrayListUnmanaged(AsyncModule);

        /// Suspend a module: wrap `opts` in an AsyncModule, pin the event
        /// loop, and kick the package manager's dependency queue.
        /// NOTE(review): allocation failures here abort via `catch
        /// unreachable`, matching the surrounding convention.
        pub fn enqueue(this: *Queue, globalObject: *JSGlobalObject, opts: anytype) void {
            debug("enqueue: {s}", .{opts.specifier});
            var module = AsyncModule.init(opts, globalObject) catch unreachable;
            module.poll_ref.ref(this.vm());

            this.map.append(this.vm().allocator, module) catch unreachable;
            this.vm().packageManager().drainDependencyList();
        }

        /// Package-manager callback: reject every queued module that has a
        /// pending import rooted at `root_dependency_id`. Surviving modules
        /// are compacted in place; rejected ones are removed (resolveError
        /// deinits them).
        pub fn onDependencyError(ctx: *anyopaque, dependency: Dependency, root_dependency_id: Install.DependencyID, err: anyerror) void {
            var this = bun.cast(*Queue, ctx);
            debug("onDependencyError: {s}", .{this.vm().packageManager().lockfile.str(&dependency.name)});

            var modules: []AsyncModule = this.map.items;
            var i: usize = 0;
            outer: for (modules) |module_| {
                var module = module_;
                const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id);
                for (root_dependency_ids, 0..) |dep, dep_i| {
                    if (dep != root_dependency_id) continue;
                    module.resolveError(
                        this.vm(),
                        module.parse_result.pending_imports.items(.import_record_id)[dep_i],
                        .{
                            .name = this.vm().packageManager().lockfile.str(&dependency.name),
                            .err = err,
                            .url = "",
                            .version = dependency.version,
                        },
                    ) catch unreachable;
                    continue :outer;
                }

                modules[i] = module;
                i += 1;
            }
            // Truncate to the modules that were kept.
            this.map.items.len = i;
        }

        /// Package-manager wake callback: schedule `onPoll` on the JS thread.
        pub fn onWakeHandler(ctx: *anyopaque, _: *PackageManager) void {
            debug("onWake", .{});
            var this = bun.cast(*Queue, ctx);
            this.vm().enqueueTaskConcurrent(jsc.ConcurrentTask.createFrom(this));
        }

        /// Drain package-manager tasks, then re-check every suspended module.
        pub fn onPoll(this: *Queue) void {
            debug("onPoll", .{});
            this.runTasks();
            this.pollModules();
        }

        /// Run pending package-manager tasks. The two branches differ only in
        /// progress-bar usage: a bar is shown when stderr supports ANSI colors.
        pub fn runTasks(this: *Queue) void {
            var pm = this.vm().packageManager();

            if (Output.enable_ansi_colors_stderr) {
                pm.startProgressBarIfNone();
                pm.runTasks(
                    *Queue,
                    this,
                    .{
                        .onExtract = {},
                        .onResolve = onResolve,
                        .onPackageManifestError = onPackageManifestError,
                        .onPackageDownloadError = onPackageDownloadError,
                        .progress_bar = true,
                    },
                    true,
                    PackageManager.Options.LogLevel.default,
                ) catch unreachable;
            } else {
                pm.runTasks(
                    *Queue,
                    this,
                    .{
                        .onExtract = {},
                        .onResolve = onResolve,
                        .onPackageManifestError = onPackageManifestError,
                        .onPackageDownloadError = onPackageDownloadError,
                    },
                    true,
                    PackageManager.Options.LogLevel.default_no_progress,
                ) catch unreachable;
            }
        }

        /// Resolution-complete callback; progress is picked up by `pollModules`.
        pub fn onResolve(_: *Queue) void {
            debug("onResolve", .{});
        }

        /// Reject every queued module that has a pending `.resolve` import for
        /// package `name`. Modules without a matching import are kept
        /// (compacted in place, same pattern as `onDependencyError`).
        pub fn onPackageManifestError(
            this: *Queue,
            name: []const u8,
            err: anyerror,
            url: []const u8,
        ) void {
            debug("onPackageManifestError: {s}", .{name});

            var modules: []AsyncModule = this.map.items;
            var i: usize = 0;
            outer: for (modules) |module_| {
                var module = module_;
                const tags = module.parse_result.pending_imports.items(.tag);
                for (tags, 0..) |tag, tag_i| {
                    if (tag == .resolve) {
                        const esms = module.parse_result.pending_imports.items(.esm);
                        const esm = esms[tag_i];
                        const string_bufs = module.parse_result.pending_imports.items(.string_buf);

                        if (!strings.eql(esm.name.slice(string_bufs[tag_i]), name)) continue;

                        const versions = module.parse_result.pending_imports.items(.dependency);

                        module.resolveError(
                            this.vm(),
                            module.parse_result.pending_imports.items(.import_record_id)[tag_i],
                            .{
                                .name = name,
                                .err = err,
                                .url = url,
                                .version = versions[tag_i],
                            },
                        ) catch unreachable;
                        continue :outer;
                    }
                }

                modules[i] = module;
                i += 1;
            }
            this.map.items.len = i;
        }

        /// Reject every queued module whose pending import resolves to
        /// `package_id` after its tarball download failed. Same keep/compact
        /// pattern as the other error callbacks.
        pub fn onPackageDownloadError(
            this: *Queue,
            package_id: Install.PackageID,
            name: []const u8,
            resolution: *const Install.Resolution,
            err: anyerror,
            url: []const u8,
        ) void {
            debug("onPackageDownloadError: {s}", .{name});

            const resolution_ids = this.vm().packageManager().lockfile.buffers.resolutions.items;
            var modules: []AsyncModule = this.map.items;
            var i: usize = 0;
            outer: for (modules) |module_| {
                var module = module_;
                const record_ids = module.parse_result.pending_imports.items(.import_record_id);
                const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id);
                for (root_dependency_ids, 0..) |dependency_id, import_id| {
                    if (resolution_ids[dependency_id] != package_id) continue;

                    module.downloadError(
                        this.vm(),
                        record_ids[import_id],
                        .{
                            .name = name,
                            .resolution = resolution.*,
                            .err = err,
                            .url = url,
                        },
                    ) catch unreachable;
                    continue :outer;
                }

                modules[i] = module;
                i += 1;
            }
            this.map.items.len = i;
        }

        /// Advance each suspended module's pending-import state machine
        /// (.resolve -> .download -> .done). A module whose imports are all
        /// `.done` is completed via `module.done`; the rest stay queued.
        /// Bails out early while the package manager still has pending tasks.
        pub fn pollModules(this: *Queue) void {
            var pm = this.vm().packageManager();
            if (pm.pending_tasks.load(.monotonic) > 0) return;

            var modules: []AsyncModule = this.map.items;
            var i: usize = 0;

            for (modules) |mod| {
                var module = mod;
                var tags = module.parse_result.pending_imports.items(.tag);
                const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id);
                // var esms = module.parse_result.pending_imports.items(.esm);
                // var versions = module.parse_result.pending_imports.items(.dependency);
                var done_count: usize = 0;
                for (tags, 0..) |tag, tag_i| {
                    const root_id = root_dependency_ids[tag_i];
                    const resolution_ids = pm.lockfile.buffers.resolutions.items;
                    if (root_id >= resolution_ids.len) continue;
                    const package_id = resolution_ids[root_id];

                    switch (tag) {
                        .resolve => {
                            if (package_id == Install.invalid_package_id) {
                                continue;
                            }

                            // if we get here, the package has already been resolved.
                            tags[tag_i] = .download;
                        },
                        .download => {
                            if (package_id == Install.invalid_package_id) {
                                unreachable;
                            }
                        },
                        .done => {
                            done_count += 1;
                            continue;
                        },
                    }

                    if (package_id == Install.invalid_package_id) {
                        continue;
                    }

                    const package = pm.lockfile.packages.get(package_id);
                    bun.assert(package.resolution.tag != .root);

                    var name_and_version_hash: ?u64 = null;
                    var patchfile_hash: ?u64 = null;
                    switch (pm.determinePreinstallState(package, pm.lockfile, &name_and_version_hash, &patchfile_hash)) {
                        .done => {
                            // we are only truly done if all the dependencies are done.
                            const current_tasks = pm.total_tasks;
                            // so if enqueuing all the dependencies produces no new tasks, we are done.
                            pm.enqueueDependencyList(package.dependencies);
                            if (current_tasks == pm.total_tasks) {
                                tags[tag_i] = .done;
                                done_count += 1;
                            }
                        },
                        .extracting => {
                            // we are extracting the package
                            // we need to wait for the next poll
                            continue;
                        },
                        .extract => {},
                        else => {},
                    }
                }

                if (done_count == tags.len) {
                    module.done(this.vm());
                } else {
                    modules[i] = module;
                    i += 1;
                }
            }
            this.map.items.len = i;

            if (i == 0) {
                // ensure we always end the progress bar
                this.vm().packageManager().endProgressBar();
            }
        }

        /// Recover the owning VirtualMachine from this queue's address; relies
        /// on VirtualMachine embedding this Queue as its `modules` field.
        pub fn vm(this: *Queue) *VirtualMachine {
            return @alignCast(@fieldParentPtr("modules", this));
        }

        comptime {
            // Ensure VirtualMachine has a field named "modules" of the correct type
            // If this fails, the @fieldParentPtr in vm() above needs to be updated
            const VM = @import("./VirtualMachine.zig");
            if (!@hasField(VM, "modules")) {
                @compileError("VirtualMachine must have a 'modules' field for AsyncModule.Queue.vm() to work");
            }
        }
    };

    /// Build an AsyncModule from loader options, copying `referrer`,
    /// `specifier`, and the path text into one owned buffer, and creating the
    /// internal promise that is also written back through `opts.promise_ptr`.
    pub fn init(opts: anytype, globalObject: *JSGlobalObject) !AsyncModule {
        // var stmt_blocks = js_ast.Stmt.Data.toOwnedSlice();
        // var expr_blocks = js_ast.Expr.Data.toOwnedSlice();
        const this_promise = JSValue.createInternalPromise(globalObject);
        const promise = jsc.Strong.Optional.create(this_promise, globalObject);

        var buf = bun.StringBuilder{};
        buf.count(opts.referrer);
        buf.count(opts.specifier);
        buf.count(opts.path.text);

        try buf.allocate(bun.default_allocator);
        opts.promise_ptr.?.* = this_promise.asInternalPromise().?;
        const referrer = buf.append(opts.referrer);
        const specifier = buf.append(opts.specifier);
        const path = Fs.Path.init(buf.append(opts.path.text));

        return AsyncModule{
            .parse_result = opts.parse_result,
            .promise = promise,
            .path = path,
            .specifier = specifier,
            .referrer = referrer,
            .fd = opts.fd,
            .package_json = opts.package_json,
            .loader = opts.loader.toAPI(),
            .string_buf = buf.allocatedSlice(),
            // .stmt_blocks = stmt_blocks,
            // .expr_blocks = expr_blocks,
            .globalThis = globalObject,
            .arena = opts.arena,
        };
    }

    /// All pending imports are installed: heap-clone this module and schedule
    /// `onDone` on the event loop. The clone is destroyed in `onDone`.
    pub fn done(this: *AsyncModule, jsc_vm: *VirtualMachine) void {
        var clone = jsc_vm.allocator.create(AsyncModule) catch unreachable;
        clone.* = this.*;
        jsc_vm.modules.scheduled += 1;
        clone.any_task = jsc.AnyTask.New(AsyncModule, onDone).init(clone);
        jsc_vm.enqueueTask(jsc.Task.init(&clone.any_task));
    }

    /// Event-loop task: finish loading the module (`resumeLoadingModule`),
    /// convert the outcome into an ErrorableResolvedSource, settle the
    /// promise via Bun__onFulfillAsyncModule, then free this heap clone.
    pub fn onDone(this: *AsyncModule) void {
        jsc.markBinding(@src());
        var jsc_vm = this.globalThis.bunVM();
        jsc_vm.modules.scheduled -= 1;
        if (jsc_vm.modules.scheduled == 0) {
            jsc_vm.packageManager().endProgressBar();
        }
        var log = logger.Log.init(jsc_vm.allocator);
        defer log.deinit();
        var errorable: jsc.ErrorableResolvedSource = undefined;
        this.poll_ref.unref(jsc_vm);
        outer: {
            errorable = jsc.ErrorableResolvedSource.ok(this.resumeLoadingModule(&log) catch |err| {
                switch (err) {
                    error.JSError => {
                        errorable = .err(error.JSError, this.globalThis.takeError(error.JSError));
                        break :outer;
                    },
                    else => {
                        VirtualMachine.processFetchLog(
                            this.globalThis,
                            bun.String.init(this.specifier),
                            bun.String.init(this.referrer),
                            &log,
                            &errorable,
                            err,
                        );
                        break :outer;
                    },
                }
            });
        }

        var spec = bun.String.init(ZigString.init(this.specifier).withEncoding());
        var ref = bun.String.init(ZigString.init(this.referrer).withEncoding());
        bun.jsc.fromJSHostCallGeneric(this.globalThis, @src(), Bun__onFulfillAsyncModule, .{
            this.globalThis,
            this.promise.get().?,
            &errorable,
            &spec,
            &ref,
        }) catch {};
        this.deinit();
        jsc_vm.allocator.destroy(this);
    }

    /// Settle an async-module promise directly from a ResolvedSource (or an
    /// error). Consumes `specifier_`/`referrer_` (deref'd on exit) and takes
    /// ownership of `log` (deinit'd here).
    pub fn fulfill(
        globalThis: *JSGlobalObject,
        promise: JSValue,
        resolved_source: *ResolvedSource,
        err: ?anyerror,
        specifier_: bun.String,
        referrer_: bun.String,
        log: *logger.Log,
    ) bun.JSError!void {
        jsc.markBinding(@src());
        var specifier = specifier_;
        var referrer = referrer_;
        var scope: jsc.CatchScope = undefined;
        scope.init(globalThis, @src());
        defer {
            specifier.deref();
            referrer.deref();
            scope.deinit();
        }

        var errorable: jsc.ErrorableResolvedSource = undefined;
        if (err) |e| {
            defer {
                // On the error path the source code buffer is unused; drop our ref.
                if (resolved_source.source_code_needs_deref) {
                    resolved_source.source_code_needs_deref = false;
                    resolved_source.source_code.deref();
                }
            }

            if (e == error.JSError) {
                errorable = jsc.ErrorableResolvedSource.err(error.JSError, globalThis.takeError(error.JSError));
            } else {
                VirtualMachine.processFetchLog(
                    globalThis,
                    specifier,
                    referrer,
                    log,
                    &errorable,
                    e,
                );
            }
        } else {
            errorable = jsc.ErrorableResolvedSource.ok(resolved_source.*);
        }
        log.deinit();

        debug("fulfill: {any}", .{specifier});

        try bun.jsc.fromJSHostCallGeneric(globalThis, @src(), Bun__onFulfillAsyncModule, .{
            globalThis,
            promise,
            &errorable,
            &specifier,
            &referrer,
        });
    }

    /// Reject this module's promise with a descriptive error for a failed
    /// package resolution (manifest HTTP errors, missing versions, …).
    /// Attaches url/name/pkg/specifier plus source-location properties to the
    /// error instance, then deinits `this`.
    pub fn resolveError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageResolveError) !void {
        const globalThis = this.globalThis;

        const msg: []u8 = try switch (result.err) {
            error.PackageManifestHTTP400 => std.fmt.allocPrint(
                bun.default_allocator,
                "HTTP 400 while resolving package '{s}' at '{s}'",
                .{ result.name, result.url },
            ),
            error.PackageManifestHTTP401 => std.fmt.allocPrint(
                bun.default_allocator,
                "HTTP 401 while resolving package '{s}' at '{s}'",
                .{ result.name, result.url },
            ),
            error.PackageManifestHTTP402 => std.fmt.allocPrint(
                bun.default_allocator,
                "HTTP 402 while resolving package '{s}' at '{s}'",
                .{ result.name, result.url },
            ),
            error.PackageManifestHTTP403 => std.fmt.allocPrint(
                bun.default_allocator,
                "HTTP 403 while resolving package '{s}' at '{s}'",
                .{ result.name, result.url },
            ),
            error.PackageManifestHTTP404 => std.fmt.allocPrint(
                bun.default_allocator,
                "Package '{s}' was not found",
                .{result.name},
            ),
            error.PackageManifestHTTP4xx => std.fmt.allocPrint(
                bun.default_allocator,
                "HTTP 4xx while resolving package '{s}' at '{s}'",
                .{ result.name, result.url },
            ),
            error.PackageManifestHTTP5xx => std.fmt.allocPrint(
                bun.default_allocator,
                "HTTP 5xx while resolving package '{s}' at '{s}'",
                .{ result.name, result.url },
            ),
            error.DistTagNotFound, error.NoMatchingVersion => brk: {
                // Pick the most specific prefix based on whether an exact
                // version was requested.
                const prefix: []const u8 = if (result.err == error.NoMatchingVersion and result.version.tag == .npm and result.version.value.npm.version.isExact())
                    "Version not found"
                else if (result.version.tag == .npm and !result.version.value.npm.version.isExact())
                    "No matching version found"
                else
                    "No match found";

                break :brk std.fmt.allocPrint(
                    bun.default_allocator,
                    "{s} '{s}' for package '{s}' (but package exists)",
                    .{ prefix, vm.packageManager().lockfile.str(&result.version.literal), result.name },
                );
            },
            else => |err| std.fmt.allocPrint(
                bun.default_allocator,
                "{s} resolving package '{s}' at '{s}'",
                .{ bun.asByteSlice(@errorName(err)), result.name, result.url },
            ),
        };
        defer bun.default_allocator.free(msg);

        const name: []const u8 = switch (result.err) {
            error.NoMatchingVersion => "PackageVersionNotFound",
            error.DistTagNotFound => "PackageTagNotFound",
            error.PackageManifestHTTP403 => "PackageForbidden",
            error.PackageManifestHTTP404 => "PackageNotFound",
            else => "PackageResolveError",
        };

        var error_instance = ZigString.init(msg).withEncoding().toErrorInstance(globalThis);
        if (result.url.len > 0)
            error_instance.put(globalThis, ZigString.static("url"), ZigString.init(result.url).withEncoding().toJS(globalThis));
        error_instance.put(globalThis, ZigString.static("name"), ZigString.init(name).withEncoding().toJS(globalThis));
        error_instance.put(globalThis, ZigString.static("pkg"), ZigString.init(result.name).withEncoding().toJS(globalThis));
        error_instance.put(globalThis, ZigString.static("specifier"), ZigString.init(this.specifier).withEncoding().toJS(globalThis));
        const location = logger.rangeData(&this.parse_result.source, this.parse_result.ast.import_records.at(import_record_id).range, "").location.?;
        error_instance.put(globalThis, ZigString.static("sourceURL"), ZigString.init(this.parse_result.source.path.text).withEncoding().toJS(globalThis));
        error_instance.put(globalThis, ZigString.static("line"), JSValue.jsNumber(location.line));
        if (location.line_text) |line_text| {
            error_instance.put(globalThis, ZigString.static("lineText"), ZigString.init(line_text).withEncoding().toJS(globalThis));
        }
        error_instance.put(globalThis, ZigString.static("column"), JSValue.jsNumber(location.column));
        if (this.referrer.len > 0 and !strings.eqlComptime(this.referrer, "undefined")) {
            error_instance.put(globalThis, ZigString.static("referrer"), ZigString.init(this.referrer).withEncoding().toJS(globalThis));
        }

        // Take the promise out of the Strong ref before tearing down `this`,
        // then reject it last so all state is released first.
        const promise_value = this.promise.swap();
        var promise = promise_value.asInternalPromise().?;
        promise_value.ensureStillAlive();
        this.poll_ref.unref(vm);
        this.deinit();
        promise.rejectAsHandled(globalThis, error_instance);
    }

    /// Reject this module's promise with a descriptive error for a failed
    /// tarball download/extraction. Mirrors `resolveError`'s property
    /// decoration and teardown ordering.
    pub fn downloadError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageDownloadError) !void {
        const globalThis = this.globalThis;

        const msg_args = .{
            result.name,
            result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items, .any),
        };

        const msg: []u8 = try switch (result.err) {
            error.TarballHTTP400 => std.fmt.allocPrint(
                bun.default_allocator,
                "HTTP 400 downloading package '{s}@{any}'",
                msg_args,
            ),
            error.TarballHTTP401 => std.fmt.allocPrint(
                bun.default_allocator,
                "HTTP 401 downloading package '{s}@{any}'",
                msg_args,
            ),
            error.TarballHTTP402 => std.fmt.allocPrint(
                bun.default_allocator,
                "HTTP 402 downloading package '{s}@{any}'",
                msg_args,
            ),
            error.TarballHTTP403 => std.fmt.allocPrint(
                bun.default_allocator,
                "HTTP 403 downloading package '{s}@{any}'",
                msg_args,
            ),
            error.TarballHTTP404 => std.fmt.allocPrint(
                bun.default_allocator,
                "HTTP 404 downloading package '{s}@{any}'",
                msg_args,
            ),
            error.TarballHTTP4xx => std.fmt.allocPrint(
                bun.default_allocator,
                "HTTP 4xx downloading package '{s}@{any}'",
                msg_args,
            ),
            error.TarballHTTP5xx => std.fmt.allocPrint(
                bun.default_allocator,
                "HTTP 5xx downloading package '{s}@{any}'",
                msg_args,
            ),
            error.TarballFailedToExtract => std.fmt.allocPrint(
                bun.default_allocator,
                "Failed to extract tarball for package '{s}@{any}'",
                msg_args,
            ),
            else => |err| std.fmt.allocPrint(
                bun.default_allocator,
                "{s} downloading package '{s}@{any}'",
                .{
                    bun.asByteSlice(@errorName(err)),
                    result.name,
                    result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items, .any),
                },
            ),
        };
        defer bun.default_allocator.free(msg);

        const name: []const u8 = switch (result.err) {
            error.TarballFailedToExtract => "PackageExtractionError",
            error.TarballHTTP403 => "TarballForbiddenError",
            error.TarballHTTP404 => "TarballNotFoundError",
            else => "TarballDownloadError",
        };

        var error_instance = ZigString.init(msg).withEncoding().toErrorInstance(globalThis);
        if (result.url.len > 0)
            error_instance.put(globalThis, ZigString.static("url"), ZigString.init(result.url).withEncoding().toJS(globalThis));
        error_instance.put(globalThis, ZigString.static("name"), ZigString.init(name).withEncoding().toJS(globalThis));
        error_instance.put(globalThis, ZigString.static("pkg"), ZigString.init(result.name).withEncoding().toJS(globalThis));
        if (this.specifier.len > 0 and !strings.eqlComptime(this.specifier, "undefined")) {
            error_instance.put(globalThis, ZigString.static("referrer"), ZigString.init(this.specifier).withEncoding().toJS(globalThis));
        }

        const location = logger.rangeData(&this.parse_result.source, this.parse_result.ast.import_records.at(import_record_id).range, "").location.?;
        error_instance.put(globalThis, ZigString.static("specifier"), ZigString.init(
            this.parse_result.ast.import_records.at(import_record_id).path.text,
        ).withEncoding().toJS(globalThis));
        error_instance.put(globalThis, ZigString.static("sourceURL"), ZigString.init(this.parse_result.source.path.text).withEncoding().toJS(globalThis));
        error_instance.put(globalThis, ZigString.static("line"), JSValue.jsNumber(location.line));
        if (location.line_text) |line_text| {
            error_instance.put(globalThis, ZigString.static("lineText"), ZigString.init(line_text).withEncoding().toJS(globalThis));
        }
        error_instance.put(globalThis, ZigString.static("column"), JSValue.jsNumber(location.column));

        const promise_value = this.promise.swap();
        var promise = promise_value.asInternalPromise().?;
        promise_value.ensureStillAlive();
        this.poll_ref.unref(vm);
        this.deinit();
        promise.rejectAsHandled(globalThis, error_instance);
    }

    /// Finish loading now that dependencies are installed: re-link the parse
    /// result, print it (with source maps), optionally register the file with
    /// the watcher, and return the ResolvedSource for JSC. Temporarily
    /// redirects the transpiler/resolver/package-manager logs into `log`.
    pub fn resumeLoadingModule(this: *AsyncModule, log: *logger.Log) !ResolvedSource {
        debug("resumeLoadingModule: {s}", .{this.specifier});
        var parse_result = this.parse_result;
        const path = this.path;
        var jsc_vm = VirtualMachine.get();
        const specifier = this.specifier;
        const old_log = jsc_vm.log;

        jsc_vm.transpiler.linker.log = log;
        jsc_vm.transpiler.log = log;
        jsc_vm.transpiler.resolver.log = log;
        jsc_vm.packageManager().log = log;
        defer {
            jsc_vm.transpiler.linker.log = old_log;
            jsc_vm.transpiler.log = old_log;
            jsc_vm.transpiler.resolver.log = old_log;
            jsc_vm.packageManager().log = old_log;
        }

        // We _must_ link because:
        // - node_modules bundle won't be properly
        try jsc_vm.transpiler.linker.link(
            path,
            &parse_result,
            jsc_vm.origin,
            .absolute_path,
            false,
            true,
        );
        this.parse_result = parse_result;

        var printer = VirtualMachine.source_code_printer.?.*;
        printer.ctx.reset();

        {
            var mapper = jsc_vm.sourceMapHandler(&printer);
            defer VirtualMachine.source_code_printer.?.* = printer;
            _ = try jsc_vm.transpiler.printWithSourceMap(
                parse_result,
                @TypeOf(&printer),
                &printer,
                .esm_ascii,
                mapper.get(),
            );
        }

        if (comptime Environment.dump_source) {
            dumpSource(jsc_vm, specifier, &printer);
        }

        if (jsc_vm.isWatcherEnabled()) {
            var resolved_source = jsc_vm.refCountedResolvedSource(printer.ctx.written, bun.String.init(specifier), path.text, null, false);

            if (parse_result.input_fd) |fd_| {
                // Only watch real on-disk files outside node_modules.
                if (std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) {
                    _ = jsc_vm.bun_watcher.addFile(
                        fd_,
                        path.text,
                        this.hash,
                        options.Loader.fromAPI(this.loader),
                        .invalid,
                        this.package_json,
                        true,
                    );
                }
            }

            resolved_source.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs;

            return resolved_source;
        }

        return ResolvedSource{
            .allocator = null,
            .source_code = bun.String.cloneLatin1(printer.ctx.getWritten()),
            .specifier = String.init(specifier),
            .source_url = String.init(path.text),
            .is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs,
        };
    }

    /// Release everything this module owns: the promise ref, the parse
    /// result, the arena (and its heap pointer), and the shared string buffer.
    /// Does NOT free `this` itself — callers that heap-allocated it destroy it.
    pub fn deinit(this: *AsyncModule) void {
        this.promise.deinit();
        this.parse_result.deinit();
        this.arena.deinit();
        this.globalThis.bunVM().allocator.destroy(this.arena);
        // bun.default_allocator.free(this.stmt_blocks);
        // bun.default_allocator.free(this.expr_blocks);
        bun.default_allocator.free(this.string_buf);
    }

    /// C++ binding that settles the pending module promise inside JSC.
    extern "c" fn Bun__onFulfillAsyncModule(
        globalObject: *JSGlobalObject,
        promiseValue: JSValue,
        res: *jsc.ErrorableResolvedSource,
        specifier: *bun.String,
        referrer: *bun.String,
    ) void;
};
const Dependency = @import("../install/dependency.zig");
const Fs = @import("../fs.zig");
const options = @import("../options.zig");
const std = @import("std");
const PackageJSON = @import("../resolver/package_json.zig").PackageJSON;
const dumpSource = @import("./RuntimeTranspilerStore.zig").dumpSource;
const Install = @import("../install/install.zig");
const PackageManager = @import("../install/install.zig").PackageManager;
const bun = @import("bun");
const Async = bun.Async;
const Environment = bun.Environment;
const Output = bun.Output;
const StoredFileDescriptorType = bun.StoredFileDescriptorType;
const String = bun.String;
const logger = bun.logger;
const strings = bun.strings;
const ParseResult = bun.transpiler.ParseResult;
const api = bun.schema.api;
const jsc = bun.jsc;
const JSGlobalObject = bun.jsc.JSGlobalObject;
const JSValue = bun.jsc.JSValue;
const ResolvedSource = bun.jsc.ResolvedSource;
const VirtualMachine = bun.jsc.VirtualMachine;
const ZigString = bun.jsc.ZigString;

View File

@@ -0,0 +1,431 @@
const string = []const u8;
pub const HardcodedModule = enum {
bun,
@"abort-controller",
@"bun:app",
@"bun:ffi",
@"bun:jsc",
@"bun:main",
@"bun:test",
@"bun:wrap",
@"bun:sqlite",
@"node:assert",
@"node:assert/strict",
@"node:async_hooks",
@"node:buffer",
@"node:child_process",
@"node:console",
@"node:constants",
@"node:crypto",
@"node:dns",
@"node:dns/promises",
@"node:domain",
@"node:events",
@"node:fs",
@"node:fs/promises",
@"node:http",
@"node:https",
@"node:module",
@"node:net",
@"node:os",
@"node:path",
@"node:path/posix",
@"node:path/win32",
@"node:perf_hooks",
@"node:process",
@"node:querystring",
@"node:readline",
@"node:readline/promises",
@"node:stream",
@"node:stream/consumers",
@"node:stream/promises",
@"node:stream/web",
@"node:string_decoder",
@"node:test",
@"node:timers",
@"node:timers/promises",
@"node:tls",
@"node:tty",
@"node:url",
@"node:util",
@"node:util/types",
@"node:vm",
@"node:wasi",
@"node:zlib",
@"node:worker_threads",
@"node:punycode",
undici,
ws,
@"isomorphic-fetch",
@"node-fetch",
vercel_fetch,
@"utf-8-validate",
@"node:v8",
@"node:trace_events",
@"node:repl",
@"node:inspector",
@"node:http2",
@"node:diagnostics_channel",
@"node:dgram",
@"node:cluster",
@"node:_stream_duplex",
@"node:_stream_passthrough",
@"node:_stream_readable",
@"node:_stream_transform",
@"node:_stream_wrap",
@"node:_stream_writable",
@"node:_tls_common",
@"node:_http_agent",
@"node:_http_client",
@"node:_http_common",
@"node:_http_incoming",
@"node:_http_outgoing",
@"node:_http_server",
/// This is gated behind '--expose-internals'
@"bun:internal-for-testing",
/// The module loader first uses `Aliases` to get a single string during
/// resolution, then maps that single string to the actual module.
/// Do not include aliases here; Those go in `Aliases`.
pub const map = bun.ComptimeStringMap(HardcodedModule, [_]struct { []const u8, HardcodedModule }{
// Bun
.{ "bun", .bun },
.{ "bun:app", .@"bun:app" },
.{ "bun:ffi", .@"bun:ffi" },
.{ "bun:jsc", .@"bun:jsc" },
.{ "bun:main", .@"bun:main" },
.{ "bun:test", .@"bun:test" },
.{ "bun:sqlite", .@"bun:sqlite" },
.{ "bun:wrap", .@"bun:wrap" },
.{ "bun:internal-for-testing", .@"bun:internal-for-testing" },
// Node.js
.{ "node:assert", .@"node:assert" },
.{ "node:assert/strict", .@"node:assert/strict" },
.{ "node:async_hooks", .@"node:async_hooks" },
.{ "node:buffer", .@"node:buffer" },
.{ "node:child_process", .@"node:child_process" },
.{ "node:cluster", .@"node:cluster" },
.{ "node:console", .@"node:console" },
.{ "node:constants", .@"node:constants" },
.{ "node:crypto", .@"node:crypto" },
.{ "node:dgram", .@"node:dgram" },
.{ "node:diagnostics_channel", .@"node:diagnostics_channel" },
.{ "node:dns", .@"node:dns" },
.{ "node:dns/promises", .@"node:dns/promises" },
.{ "node:domain", .@"node:domain" },
.{ "node:events", .@"node:events" },
.{ "node:fs", .@"node:fs" },
.{ "node:fs/promises", .@"node:fs/promises" },
.{ "node:http", .@"node:http" },
.{ "node:http2", .@"node:http2" },
.{ "node:https", .@"node:https" },
.{ "node:inspector", .@"node:inspector" },
.{ "node:module", .@"node:module" },
.{ "node:net", .@"node:net" },
.{ "node:readline", .@"node:readline" },
.{ "node:test", .@"node:test" },
.{ "node:os", .@"node:os" },
.{ "node:path", .@"node:path" },
.{ "node:path/posix", .@"node:path/posix" },
.{ "node:path/win32", .@"node:path/win32" },
.{ "node:perf_hooks", .@"node:perf_hooks" },
.{ "node:process", .@"node:process" },
.{ "node:punycode", .@"node:punycode" },
.{ "node:querystring", .@"node:querystring" },
.{ "node:readline/promises", .@"node:readline/promises" },
.{ "node:repl", .@"node:repl" },
.{ "node:stream", .@"node:stream" },
.{ "node:stream/consumers", .@"node:stream/consumers" },
.{ "node:stream/promises", .@"node:stream/promises" },
.{ "node:stream/web", .@"node:stream/web" },
.{ "node:string_decoder", .@"node:string_decoder" },
.{ "node:timers", .@"node:timers" },
.{ "node:timers/promises", .@"node:timers/promises" },
.{ "node:tls", .@"node:tls" },
.{ "node:trace_events", .@"node:trace_events" },
.{ "node:tty", .@"node:tty" },
.{ "node:url", .@"node:url" },
.{ "node:util", .@"node:util" },
.{ "node:util/types", .@"node:util/types" },
.{ "node:v8", .@"node:v8" },
.{ "node:vm", .@"node:vm" },
.{ "node:wasi", .@"node:wasi" },
.{ "node:worker_threads", .@"node:worker_threads" },
.{ "node:zlib", .@"node:zlib" },
.{ "node:_stream_duplex", .@"node:_stream_duplex" },
.{ "node:_stream_passthrough", .@"node:_stream_passthrough" },
.{ "node:_stream_readable", .@"node:_stream_readable" },
.{ "node:_stream_transform", .@"node:_stream_transform" },
.{ "node:_stream_wrap", .@"node:_stream_wrap" },
.{ "node:_stream_writable", .@"node:_stream_writable" },
.{ "node:_tls_common", .@"node:_tls_common" },
.{ "node:_http_agent", .@"node:_http_agent" },
.{ "node:_http_client", .@"node:_http_client" },
.{ "node:_http_common", .@"node:_http_common" },
.{ "node:_http_incoming", .@"node:_http_incoming" },
.{ "node:_http_outgoing", .@"node:_http_outgoing" },
.{ "node:_http_server", .@"node:_http_server" },
.{ "node-fetch", HardcodedModule.@"node-fetch" },
.{ "isomorphic-fetch", HardcodedModule.@"isomorphic-fetch" },
.{ "undici", HardcodedModule.undici },
.{ "ws", HardcodedModule.ws },
.{ "@vercel/fetch", HardcodedModule.vercel_fetch },
.{ "utf-8-validate", HardcodedModule.@"utf-8-validate" },
.{ "abort-controller", HardcodedModule.@"abort-controller" },
});
/// Contains the list of built-in modules from the perspective of the module
/// loader. This logic is duplicated for `isBuiltinModule` and the like.
pub const Alias = struct {
path: [:0]const u8,
tag: ImportRecord.Tag = .builtin,
node_builtin: bool = false,
node_only_prefix: bool = false,
fn nodeEntry(comptime path: [:0]const u8) struct { string, Alias } {
return .{
path,
.{
.path = if (path.len > 5 and std.mem.eql(u8, path[0..5], "node:")) path else "node:" ++ path,
.node_builtin = true,
},
};
}
fn nodeEntryOnlyPrefix(comptime path: [:0]const u8) struct { string, Alias } {
return .{
path,
.{
.path = if (path.len > 5 and std.mem.eql(u8, path[0..5], "node:")) path else "node:" ++ path,
.node_builtin = true,
.node_only_prefix = true,
},
};
}
fn entry(comptime path: [:0]const u8) struct { string, Alias } {
return .{ path, .{ .path = path } };
}
// Applied to both --target=bun and --target=node
const common_alias_kvs = [_]struct { string, Alias }{
    // "node:"-prefixed builtin specifiers.
    nodeEntry("node:assert"),
    nodeEntry("node:assert/strict"),
    nodeEntry("node:async_hooks"),
    nodeEntry("node:buffer"),
    nodeEntry("node:child_process"),
    nodeEntry("node:cluster"),
    nodeEntry("node:console"),
    nodeEntry("node:constants"),
    nodeEntry("node:crypto"),
    nodeEntry("node:dgram"),
    nodeEntry("node:diagnostics_channel"),
    nodeEntry("node:dns"),
    nodeEntry("node:dns/promises"),
    nodeEntry("node:domain"),
    nodeEntry("node:events"),
    nodeEntry("node:fs"),
    nodeEntry("node:fs/promises"),
    nodeEntry("node:http"),
    nodeEntry("node:http2"),
    nodeEntry("node:https"),
    nodeEntry("node:inspector"),
    nodeEntry("node:module"),
    nodeEntry("node:net"),
    nodeEntry("node:os"),
    nodeEntry("node:path"),
    nodeEntry("node:path/posix"),
    nodeEntry("node:path/win32"),
    nodeEntry("node:perf_hooks"),
    nodeEntry("node:process"),
    nodeEntry("node:punycode"),
    nodeEntry("node:querystring"),
    nodeEntry("node:readline"),
    nodeEntry("node:readline/promises"),
    nodeEntry("node:repl"),
    nodeEntry("node:stream"),
    nodeEntry("node:stream/consumers"),
    nodeEntry("node:stream/promises"),
    nodeEntry("node:stream/web"),
    nodeEntry("node:string_decoder"),
    nodeEntry("node:timers"),
    nodeEntry("node:timers/promises"),
    nodeEntry("node:tls"),
    nodeEntry("node:trace_events"),
    nodeEntry("node:tty"),
    nodeEntry("node:url"),
    nodeEntry("node:util"),
    nodeEntry("node:util/types"),
    nodeEntry("node:v8"),
    nodeEntry("node:vm"),
    nodeEntry("node:wasi"),
    nodeEntry("node:worker_threads"),
    nodeEntry("node:zlib"),
    // New Node.js builtins only resolve from the prefixed one.
    nodeEntryOnlyPrefix("node:test"),
    // Bare (unprefixed) specifiers resolve to the same builtins.
    nodeEntry("assert"),
    nodeEntry("assert/strict"),
    nodeEntry("async_hooks"),
    nodeEntry("buffer"),
    nodeEntry("child_process"),
    nodeEntry("cluster"),
    nodeEntry("console"),
    nodeEntry("constants"),
    nodeEntry("crypto"),
    nodeEntry("dgram"),
    nodeEntry("diagnostics_channel"),
    nodeEntry("dns"),
    nodeEntry("dns/promises"),
    nodeEntry("domain"),
    nodeEntry("events"),
    nodeEntry("fs"),
    nodeEntry("fs/promises"),
    nodeEntry("http"),
    nodeEntry("http2"),
    nodeEntry("https"),
    nodeEntry("inspector"),
    nodeEntry("module"),
    nodeEntry("net"),
    nodeEntry("os"),
    nodeEntry("path"),
    nodeEntry("path/posix"),
    nodeEntry("path/win32"),
    nodeEntry("perf_hooks"),
    nodeEntry("process"),
    nodeEntry("punycode"),
    nodeEntry("querystring"),
    nodeEntry("readline"),
    nodeEntry("readline/promises"),
    nodeEntry("repl"),
    nodeEntry("stream"),
    nodeEntry("stream/consumers"),
    nodeEntry("stream/promises"),
    nodeEntry("stream/web"),
    nodeEntry("string_decoder"),
    nodeEntry("timers"),
    nodeEntry("timers/promises"),
    nodeEntry("tls"),
    nodeEntry("trace_events"),
    nodeEntry("tty"),
    nodeEntry("url"),
    nodeEntry("util"),
    nodeEntry("util/types"),
    nodeEntry("v8"),
    nodeEntry("vm"),
    nodeEntry("wasi"),
    nodeEntry("worker_threads"),
    nodeEntry("zlib"),
    // Node's internal HTTP implementation modules, prefixed and bare.
    nodeEntry("node:_http_agent"),
    nodeEntry("node:_http_client"),
    nodeEntry("node:_http_common"),
    nodeEntry("node:_http_incoming"),
    nodeEntry("node:_http_outgoing"),
    nodeEntry("node:_http_server"),
    nodeEntry("_http_agent"),
    nodeEntry("_http_client"),
    nodeEntry("_http_common"),
    nodeEntry("_http_incoming"),
    nodeEntry("_http_outgoing"),
    nodeEntry("_http_server"),
    // sys is a deprecated alias for util
    .{ "sys", .{ .path = "node:util", .node_builtin = true } },
    .{ "node:sys", .{ .path = "node:util", .node_builtin = true } },
    // These are returned in builtinModules, but probably not many
    // packages use them so we will just alias them.
    .{ "node:_stream_duplex", .{ .path = "node:_stream_duplex", .node_builtin = true } },
    .{ "node:_stream_passthrough", .{ .path = "node:_stream_passthrough", .node_builtin = true } },
    .{ "node:_stream_readable", .{ .path = "node:_stream_readable", .node_builtin = true } },
    .{ "node:_stream_transform", .{ .path = "node:_stream_transform", .node_builtin = true } },
    .{ "node:_stream_wrap", .{ .path = "node:_stream_wrap", .node_builtin = true } },
    .{ "node:_stream_writable", .{ .path = "node:_stream_writable", .node_builtin = true } },
    // Note: _tls_wrap maps onto the public node:tls module.
    .{ "node:_tls_wrap", .{ .path = "node:tls", .node_builtin = true } },
    .{ "node:_tls_common", .{ .path = "node:_tls_common", .node_builtin = true } },
    .{ "_stream_duplex", .{ .path = "node:_stream_duplex", .node_builtin = true } },
    .{ "_stream_passthrough", .{ .path = "node:_stream_passthrough", .node_builtin = true } },
    .{ "_stream_readable", .{ .path = "node:_stream_readable", .node_builtin = true } },
    .{ "_stream_transform", .{ .path = "node:_stream_transform", .node_builtin = true } },
    .{ "_stream_wrap", .{ .path = "node:_stream_wrap", .node_builtin = true } },
    .{ "_stream_writable", .{ .path = "node:_stream_writable", .node_builtin = true } },
    .{ "_tls_wrap", .{ .path = "node:tls", .node_builtin = true } },
    .{ "_tls_common", .{ .path = "node:_tls_common", .node_builtin = true } },
};
/// Aliases that apply only when targeting Bun (`--target=bun`),
/// concatenated onto `common_alias_kvs` below.
const bun_extra_alias_kvs = [_]struct { string, Alias }{
    // Bun's own builtin modules.
    .{ "bun", .{ .path = "bun", .tag = .bun } },
    .{ "bun:test", .{ .path = "bun:test" } },
    .{ "bun:app", .{ .path = "bun:app" } },
    .{ "bun:ffi", .{ .path = "bun:ffi" } },
    .{ "bun:jsc", .{ .path = "bun:jsc" } },
    .{ "bun:sqlite", .{ .path = "bun:sqlite" } },
    .{ "bun:wrap", .{ .path = "bun:wrap" } },
    .{ "bun:internal-for-testing", .{ .path = "bun:internal-for-testing" } },
    // Bare "ffi" resolves to bun:ffi.
    .{ "ffi", .{ .path = "bun:ffi" } },
    // inspector/promises is not implemented, it is an alias of inspector
    .{ "node:inspector/promises", .{ .path = "node:inspector", .node_builtin = true } },
    .{ "inspector/promises", .{ .path = "node:inspector", .node_builtin = true } },
    // Thirdparty packages we override
    .{ "@vercel/fetch", .{ .path = "@vercel/fetch" } },
    .{ "isomorphic-fetch", .{ .path = "isomorphic-fetch" } },
    .{ "node-fetch", .{ .path = "node-fetch" } },
    .{ "undici", .{ .path = "undici" } },
    .{ "utf-8-validate", .{ .path = "utf-8-validate" } },
    .{ "ws", .{ .path = "ws" } },
    .{ "ws/lib/websocket", .{ .path = "ws" } },
    // Polyfills we force to native
    .{ "abort-controller", .{ .path = "abort-controller" } },
    .{ "abort-controller/polyfill", .{ .path = "abort-controller" } },
    // To force Next.js to not use bundled dependencies.
    .{ "next/dist/compiled/ws", .{ .path = "ws" } },
    .{ "next/dist/compiled/node-fetch", .{ .path = "node-fetch" } },
    .{ "next/dist/compiled/undici", .{ .path = "undici" } },
};
/// Test-framework remaps applied on top of the Bun aliases when
/// `Cfg.rewrite_jest_for_tests` is set: jest/vitest imports resolve
/// to bun:test.
const bun_test_extra_alias_kvs = [_]struct { string, Alias }{
    .{ "@jest/globals", .{ .path = "bun:test" } },
    .{ "vitest", .{ .path = "bun:test" } },
};
/// Aliases that apply only when targeting Node (`--target=node`).
/// Unlike the Bun target (which remaps these onto node:inspector),
/// the Node target keeps inspector/promises as its own builtin entry.
const node_extra_alias_kvs = [_]struct { string, Alias }{
    nodeEntry("node:inspector/promises"),
    nodeEntry("inspector/promises"),
};
/// Specifier → Alias table used when the target is Node.
const node_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ node_extra_alias_kvs);
/// Specifier → Alias table used when the target is Bun.
pub const bun_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ bun_extra_alias_kvs);
/// Bun table plus jest/vitest remaps (used when `rewrite_jest_for_tests` is set).
const bun_test_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ bun_extra_alias_kvs ++ bun_test_extra_alias_kvs);
const Cfg = struct { rewrite_jest_for_tests: bool = false };
/// True when `name` has a hardcoded alias for the given target/config.
pub fn has(name: []const u8, target: options.Target, cfg: Cfg) bool {
    return if (get(name, target, cfg)) |_| true else false;
}
/// Looks up `name` in the alias table for `target`.
/// Returns null when the target has no hardcoded alias for the specifier
/// (or when the target is neither Bun nor Node).
pub fn get(name: []const u8, target: options.Target, cfg: Cfg) ?Alias {
    if (target.isBun()) {
        // The two maps are distinct comptime types, so select per branch.
        return if (cfg.rewrite_jest_for_tests)
            bun_test_aliases.get(name)
        else
            bun_aliases.get(name);
    }
    if (target.isNode()) return node_aliases.get(name);
    return null;
}
};
};
const bun = @import("bun");
const options = @import("../options.zig");
const std = @import("std");
const ast = @import("../import_record.zig");
const ImportRecord = ast.ImportRecord;

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,626 @@
const debug = Output.scoped(.RuntimeTranspilerStore, .hidden);
const string = []const u8;
/// Debug helper: dumps the printer's current output buffer to disk
/// under `specifier`'s path (see `dumpSourceString`).
pub fn dumpSource(vm: *VirtualMachine, specifier: string, printer: anytype) void {
    const written = printer.ctx.getWritten();
    dumpSourceString(vm, specifier, written);
}
/// Best-effort wrapper around `dumpSourceStringFailiable`: any error is
/// downgraded to a debug warning instead of propagating.
pub fn dumpSourceString(vm: *VirtualMachine, specifier: string, written: []const u8) void {
    dumpSourceStringFailiable(vm, specifier, written) catch |err| {
        Output.debugWarn("Failed to dump source string: {}", .{err});
    };
}
/// Debug-build helper: writes the transpiled bytes `written` (and, when the
/// VM has registered source mappings for `specifier`, a source-map JSON
/// file next to it) into an on-disk dump directory, mirroring the
/// specifier's directory structure. No-op in release builds or when the
/// BUN_DEBUG_NO_DUMP feature flag is set.
pub fn dumpSourceStringFailiable(vm: *VirtualMachine, specifier: string, written: []const u8) !void {
    if (!Environment.isDebug) return;
    if (bun.feature_flag.BUN_DEBUG_NO_DUMP.get()) return;
    // Process-wide, lazily-opened dump directory handle. The mutex guards
    // first-time initialization; the handle is cached and never closed.
    const BunDebugHolder = struct {
        pub var dir: ?std.fs.Dir = null;
        pub var lock: bun.Mutex = .{};
    };
    BunDebugHolder.lock.lock();
    defer BunDebugHolder.lock.unlock();
    const dir = BunDebugHolder.dir orelse dir: {
        const base_name = switch (Environment.os) {
            else => "/tmp/bun-debug-src/",
            .windows => brk: {
                // Build "<platform temp dir>\bun-debug-src" (NUL-terminated)
                // in a stack buffer.
                const temp = bun.fs.FileSystem.RealFS.platformTempDir();
                var win_temp_buffer: bun.PathBuffer = undefined;
                @memcpy(win_temp_buffer[0..temp.len], temp);
                const suffix = "\\bun-debug-src";
                @memcpy(win_temp_buffer[temp.len .. temp.len + suffix.len], suffix);
                win_temp_buffer[temp.len + suffix.len] = 0;
                break :brk win_temp_buffer[0 .. temp.len + suffix.len :0];
            },
        };
        const dir = try std.fs.cwd().makeOpenPath(base_name, .{});
        BunDebugHolder.dir = dir;
        break :dir dir;
    };
    if (std.fs.path.dirname(specifier)) |dir_path| {
        // Recreate the specifier's directory tree under the dump dir,
        // stripping the filesystem root ("/" on POSIX, the drive/UNC root
        // on Windows) so the path stays relative.
        const root_len = switch (Environment.os) {
            else => "/".len,
            .windows => bun.path.windowsFilesystemRoot(dir_path).len,
        };
        var parent = try dir.makeOpenPath(dir_path[root_len..], .{});
        defer parent.close();
        parent.writeFile(.{
            .sub_path = std.fs.path.basename(specifier),
            .data = written,
        }) catch |e| {
            Output.debugWarn("Failed to dump source string: writeFile {}", .{e});
            return;
        };
        if (vm.source_mappings.get(specifier)) |mappings| {
            defer mappings.deref();
            // Write "<basename>.map" alongside the dumped source.
            const map_path = bun.handleOom(std.mem.concat(bun.default_allocator, u8, &.{ std.fs.path.basename(specifier), ".map" }));
            defer bun.default_allocator.free(map_path);
            const file = try parent.createFile(map_path, .{});
            defer file.close();
            // Embed the original source so the map is self-contained;
            // best-effort: falls back to an empty string on read failure.
            const source_file = parent.readFileAlloc(
                bun.default_allocator,
                specifier,
                std.math.maxInt(u64),
            ) catch "";
            defer bun.default_allocator.free(source_file);
            var bufw = std.io.bufferedWriter(file.writer());
            const w = bufw.writer();
            // Source Map v3 format.
            try w.print(
                \\{{
                \\ "version": 3,
                \\ "file": {},
                \\ "sourceRoot": "",
                \\ "sources": [{}],
                \\ "sourcesContent": [{}],
                \\ "names": [],
                \\ "mappings": "{}"
                \\}}
            , .{
                bun.fmt.formatJSONStringUTF8(std.fs.path.basename(specifier), .{}),
                bun.fmt.formatJSONStringUTF8(specifier, .{}),
                bun.fmt.formatJSONStringUTF8(source_file, .{}),
                mappings.formatVLQs(),
            });
            try bufw.flush();
        }
    } else {
        // No directory component: drop the file at the dump-dir root.
        dir.writeFile(.{
            .sub_path = std.fs.path.basename(specifier),
            .data = written,
        }) catch return;
    }
}
/// Returns true exactly once per process: the first caller observes
/// `true` and atomically disarms the flag, so every subsequent caller
/// observes `false`.
pub fn setBreakPointOnFirstLine() bool {
    const State = struct {
        var armed: std.atomic.Value(bool) = std.atomic.Value(bool).init(true);
    };
    return State.armed.swap(false, .seq_cst);
}
/// Off-main-thread module transpiler. `transpile` allocates a `TranspilerJob`,
/// schedules it on the shared `jsc.WorkPool`, and returns a `JSInternalPromise`
/// that is later settled on the JS thread (via `AsyncModule.fulfill` in
/// `TranspilerJob.runFromJSThread`).
pub const RuntimeTranspilerStore = struct {
    // Captured by each job at creation; a job whose captured generation no
    // longer matches aborts in `TranspilerJob.run` with
    // error.TranspilerJobGenerationMismatch.
    generation_number: std.atomic.Value(u32) = std.atomic.Value(u32).init(0),
    // Recycling pool of job allocations (see TranspilerJob.Store).
    store: TranspilerJob.Store,
    enabled: bool = true,
    // Jobs that finished on a worker thread and are waiting to be
    // fulfilled on the JS thread (pushed by dispatchToMainThread).
    queue: Queue = Queue{},
    pub const Queue = bun.UnboundedQueue(TranspilerJob, .next);
    pub fn init() RuntimeTranspilerStore {
        return RuntimeTranspilerStore{
            .store = TranspilerJob.Store.init(bun.typedAllocator(TranspilerJob)),
        };
    }
    /// Settles all queued finished jobs on the JS thread, draining
    /// microtasks between consecutive jobs.
    pub fn runFromJSThread(this: *RuntimeTranspilerStore, event_loop: *jsc.EventLoop, global: *jsc.JSGlobalObject, vm: *jsc.VirtualMachine) void {
        var batch = this.queue.popBatch();
        const jsc_vm = vm.jsc_vm;
        var iter = batch.iterator();
        if (iter.next()) |job| {
            // we run just one job first to see if there are more
            job.runFromJSThread() catch |err| global.reportUncaughtExceptionFromError(err);
        } else {
            return;
        }
        while (iter.next()) |job| {
            // if there are more, we need to drain the microtasks from the previous run
            event_loop.drainMicrotasksWithGlobal(global, jsc_vm) catch return;
            job.runFromJSThread() catch |err| global.reportUncaughtExceptionFromError(err);
        }
        // immediately after this is called, the microtasks will be drained again.
    }
    /// Schedules an async transpile of `path` and returns the pending
    /// `JSInternalPromise` as an opaque pointer. Takes ownership of
    /// `input_specifier` and `referrer` (deref'd when the job completes).
    pub fn transpile(
        this: *RuntimeTranspilerStore,
        vm: *VirtualMachine,
        globalObject: *JSGlobalObject,
        input_specifier: bun.String,
        path: Fs.Path,
        referrer: bun.String,
        loader: bun.options.Loader,
        package_json: ?*const PackageJSON,
    ) *anyopaque {
        var job: *TranspilerJob = this.store.get();
        // The worker thread outlives this call, so the job needs its own
        // copy of the path text (freed in TranspilerJob.deinit).
        const owned_path = Fs.Path.init(bun.default_allocator.dupe(u8, path.text) catch unreachable);
        const promise = jsc.JSInternalPromise.create(globalObject);
        // NOTE: DirInfo should already be cached since module loading happens
        // after module resolution, so this should be cheap
        var resolved_source = ResolvedSource{};
        if (package_json) |pkg| {
            // Seed the module-type tag from package.json's module type.
            switch (pkg.module_type) {
                .cjs => {
                    resolved_source.tag = .package_json_type_commonjs;
                    resolved_source.is_commonjs_module = true;
                },
                .esm => resolved_source.tag = .package_json_type_module,
                .unknown => {},
            }
        }
        job.* = TranspilerJob{
            .non_threadsafe_input_specifier = input_specifier,
            .path = owned_path,
            .globalThis = globalObject,
            .non_threadsafe_referrer = referrer,
            .vm = vm,
            .log = logger.Log.init(bun.default_allocator),
            .loader = loader,
            .promise = .create(JSValue.fromCell(promise), globalObject),
            .poll_ref = .{},
            .fetcher = TranspilerJob.Fetcher{
                .file = {},
            },
            .resolved_source = resolved_source,
            .generation_number = this.generation_number.load(.seq_cst),
        };
        if (comptime Environment.allow_assert)
            debug("transpile({s}, {s}, async)", .{ path.text, @tagName(job.loader) });
        job.schedule();
        return promise;
    }
    /// One scheduled transpile. Created on the JS thread (`transpile`),
    /// executed on a worker thread (`run`), then consumed back on the JS
    /// thread (`runFromJSThread`) and recycled into the store.
    pub const TranspilerJob = struct {
        path: Fs.Path,
        // The "non_threadsafe_" strings are only touched on the JS thread:
        // set in `transpile`, consumed in `runFromJSThread`; `run` (worker
        // thread) never reads them.
        non_threadsafe_input_specifier: String,
        non_threadsafe_referrer: String,
        loader: options.Loader,
        // Strong ref keeping the pending JSInternalPromise alive.
        promise: jsc.Strong.Optional = .empty,
        vm: *VirtualMachine,
        globalThis: *JSGlobalObject,
        fetcher: Fetcher,
        // Keeps the event loop alive while the job is in flight.
        poll_ref: Async.KeepAlive = .{},
        generation_number: u32 = 0,
        log: logger.Log,
        // Set by `run` on failure; checked in `runFromJSThread`.
        parse_error: ?anyerror = null,
        resolved_source: ResolvedSource = ResolvedSource{},
        work_task: jsc.WorkPoolTask = .{ .callback = runFromWorkerThread },
        // Intrusive link for RuntimeTranspilerStore.Queue.
        next: ?*TranspilerJob = null,
        pub const Store = bun.HiveArray(TranspilerJob, if (bun.heap_breakdown.enabled) 0 else 64).Fallback;
        pub const Fetcher = union(enum) {
            virtual_module: bun.String,
            file: void,
            pub fn deinit(this: *@This()) void {
                if (this.* == .virtual_module) {
                    this.virtual_module.deref();
                }
            }
        };
        /// Releases everything the job owns and resets fields so the
        /// allocation can be recycled by the store.
        pub fn deinit(this: *TranspilerJob) void {
            bun.default_allocator.free(this.path.text);
            this.poll_ref.disable();
            this.fetcher.deinit();
            this.loader = options.Loader.file;
            this.non_threadsafe_input_specifier.deref();
            this.non_threadsafe_referrer.deref();
            this.path = Fs.Path.empty;
            this.log.deinit();
            this.promise.deinit();
            this.globalThis = undefined;
        }
        // Per-worker-thread caches reused across jobs.
        threadlocal var ast_memory_store: ?*js_ast.ASTMemoryAllocator = null;
        threadlocal var source_code_printer: ?*js_printer.BufferPrinter = null;
        /// Hands the finished job back to the JS thread's queue and wakes
        /// the event loop.
        pub fn dispatchToMainThread(this: *TranspilerJob) void {
            this.vm.transpiler_store.queue.push(this);
            this.vm.eventLoop().enqueueTaskConcurrent(jsc.ConcurrentTask.createFrom(&this.vm.transpiler_store));
        }
        /// JS-thread completion: moves the result out of the job, recycles
        /// the job into the store, then settles the promise via
        /// `AsyncModule.fulfill`.
        pub fn runFromJSThread(this: *TranspilerJob) bun.JSError!void {
            var vm = this.vm;
            const promise = this.promise.swap();
            const globalThis = this.globalThis;
            this.poll_ref.unref(vm);
            const referrer = this.non_threadsafe_referrer;
            this.non_threadsafe_referrer = String.empty;
            var log = this.log;
            this.log = logger.Log.init(bun.default_allocator);
            var resolved_source = this.resolved_source;
            const specifier = brk: {
                // On parse failure the path text stands in for the specifier.
                if (this.parse_error != null) {
                    break :brk bun.String.cloneUTF8(this.path.text);
                }
                const out = this.non_threadsafe_input_specifier;
                this.non_threadsafe_input_specifier = String.empty;
                bun.debugAssert(resolved_source.source_url.isEmpty());
                bun.debugAssert(resolved_source.specifier.isEmpty());
                resolved_source.source_url = out.createIfDifferent(this.path.text);
                resolved_source.specifier = out.dupeRef();
                break :brk out;
            };
            const parse_error = this.parse_error;
            // Recycle the job before fulfilling: fulfill may re-enter JS.
            this.promise.deinit();
            this.deinit();
            _ = vm.transpiler_store.store.put(this);
            try AsyncModule.fulfill(globalThis, promise, &resolved_source, parse_error, specifier, referrer, &log);
        }
        pub fn schedule(this: *TranspilerJob) void {
            this.poll_ref.ref(this.vm);
            jsc.WorkPool.schedule(&this.work_task);
        }
        pub fn runFromWorkerThread(work_task: *jsc.WorkPoolTask) void {
            @as(*TranspilerJob, @fieldParentPtr("work_task", work_task)).run();
        }
        /// Worker-thread body: parses, transforms, and prints the module,
        /// leaving either `resolved_source` or `parse_error` on the job.
        /// Always dispatches back to the JS thread on exit.
        pub fn run(this: *TranspilerJob) void {
            var arena = bun.ArenaAllocator.init(bun.default_allocator);
            defer arena.deinit();
            const allocator = arena.allocator();
            defer this.dispatchToMainThread();
            // Abort stale jobs whose generation no longer matches the store.
            if (this.generation_number != this.vm.transpiler_store.generation_number.load(.monotonic)) {
                this.parse_error = error.TranspilerJobGenerationMismatch;
                return;
            }
            // Lazily create this worker thread's AST allocator.
            if (ast_memory_store == null) {
                ast_memory_store = bun.handleOom(bun.default_allocator.create(js_ast.ASTMemoryAllocator));
                ast_memory_store.?.* = js_ast.ASTMemoryAllocator{
                    .allocator = allocator,
                    .previous = null,
                };
            }
            var ast_scope = ast_memory_store.?.enter(allocator);
            defer ast_scope.exit();
            const path = this.path;
            const specifier = this.path.text;
            const loader = this.loader;
            var cache = jsc.RuntimeTranspilerCache{
                .output_code_allocator = allocator,
                .sourcemap_allocator = bun.default_allocator,
            };
            var log = logger.Log.init(allocator);
            defer {
                // Copy arena-backed log messages into a default-allocator log
                // that survives the arena's deinit.
                this.log = logger.Log.init(bun.default_allocator);
                bun.handleOom(log.cloneToWithRecycled(&this.log, true));
            }
            var vm = this.vm;
            // Shallow per-job copy of the VM's transpiler so allocator/log
            // can be swapped without touching shared state.
            var transpiler: bun.Transpiler = undefined;
            transpiler = vm.transpiler;
            transpiler.setAllocator(allocator);
            transpiler.setLog(&log);
            transpiler.resolver.opts = transpiler.options;
            transpiler.macro_context = null;
            transpiler.linker.resolver = &transpiler.resolver;
            var fd: ?StoredFileDescriptorType = null;
            var package_json: ?*PackageJSON = null;
            const hash = bun.Watcher.getHash(path.text);
            // Reuse the watcher's fd/package.json when this file is watched.
            switch (vm.bun_watcher) {
                .hot, .watch => {
                    if (vm.bun_watcher.indexOf(hash)) |index| {
                        const watcher_fd = vm.bun_watcher.watchlist().items(.fd)[index];
                        fd = if (watcher_fd.stdioTag() == null) watcher_fd else null;
                        package_json = vm.bun_watcher.watchlist().items(.package_json)[index];
                    }
                },
                else => {},
            }
            // this should be a cheap lookup because 24 bytes == 8 * 3 so it's read 3 machine words
            const is_node_override = strings.hasPrefixComptime(specifier, node_fallbacks.import_path);
            const macro_remappings = if (vm.macro_mode or !vm.has_any_macro_remappings or is_node_override)
                MacroRemap{}
            else
                transpiler.options.macro_remap;
            var fallback_source: logger.Source = undefined;
            // Usually, we want to close the input file automatically.
            //
            // If we're re-using the file descriptor from the fs watcher
            // Do not close it because that will break the kqueue-based watcher
            //
            var should_close_input_file_fd = fd == null;
            var input_file_fd: StoredFileDescriptorType = .invalid;
            const is_main = vm.main.len == path.text.len and
                vm.main_hash == hash and
                strings.eqlLong(vm.main, path.text, false);
            const module_type: ModuleType = switch (this.resolved_source.tag) {
                .package_json_type_commonjs => .cjs,
                .package_json_type_module => .esm,
                else => .unknown,
            };
            var parse_options = Transpiler.ParseOptions{
                .allocator = allocator,
                .path = path,
                .loader = loader,
                .dirname_fd = .invalid,
                .file_descriptor = fd,
                .file_fd_ptr = &input_file_fd,
                .file_hash = hash,
                .macro_remappings = macro_remappings,
                .jsx = transpiler.options.jsx,
                .emit_decorator_metadata = transpiler.options.emit_decorator_metadata,
                .virtual_source = null,
                .dont_bundle_twice = true,
                .allow_commonjs = true,
                .inject_jest_globals = transpiler.options.rewrite_jest_for_tests,
                .set_breakpoint_on_first_line = vm.debugger != null and
                    vm.debugger.?.set_breakpoint_on_first_line and
                    is_main and
                    setBreakPointOnFirstLine(),
                .runtime_transpiler_cache = if (!jsc.RuntimeTranspilerCache.is_disabled) &cache else null,
                .remove_cjs_module_wrapper = is_main and vm.module_loader.eval_source != null,
                .module_type = module_type,
                .allow_bytecode_cache = true,
            };
            defer {
                if (should_close_input_file_fd and input_file_fd.isValid()) {
                    input_file_fd.close();
                    input_file_fd = .invalid;
                }
            }
            // Node fallback modules are parsed from embedded sources.
            if (is_node_override) {
                if (node_fallbacks.contentsFromPath(specifier)) |code| {
                    const fallback_path = Fs.Path.initWithNamespace(specifier, "node");
                    fallback_source = logger.Source{ .path = fallback_path, .contents = code };
                    parse_options.virtual_source = &fallback_source;
                }
            }
            var parse_result: bun.transpiler.ParseResult = transpiler.parseMaybeReturnFileOnlyAllowSharedBuffer(
                parse_options,
                null,
                false,
                false,
            ) orelse {
                // Parse failed: still register the file with the watcher
                // (keeping its fd open) before reporting the error.
                if (vm.isWatcherEnabled()) {
                    if (input_file_fd.isValid()) {
                        if (!is_node_override and std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) {
                            should_close_input_file_fd = false;
                            _ = vm.bun_watcher.addFile(
                                input_file_fd,
                                path.text,
                                hash,
                                loader,
                                .invalid,
                                package_json,
                                true,
                            );
                        }
                    }
                }
                this.parse_error = error.ParseError;
                return;
            };
            if (vm.isWatcherEnabled()) {
                if (input_file_fd.isValid()) {
                    if (!is_node_override and
                        std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules"))
                    {
                        should_close_input_file_fd = false;
                        _ = vm.bun_watcher.addFile(
                            input_file_fd,
                            path.text,
                            hash,
                            loader,
                            .invalid,
                            package_json,
                            true,
                        );
                    }
                }
            }
            // Transpiler-cache hit: use the cached output and sourcemap,
            // skipping printing entirely.
            if (cache.entry) |*entry| {
                vm.source_mappings.putMappings(&parse_result.source, .{
                    .list = .{ .items = @constCast(entry.sourcemap), .capacity = entry.sourcemap.len },
                    .allocator = bun.default_allocator,
                }) catch {};
                if (comptime Environment.dump_source) {
                    dumpSourceString(vm, specifier, entry.output_code.byteSlice());
                }
                this.resolved_source = ResolvedSource{
                    .allocator = null,
                    .source_code = switch (entry.output_code) {
                        .string => entry.output_code.string,
                        .utf8 => brk: {
                            const result = bun.String.cloneUTF8(entry.output_code.utf8);
                            cache.output_code_allocator.free(entry.output_code.utf8);
                            entry.output_code.utf8 = "";
                            break :brk result;
                        },
                    },
                    .is_commonjs_module = entry.metadata.module_type == .cjs,
                    .tag = this.resolved_source.tag,
                };
                return;
            }
            // Pre-bundled input (e.g. with embedded bytecode): pass through.
            if (parse_result.already_bundled != .none) {
                const bytecode_slice = parse_result.already_bundled.bytecodeSlice();
                this.resolved_source = ResolvedSource{
                    .allocator = null,
                    .source_code = bun.String.cloneLatin1(parse_result.source.contents),
                    .already_bundled = true,
                    .bytecode_cache = if (bytecode_slice.len > 0) bytecode_slice.ptr else null,
                    .bytecode_cache_size = bytecode_slice.len,
                    .is_commonjs_module = parse_result.already_bundled.isCommonJS(),
                    .tag = this.resolved_source.tag,
                };
                this.resolved_source.source_code.ensureHash();
                return;
            }
            // Rewrite import records that point at hardcoded/builtin modules.
            for (parse_result.ast.import_records.slice()) |*import_record_| {
                var import_record: *bun.ImportRecord = import_record_;
                if (HardcodedModule.Alias.get(import_record.path.text, transpiler.options.target, .{ .rewrite_jest_for_tests = transpiler.options.rewrite_jest_for_tests })) |replacement| {
                    import_record.path.text = replacement.path;
                    import_record.tag = replacement.tag;
                    import_record.is_external_without_side_effects = true;
                    continue;
                }
                if (strings.hasPrefixComptime(import_record.path.text, "bun:")) {
                    import_record.path = Fs.Path.init(import_record.path.text["bun:".len..]);
                    import_record.path.namespace = "bun";
                    import_record.is_external_without_side_effects = true;
                }
            }
            // Lazily create this worker thread's printer.
            if (source_code_printer == null) {
                const writer = js_printer.BufferWriter.init(bun.default_allocator);
                source_code_printer = bun.default_allocator.create(js_printer.BufferPrinter) catch unreachable;
                source_code_printer.?.* = js_printer.BufferPrinter.init(writer);
                source_code_printer.?.ctx.append_null_byte = false;
            }
            var printer = source_code_printer.?.*;
            printer.ctx.reset();
            // Cap buffer size to prevent unbounded growth
            const max_buffer_cap = 512 * 1024;
            if (printer.ctx.buffer.list.capacity > max_buffer_cap) {
                printer.ctx.buffer.deinit();
                const writer = js_printer.BufferWriter.init(bun.default_allocator);
                source_code_printer.?.* = js_printer.BufferPrinter.init(writer);
                source_code_printer.?.ctx.append_null_byte = false;
                printer = source_code_printer.?.*;
            }
            {
                var mapper = vm.sourceMapHandler(&printer);
                defer source_code_printer.?.* = printer;
                _ = transpiler.printWithSourceMap(
                    parse_result,
                    @TypeOf(&printer),
                    &printer,
                    .esm_ascii,
                    mapper.get(),
                ) catch |err| {
                    this.parse_error = err;
                    return;
                };
            }
            if (comptime Environment.dump_source) {
                dumpSource(this.vm, specifier, &printer);
            }
            const source_code = brk: {
                const written = printer.ctx.getWritten();
                const result = cache.output_code orelse bun.String.cloneLatin1(written);
                // Release oversized buffers instead of caching them.
                if (written.len > 1024 * 1024 * 2 or vm.smol) {
                    printer.ctx.buffer.deinit();
                    const writer = js_printer.BufferWriter.init(bun.default_allocator);
                    source_code_printer.?.* = js_printer.BufferPrinter.init(writer);
                    source_code_printer.?.ctx.append_null_byte = false;
                } else {
                    source_code_printer.?.* = printer;
                }
                // In a benchmarking loading @babel/standalone 100 times:
                //
                // After ensureHash:
                // 354.00 ms 4.2% 354.00 ms WTF::StringImpl::hashSlowCase() const
                //
                // Before ensureHash:
                // 506.00 ms 6.1% 506.00 ms WTF::StringImpl::hashSlowCase() const
                //
                result.ensureHash();
                break :brk result;
            };
            this.resolved_source = ResolvedSource{
                .allocator = null,
                .source_code = source_code,
                .is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs,
                .tag = this.resolved_source.tag,
            };
        }
    };
};
const Fs = @import("../fs.zig");
const node_fallbacks = @import("../node_fallbacks.zig");
const std = @import("std");
const AsyncModule = @import("./AsyncModule.zig").AsyncModule;
const HardcodedModule = @import("./HardcodedModule.zig").HardcodedModule;
const options = @import("../options.zig");
const ModuleType = options.ModuleType;
const MacroRemap = @import("../resolver/package_json.zig").MacroMap;
const PackageJSON = @import("../resolver/package_json.zig").PackageJSON;
const bun = @import("bun");
const Async = bun.Async;
const Environment = bun.Environment;
const Output = bun.Output;
const StoredFileDescriptorType = bun.StoredFileDescriptorType;
const String = bun.String;
const Transpiler = bun.Transpiler;
const js_ast = bun.ast;
const js_printer = bun.js_printer;
const logger = bun.logger;
const strings = bun.strings;
const jsc = bun.jsc;
const JSGlobalObject = bun.jsc.JSGlobalObject;
const JSValue = bun.jsc.JSValue;
const ResolvedSource = bun.jsc.ResolvedSource;
const VirtualMachine = bun.jsc.VirtualMachine;