From 0fba69d50cb704bbb32bcd3fe4e38c69707763ac Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Fri, 24 Oct 2025 23:42:20 -0700 Subject: [PATCH 001/102] Add some internal deprecation @compileError messages --- src/CLAUDE.md | 1 - src/bun.zig | 3 +++ src/main.zig | 47 +++++++++++++++++++++++++---------------------- 3 files changed, 28 insertions(+), 23 deletions(-) diff --git a/src/CLAUDE.md b/src/CLAUDE.md index 21b296e7f1..7b394aa69f 100644 --- a/src/CLAUDE.md +++ b/src/CLAUDE.md @@ -8,5 +8,4 @@ Syntax reminders: Conventions: - Prefer `@import` at the **bottom** of the file, but the auto formatter will move them so you don't need to worry about it. -- Prefer `@import("bun")`. Not `@import("root").bun` or `@import("../bun.zig")`. - You must be patient with the build. diff --git a/src/bun.zig b/src/bun.zig index f1f13acbef..a4c57dcebf 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -3770,3 +3770,6 @@ const CopyFile = @import("./copy_file.zig"); const builtin = @import("builtin"); const std = @import("std"); const Allocator = std.mem.Allocator; + +// Claude thinks its bun.JSC when we renamed it to bun.jsc months ago. +pub const JSC = @compileError("Deprecated: Use @import(\"bun\").jsc instead"); diff --git a/src/main.zig b/src/main.zig index d07a0630c6..cc3856e4f7 100644 --- a/src/main.zig +++ b/src/main.zig @@ -1,4 +1,4 @@ -pub const panic = bun.crash_handler.panic; +pub const panic = _bun.crash_handler.panic; pub const std_options = std.Options{ .enable_segfault_handler = false, }; @@ -6,7 +6,7 @@ pub const std_options = std.Options{ pub const io_mode = .blocking; comptime { - bun.assert(builtin.target.cpu.arch.endian() == .little); + _bun.assert(builtin.target.cpu.arch.endian() == .little); } extern fn bun_warn_avx_missing(url: [*:0]const u8) void; @@ -15,7 +15,7 @@ pub extern "c" var _environ: ?*anyopaque; pub extern "c" var environ: ?*anyopaque; pub fn main() void { - bun.crash_handler.init(); + _bun.crash_handler.init(); if (Environment.isPosix) { var act: std.posix.Sigaction = .{ @@ -28,38 +28,38 @@ pub fn main() void { } if (Environment.isDebug) { - bun.debug_allocator_data.backing = .init; + _bun.debug_allocator_data.backing = .init; } // This should appear before we make any calls at all to libuv. // So it's safest to put it very early in the main function. 
if (Environment.isWindows) { - _ = bun.windows.libuv.uv_replace_allocator( - &bun.mimalloc.mi_malloc, - &bun.mimalloc.mi_realloc, - &bun.mimalloc.mi_calloc, - &bun.mimalloc.mi_free, + _ = _bun.windows.libuv.uv_replace_allocator( + &_bun.mimalloc.mi_malloc, + &_bun.mimalloc.mi_realloc, + &_bun.mimalloc.mi_calloc, + &_bun.mimalloc.mi_free, ); - bun.handleOom(bun.windows.env.convertEnvToWTF8()); + _bun.handleOom(_bun.windows.env.convertEnvToWTF8()); environ = @ptrCast(std.os.environ.ptr); _environ = @ptrCast(std.os.environ.ptr); } - bun.start_time = std.time.nanoTimestamp(); - bun.initArgv(bun.default_allocator) catch |err| { + _bun.start_time = std.time.nanoTimestamp(); + _bun.initArgv(_bun.default_allocator) catch |err| { Output.panic("Failed to initialize argv: {s}\n", .{@errorName(err)}); }; Output.Source.Stdio.init(); defer Output.flush(); if (Environment.isX64 and Environment.enableSIMD and Environment.isPosix) { - bun_warn_avx_missing(bun.cli.UpgradeCommand.Bun__githubBaselineURL.ptr); + bun_warn_avx_missing(_bun.cli.UpgradeCommand.Bun__githubBaselineURL.ptr); } - bun.StackCheck.configureThread(); + _bun.StackCheck.configureThread(); - bun.cli.Cli.start(bun.default_allocator); - bun.Global.exit(0); + _bun.cli.Cli.start(_bun.default_allocator); + _bun.Global.exit(0); } pub export fn Bun__panic(msg: [*]const u8, len: usize) noreturn { @@ -71,22 +71,25 @@ pub fn copyForwards(comptime T: type, dest: []T, source: []const T) void { if (source.len == 0) { return; } - bun.copy(T, dest[0..source.len], source); + _bun.copy(T, dest[0..source.len], source); } pub fn copyBackwards(comptime T: type, dest: []T, source: []const T) void { if (source.len == 0) { return; } - bun.copy(T, dest[0..source.len], source); + _bun.copy(T, dest[0..source.len], source); } pub fn eqlBytes(src: []const u8, dest: []const u8) bool { - return bun.c.memcmp(src.ptr, dest.ptr, src.len) == 0; + return _bun.c.memcmp(src.ptr, dest.ptr, src.len) == 0; } // -- End Zig Standard Library Additions -- const builtin = @import("builtin"); const std = @import("std"); -const bun = @import("bun"); -const Environment = bun.Environment; -const Output = bun.Output; +// Claude thinks its @import("root").bun when it's @import("bun"). +const bun = @compileError("Deprecated: Use @import(\"bun\") instead"); + +const _bun = @import("bun"); +const Environment = _bun.Environment; +const Output = _bun.Output; From d2c284242037520f239903565a8b4021bb881526 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 25 Oct 2025 00:05:28 -0700 Subject: [PATCH 002/102] Autoformat --- src/bun.zig | 6 +++--- src/main.zig | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/bun.zig b/src/bun.zig index a4c57dcebf..a1230783c9 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -3766,10 +3766,10 @@ pub fn getUseSystemCA(globalObject: *jsc.JSGlobalObject, callFrame: *jsc.CallFra return jsc.JSValue.jsBoolean(Arguments.Bun__Node__UseSystemCA); } +// Claude thinks its bun.JSC when we renamed it to bun.jsc months ago. +pub const JSC = @compileError("Deprecated: Use @import(\"bun\").jsc instead"); + const CopyFile = @import("./copy_file.zig"); const builtin = @import("builtin"); const std = @import("std"); const Allocator = std.mem.Allocator; - -// Claude thinks its bun.JSC when we renamed it to bun.jsc months ago. 
-pub const JSC = @compileError("Deprecated: Use @import(\"bun\").jsc instead"); diff --git a/src/main.zig b/src/main.zig index cc3856e4f7..f0ee3cd83a 100644 --- a/src/main.zig +++ b/src/main.zig @@ -84,12 +84,12 @@ pub fn eqlBytes(src: []const u8, dest: []const u8) bool { } // -- End Zig Standard Library Additions -- -const builtin = @import("builtin"); -const std = @import("std"); - // Claude thinks its @import("root").bun when it's @import("bun"). const bun = @compileError("Deprecated: Use @import(\"bun\") instead"); +const builtin = @import("builtin"); +const std = @import("std"); + const _bun = @import("bun"); const Environment = _bun.Environment; const Output = _bun.Output; From fb1fbe62e6151ea6e0e9430f714dbb0c30adca6c Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Sat, 25 Oct 2025 14:52:34 -0800 Subject: [PATCH 003/102] ci: update alpine linux to 3.22 (#24052) [publish images] --- .buildkite/ci.mjs | 14 +++++++------- dockerhub/alpine/Dockerfile | 4 ++-- package.json | 2 +- scripts/bootstrap.sh | 3 +-- 4 files changed, 11 insertions(+), 12 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 6d27bb7e65..5d3423b0a3 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -108,9 +108,9 @@ const buildPlatforms = [ { os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] }, { os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] }, { os: "linux", arch: "x64", profile: "asan", distro: "amazonlinux", release: "2023", features: ["docker"] }, - { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21" }, - { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21" }, - { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21" }, + { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.22" }, + { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.22" }, + { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22" }, { os: "windows", arch: "x64", release: "2019" }, { os: "windows", arch: "x64", baseline: true, release: "2019" }, ]; @@ -133,9 +133,9 @@ const testPlatforms = [ { os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" }, { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "25.04", tier: "latest" }, { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" }, - { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" }, - { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" }, - { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21", tier: "latest" }, + { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.22", tier: "latest" }, + { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.22", tier: "latest" }, + { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22", tier: "latest" }, { os: "windows", arch: "x64", release: "2019", tier: "oldest" }, { os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" }, ]; @@ -343,7 +343,7 @@ function getZigPlatform() { arch: "aarch64", abi: "musl", distro: "alpine", - release: "3.21", + release: "3.22", }; } diff --git a/dockerhub/alpine/Dockerfile b/dockerhub/alpine/Dockerfile index 8d1ecbaddd..4d5a01876f 100644 --- 
a/dockerhub/alpine/Dockerfile +++ b/dockerhub/alpine/Dockerfile @@ -1,4 +1,4 @@ -FROM alpine:3.20 AS build +FROM alpine:3.22 AS build # https://github.com/oven-sh/bun/releases ARG BUN_VERSION=latest @@ -44,7 +44,7 @@ RUN apk --no-cache add ca-certificates curl dirmngr gpg gpg-agent unzip \ && rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \ && chmod +x /usr/local/bin/bun -FROM alpine:3.20 +FROM alpine:3.22 # Disable the runtime transpiler cache by default inside Docker containers. # On ephemeral containers, the cache is not useful diff --git a/package.json b/package.json index bc4df314a6..c0fcee4b5f 100644 --- a/package.json +++ b/package.json @@ -86,7 +86,7 @@ "clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true", "machine:linux:ubuntu": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=ubuntu --release=25.04", "machine:linux:debian": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=debian --release=12", - "machine:linux:alpine": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=alpine --release=3.21", + "machine:linux:alpine": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=alpine --release=3.22", "machine:linux:amazonlinux": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=amazonlinux --release=2023", "machine:windows:2019": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=windows --release=2019", "sync-webkit-source": "bun ./scripts/sync-webkit-source.ts" diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 3537285e05..ebda5460ea 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -1060,12 +1060,11 @@ install_llvm() { install_packages "llvm@$(llvm_version)" ;; apk) - # alpine doesn't have a lld19 package on 3.21 atm so use bare one for now install_packages \ "llvm$(llvm_version)" \ "clang$(llvm_version)" \ "scudo-malloc" \ - "lld" \ + "lld$(llvm_version)" \ "llvm$(llvm_version)-dev" # Ensures llvm-symbolizer is installed ;; esac From a2b262ed69402238ec1623b716ea9cb6fade7861 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Sat, 25 Oct 2025 14:53:02 -0800 Subject: [PATCH 004/102] ci: update bun version to 1.3.1 (#24053) [publish images] --- scripts/bootstrap.ps1 | 2 +- scripts/bootstrap.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/bootstrap.ps1 b/scripts/bootstrap.ps1 index 9b3cf40315..f5ddf5026d 100755 --- a/scripts/bootstrap.ps1 +++ b/scripts/bootstrap.ps1 @@ -244,7 +244,7 @@ function Install-NodeJs { } function Install-Bun { - Install-Package bun -Version "1.2.17" + Install-Package bun -Version "1.3.1" } function Install-Cygwin { diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index ebda5460ea..62cd622cc6 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -907,7 +907,7 @@ setup_node_gyp_cache() { } bun_version_exact() { - print "1.2.17" + print "1.3.1" } install_bun() { From 3367fa6ae360ec7b3e38cf413e248fc0ec598327 Mon Sep 17 00:00:00 2001 From: robobun Date: Sat, 25 Oct 2025 20:43:02 -0700 Subject: [PATCH 005/102] Refactor: Extract ModuleLoader components into separate files (#24083) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Split `ModuleLoader.zig` into smaller, more focused modules for better 
code organization and maintainability: - `AsyncModule` → `src/bun.js/AsyncModule.zig` (lines 69-806) - `RuntimeTranspilerStore` → `src/bun.js/RuntimeTranspilerStore.zig` (lines 2028-2606) - `HardcodedModule` → `src/bun.js/HardcodedModule.zig` (lines 2618-3040) ## Changes - Extracted three large components from `ModuleLoader.zig` into separate files - Updated imports in all affected files - Made necessary functions/constants public (`dumpSource`, `dumpSourceString`, `setBreakPointOnFirstLine`, `bun_aliases`) - Updated `ModuleLoader.zig` to import the new modules ## Testing - Build passes successfully (`bun bd`) - Basic module loading verified with smoke tests - Existing resolve tests continue to pass 🤖 Generated with [Claude Code](https://claude.com/claude-code) --------- Co-authored-by: Claude Bot Co-authored-by: Claude Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- src/bun.js/AsyncModule.zig | 781 +++++++++++ src/bun.js/HardcodedModule.zig | 431 ++++++ src/bun.js/ModuleLoader.zig | 1759 +------------------------ src/bun.js/RuntimeTranspilerStore.zig | 626 +++++++++ 4 files changed, 1846 insertions(+), 1751 deletions(-) create mode 100644 src/bun.js/AsyncModule.zig create mode 100644 src/bun.js/HardcodedModule.zig create mode 100644 src/bun.js/RuntimeTranspilerStore.zig diff --git a/src/bun.js/AsyncModule.zig b/src/bun.js/AsyncModule.zig new file mode 100644 index 0000000000..7cc369fc3a --- /dev/null +++ b/src/bun.js/AsyncModule.zig @@ -0,0 +1,781 @@ +const debug = Output.scoped(.AsyncModule, .hidden); + +const string = []const u8; + +pub const AsyncModule = struct { + // This is all the state used by the printer to print the module + parse_result: ParseResult, + promise: jsc.Strong.Optional = .empty, + path: Fs.Path, + specifier: string = "", + referrer: string = "", + string_buf: []u8 = &[_]u8{}, + fd: ?StoredFileDescriptorType = null, + package_json: ?*PackageJSON = null, + loader: api.Loader, + hash: u32 = std.math.maxInt(u32), + globalThis: *JSGlobalObject = undefined, + arena: *bun.ArenaAllocator, + + // This is the specific state for making it async + poll_ref: Async.KeepAlive = .{}, + any_task: jsc.AnyTask = undefined, + + pub const Id = u32; + + const PackageDownloadError = struct { + name: []const u8, + resolution: Install.Resolution, + err: anyerror, + url: []const u8, + }; + + const PackageResolveError = struct { + name: []const u8, + err: anyerror, + url: []const u8, + version: Dependency.Version, + }; + + pub const Queue = struct { + map: Map = .{}, + scheduled: u32 = 0, + concurrent_task_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), + + const DeferredDependencyError = struct { + dependency: Dependency, + root_dependency_id: Install.DependencyID, + err: anyerror, + }; + + pub const Map = std.ArrayListUnmanaged(AsyncModule); + + pub fn enqueue(this: *Queue, globalObject: *JSGlobalObject, opts: anytype) void { + debug("enqueue: {s}", .{opts.specifier}); + var module = AsyncModule.init(opts, globalObject) catch unreachable; + module.poll_ref.ref(this.vm()); + + this.map.append(this.vm().allocator, module) catch unreachable; + this.vm().packageManager().drainDependencyList(); + } + + pub fn onDependencyError(ctx: *anyopaque, dependency: Dependency, root_dependency_id: Install.DependencyID, err: anyerror) void { + var this = bun.cast(*Queue, ctx); + debug("onDependencyError: {s}", .{this.vm().packageManager().lockfile.str(&dependency.name)}); + + var modules: []AsyncModule = this.map.items; + var i: usize = 0; + outer: for 
(modules) |module_| { + var module = module_; + const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id); + for (root_dependency_ids, 0..) |dep, dep_i| { + if (dep != root_dependency_id) continue; + module.resolveError( + this.vm(), + module.parse_result.pending_imports.items(.import_record_id)[dep_i], + .{ + .name = this.vm().packageManager().lockfile.str(&dependency.name), + .err = err, + .url = "", + .version = dependency.version, + }, + ) catch unreachable; + continue :outer; + } + + modules[i] = module; + i += 1; + } + this.map.items.len = i; + } + pub fn onWakeHandler(ctx: *anyopaque, _: *PackageManager) void { + debug("onWake", .{}); + var this = bun.cast(*Queue, ctx); + this.vm().enqueueTaskConcurrent(jsc.ConcurrentTask.createFrom(this)); + } + + pub fn onPoll(this: *Queue) void { + debug("onPoll", .{}); + this.runTasks(); + this.pollModules(); + } + + pub fn runTasks(this: *Queue) void { + var pm = this.vm().packageManager(); + + if (Output.enable_ansi_colors_stderr) { + pm.startProgressBarIfNone(); + pm.runTasks( + *Queue, + this, + .{ + .onExtract = {}, + .onResolve = onResolve, + .onPackageManifestError = onPackageManifestError, + .onPackageDownloadError = onPackageDownloadError, + .progress_bar = true, + }, + true, + PackageManager.Options.LogLevel.default, + ) catch unreachable; + } else { + pm.runTasks( + *Queue, + this, + .{ + .onExtract = {}, + .onResolve = onResolve, + .onPackageManifestError = onPackageManifestError, + .onPackageDownloadError = onPackageDownloadError, + }, + true, + PackageManager.Options.LogLevel.default_no_progress, + ) catch unreachable; + } + } + + pub fn onResolve(_: *Queue) void { + debug("onResolve", .{}); + } + + pub fn onPackageManifestError( + this: *Queue, + name: []const u8, + err: anyerror, + url: []const u8, + ) void { + debug("onPackageManifestError: {s}", .{name}); + + var modules: []AsyncModule = this.map.items; + var i: usize = 0; + outer: for (modules) |module_| { + var module = module_; + const tags = module.parse_result.pending_imports.items(.tag); + for (tags, 0..) |tag, tag_i| { + if (tag == .resolve) { + const esms = module.parse_result.pending_imports.items(.esm); + const esm = esms[tag_i]; + const string_bufs = module.parse_result.pending_imports.items(.string_buf); + + if (!strings.eql(esm.name.slice(string_bufs[tag_i]), name)) continue; + + const versions = module.parse_result.pending_imports.items(.dependency); + + module.resolveError( + this.vm(), + module.parse_result.pending_imports.items(.import_record_id)[tag_i], + .{ + .name = name, + .err = err, + .url = url, + .version = versions[tag_i], + }, + ) catch unreachable; + continue :outer; + } + } + + modules[i] = module; + i += 1; + } + this.map.items.len = i; + } + + pub fn onPackageDownloadError( + this: *Queue, + package_id: Install.PackageID, + name: []const u8, + resolution: *const Install.Resolution, + err: anyerror, + url: []const u8, + ) void { + debug("onPackageDownloadError: {s}", .{name}); + + const resolution_ids = this.vm().packageManager().lockfile.buffers.resolutions.items; + var modules: []AsyncModule = this.map.items; + var i: usize = 0; + outer: for (modules) |module_| { + var module = module_; + const record_ids = module.parse_result.pending_imports.items(.import_record_id); + const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id); + for (root_dependency_ids, 0..) 
|dependency_id, import_id| { + if (resolution_ids[dependency_id] != package_id) continue; + module.downloadError( + this.vm(), + record_ids[import_id], + .{ + .name = name, + .resolution = resolution.*, + .err = err, + .url = url, + }, + ) catch unreachable; + continue :outer; + } + + modules[i] = module; + i += 1; + } + this.map.items.len = i; + } + + pub fn pollModules(this: *Queue) void { + var pm = this.vm().packageManager(); + if (pm.pending_tasks.load(.monotonic) > 0) return; + + var modules: []AsyncModule = this.map.items; + var i: usize = 0; + + for (modules) |mod| { + var module = mod; + var tags = module.parse_result.pending_imports.items(.tag); + const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id); + // var esms = module.parse_result.pending_imports.items(.esm); + // var versions = module.parse_result.pending_imports.items(.dependency); + var done_count: usize = 0; + for (tags, 0..) |tag, tag_i| { + const root_id = root_dependency_ids[tag_i]; + const resolution_ids = pm.lockfile.buffers.resolutions.items; + if (root_id >= resolution_ids.len) continue; + const package_id = resolution_ids[root_id]; + + switch (tag) { + .resolve => { + if (package_id == Install.invalid_package_id) { + continue; + } + + // if we get here, the package has already been resolved. + tags[tag_i] = .download; + }, + .download => { + if (package_id == Install.invalid_package_id) { + unreachable; + } + }, + .done => { + done_count += 1; + continue; + }, + } + + if (package_id == Install.invalid_package_id) { + continue; + } + + const package = pm.lockfile.packages.get(package_id); + bun.assert(package.resolution.tag != .root); + + var name_and_version_hash: ?u64 = null; + var patchfile_hash: ?u64 = null; + switch (pm.determinePreinstallState(package, pm.lockfile, &name_and_version_hash, &patchfile_hash)) { + .done => { + // we are only truly done if all the dependencies are done. + const current_tasks = pm.total_tasks; + // so if enqueuing all the dependencies produces no new tasks, we are done. 
+ pm.enqueueDependencyList(package.dependencies); + if (current_tasks == pm.total_tasks) { + tags[tag_i] = .done; + done_count += 1; + } + }, + .extracting => { + // we are extracting the package + // we need to wait for the next poll + continue; + }, + .extract => {}, + else => {}, + } + } + + if (done_count == tags.len) { + module.done(this.vm()); + } else { + modules[i] = module; + i += 1; + } + } + this.map.items.len = i; + if (i == 0) { + // ensure we always end the progress bar + this.vm().packageManager().endProgressBar(); + } + } + + pub fn vm(this: *Queue) *VirtualMachine { + return @alignCast(@fieldParentPtr("modules", this)); + } + + comptime { + // Ensure VirtualMachine has a field named "modules" of the correct type + // If this fails, the @fieldParentPtr in vm() above needs to be updated + const VM = @import("./VirtualMachine.zig"); + if (!@hasField(VM, "modules")) { + @compileError("VirtualMachine must have a 'modules' field for AsyncModule.Queue.vm() to work"); + } + } + }; + + pub fn init(opts: anytype, globalObject: *JSGlobalObject) !AsyncModule { + // var stmt_blocks = js_ast.Stmt.Data.toOwnedSlice(); + // var expr_blocks = js_ast.Expr.Data.toOwnedSlice(); + const this_promise = JSValue.createInternalPromise(globalObject); + const promise = jsc.Strong.Optional.create(this_promise, globalObject); + + var buf = bun.StringBuilder{}; + buf.count(opts.referrer); + buf.count(opts.specifier); + buf.count(opts.path.text); + + try buf.allocate(bun.default_allocator); + opts.promise_ptr.?.* = this_promise.asInternalPromise().?; + const referrer = buf.append(opts.referrer); + const specifier = buf.append(opts.specifier); + const path = Fs.Path.init(buf.append(opts.path.text)); + + return AsyncModule{ + .parse_result = opts.parse_result, + .promise = promise, + .path = path, + .specifier = specifier, + .referrer = referrer, + .fd = opts.fd, + .package_json = opts.package_json, + .loader = opts.loader.toAPI(), + .string_buf = buf.allocatedSlice(), + // .stmt_blocks = stmt_blocks, + // .expr_blocks = expr_blocks, + .globalThis = globalObject, + .arena = opts.arena, + }; + } + + pub fn done(this: *AsyncModule, jsc_vm: *VirtualMachine) void { + var clone = jsc_vm.allocator.create(AsyncModule) catch unreachable; + clone.* = this.*; + jsc_vm.modules.scheduled += 1; + clone.any_task = jsc.AnyTask.New(AsyncModule, onDone).init(clone); + jsc_vm.enqueueTask(jsc.Task.init(&clone.any_task)); + } + + pub fn onDone(this: *AsyncModule) void { + jsc.markBinding(@src()); + var jsc_vm = this.globalThis.bunVM(); + jsc_vm.modules.scheduled -= 1; + if (jsc_vm.modules.scheduled == 0) { + jsc_vm.packageManager().endProgressBar(); + } + var log = logger.Log.init(jsc_vm.allocator); + defer log.deinit(); + var errorable: jsc.ErrorableResolvedSource = undefined; + this.poll_ref.unref(jsc_vm); + outer: { + errorable = jsc.ErrorableResolvedSource.ok(this.resumeLoadingModule(&log) catch |err| { + switch (err) { + error.JSError => { + errorable = .err(error.JSError, this.globalThis.takeError(error.JSError)); + break :outer; + }, + else => { + VirtualMachine.processFetchLog( + this.globalThis, + bun.String.init(this.specifier), + bun.String.init(this.referrer), + &log, + &errorable, + err, + ); + break :outer; + }, + } + }); + } + + var spec = bun.String.init(ZigString.init(this.specifier).withEncoding()); + var ref = bun.String.init(ZigString.init(this.referrer).withEncoding()); + bun.jsc.fromJSHostCallGeneric(this.globalThis, @src(), Bun__onFulfillAsyncModule, .{ + this.globalThis, + this.promise.get().?, + 
&errorable, + &spec, + &ref, + }) catch {}; + this.deinit(); + jsc_vm.allocator.destroy(this); + } + + pub fn fulfill( + globalThis: *JSGlobalObject, + promise: JSValue, + resolved_source: *ResolvedSource, + err: ?anyerror, + specifier_: bun.String, + referrer_: bun.String, + log: *logger.Log, + ) bun.JSError!void { + jsc.markBinding(@src()); + var specifier = specifier_; + var referrer = referrer_; + var scope: jsc.CatchScope = undefined; + scope.init(globalThis, @src()); + defer { + specifier.deref(); + referrer.deref(); + scope.deinit(); + } + + var errorable: jsc.ErrorableResolvedSource = undefined; + if (err) |e| { + defer { + if (resolved_source.source_code_needs_deref) { + resolved_source.source_code_needs_deref = false; + resolved_source.source_code.deref(); + } + } + + if (e == error.JSError) { + errorable = jsc.ErrorableResolvedSource.err(error.JSError, globalThis.takeError(error.JSError)); + } else { + VirtualMachine.processFetchLog( + globalThis, + specifier, + referrer, + log, + &errorable, + e, + ); + } + } else { + errorable = jsc.ErrorableResolvedSource.ok(resolved_source.*); + } + log.deinit(); + + debug("fulfill: {any}", .{specifier}); + + try bun.jsc.fromJSHostCallGeneric(globalThis, @src(), Bun__onFulfillAsyncModule, .{ + globalThis, + promise, + &errorable, + &specifier, + &referrer, + }); + } + + pub fn resolveError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageResolveError) !void { + const globalThis = this.globalThis; + + const msg: []u8 = try switch (result.err) { + error.PackageManifestHTTP400 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 400 while resolving package '{s}' at '{s}'", + .{ result.name, result.url }, + ), + error.PackageManifestHTTP401 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 401 while resolving package '{s}' at '{s}'", + .{ result.name, result.url }, + ), + error.PackageManifestHTTP402 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 402 while resolving package '{s}' at '{s}'", + .{ result.name, result.url }, + ), + error.PackageManifestHTTP403 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 403 while resolving package '{s}' at '{s}'", + .{ result.name, result.url }, + ), + error.PackageManifestHTTP404 => std.fmt.allocPrint( + bun.default_allocator, + "Package '{s}' was not found", + .{result.name}, + ), + error.PackageManifestHTTP4xx => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 4xx while resolving package '{s}' at '{s}'", + .{ result.name, result.url }, + ), + error.PackageManifestHTTP5xx => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 5xx while resolving package '{s}' at '{s}'", + .{ result.name, result.url }, + ), + error.DistTagNotFound, error.NoMatchingVersion => brk: { + const prefix: []const u8 = if (result.err == error.NoMatchingVersion and result.version.tag == .npm and result.version.value.npm.version.isExact()) + "Version not found" + else if (result.version.tag == .npm and !result.version.value.npm.version.isExact()) + "No matching version found" + else + "No match found"; + + break :brk std.fmt.allocPrint( + bun.default_allocator, + "{s} '{s}' for package '{s}' (but package exists)", + .{ prefix, vm.packageManager().lockfile.str(&result.version.literal), result.name }, + ); + }, + else => |err| std.fmt.allocPrint( + bun.default_allocator, + "{s} resolving package '{s}' at '{s}'", + .{ bun.asByteSlice(@errorName(err)), result.name, result.url }, + ), + }; + defer bun.default_allocator.free(msg); + + const name: []const u8 = switch (result.err) { + 
error.NoMatchingVersion => "PackageVersionNotFound", + error.DistTagNotFound => "PackageTagNotFound", + error.PackageManifestHTTP403 => "PackageForbidden", + error.PackageManifestHTTP404 => "PackageNotFound", + else => "PackageResolveError", + }; + + var error_instance = ZigString.init(msg).withEncoding().toErrorInstance(globalThis); + if (result.url.len > 0) + error_instance.put(globalThis, ZigString.static("url"), ZigString.init(result.url).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("name"), ZigString.init(name).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("pkg"), ZigString.init(result.name).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("specifier"), ZigString.init(this.specifier).withEncoding().toJS(globalThis)); + const location = logger.rangeData(&this.parse_result.source, this.parse_result.ast.import_records.at(import_record_id).range, "").location.?; + error_instance.put(globalThis, ZigString.static("sourceURL"), ZigString.init(this.parse_result.source.path.text).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("line"), JSValue.jsNumber(location.line)); + if (location.line_text) |line_text| { + error_instance.put(globalThis, ZigString.static("lineText"), ZigString.init(line_text).withEncoding().toJS(globalThis)); + } + error_instance.put(globalThis, ZigString.static("column"), JSValue.jsNumber(location.column)); + if (this.referrer.len > 0 and !strings.eqlComptime(this.referrer, "undefined")) { + error_instance.put(globalThis, ZigString.static("referrer"), ZigString.init(this.referrer).withEncoding().toJS(globalThis)); + } + + const promise_value = this.promise.swap(); + var promise = promise_value.asInternalPromise().?; + promise_value.ensureStillAlive(); + this.poll_ref.unref(vm); + this.deinit(); + promise.rejectAsHandled(globalThis, error_instance); + } + pub fn downloadError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageDownloadError) !void { + const globalThis = this.globalThis; + + const msg_args = .{ + result.name, + result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items, .any), + }; + + const msg: []u8 = try switch (result.err) { + error.TarballHTTP400 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 400 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP401 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 401 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP402 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 402 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP403 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 403 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP404 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 404 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP4xx => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 4xx downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP5xx => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 5xx downloading package '{s}@{any}'", + msg_args, + ), + error.TarballFailedToExtract => std.fmt.allocPrint( + bun.default_allocator, + "Failed to extract tarball for package '{s}@{any}'", + msg_args, + ), + else => |err| std.fmt.allocPrint( + bun.default_allocator, + "{s} downloading package '{s}@{any}'", + .{ + bun.asByteSlice(@errorName(err)), + result.name, + 
result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items, .any), + }, + ), + }; + defer bun.default_allocator.free(msg); + + const name: []const u8 = switch (result.err) { + error.TarballFailedToExtract => "PackageExtractionError", + error.TarballHTTP403 => "TarballForbiddenError", + error.TarballHTTP404 => "TarballNotFoundError", + else => "TarballDownloadError", + }; + + var error_instance = ZigString.init(msg).withEncoding().toErrorInstance(globalThis); + if (result.url.len > 0) + error_instance.put(globalThis, ZigString.static("url"), ZigString.init(result.url).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("name"), ZigString.init(name).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("pkg"), ZigString.init(result.name).withEncoding().toJS(globalThis)); + if (this.specifier.len > 0 and !strings.eqlComptime(this.specifier, "undefined")) { + error_instance.put(globalThis, ZigString.static("referrer"), ZigString.init(this.specifier).withEncoding().toJS(globalThis)); + } + + const location = logger.rangeData(&this.parse_result.source, this.parse_result.ast.import_records.at(import_record_id).range, "").location.?; + error_instance.put(globalThis, ZigString.static("specifier"), ZigString.init( + this.parse_result.ast.import_records.at(import_record_id).path.text, + ).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("sourceURL"), ZigString.init(this.parse_result.source.path.text).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("line"), JSValue.jsNumber(location.line)); + if (location.line_text) |line_text| { + error_instance.put(globalThis, ZigString.static("lineText"), ZigString.init(line_text).withEncoding().toJS(globalThis)); + } + error_instance.put(globalThis, ZigString.static("column"), JSValue.jsNumber(location.column)); + + const promise_value = this.promise.swap(); + var promise = promise_value.asInternalPromise().?; + promise_value.ensureStillAlive(); + this.poll_ref.unref(vm); + this.deinit(); + promise.rejectAsHandled(globalThis, error_instance); + } + + pub fn resumeLoadingModule(this: *AsyncModule, log: *logger.Log) !ResolvedSource { + debug("resumeLoadingModule: {s}", .{this.specifier}); + var parse_result = this.parse_result; + const path = this.path; + var jsc_vm = VirtualMachine.get(); + const specifier = this.specifier; + const old_log = jsc_vm.log; + + jsc_vm.transpiler.linker.log = log; + jsc_vm.transpiler.log = log; + jsc_vm.transpiler.resolver.log = log; + jsc_vm.packageManager().log = log; + defer { + jsc_vm.transpiler.linker.log = old_log; + jsc_vm.transpiler.log = old_log; + jsc_vm.transpiler.resolver.log = old_log; + jsc_vm.packageManager().log = old_log; + } + + // We _must_ link because: + // - node_modules bundle won't be properly + try jsc_vm.transpiler.linker.link( + path, + &parse_result, + jsc_vm.origin, + .absolute_path, + false, + true, + ); + this.parse_result = parse_result; + + var printer = VirtualMachine.source_code_printer.?.*; + printer.ctx.reset(); + + { + var mapper = jsc_vm.sourceMapHandler(&printer); + defer VirtualMachine.source_code_printer.?.* = printer; + _ = try jsc_vm.transpiler.printWithSourceMap( + parse_result, + @TypeOf(&printer), + &printer, + .esm_ascii, + mapper.get(), + ); + } + + if (comptime Environment.dump_source) { + dumpSource(jsc_vm, specifier, &printer); + } + + if (jsc_vm.isWatcherEnabled()) { + var resolved_source = 
jsc_vm.refCountedResolvedSource(printer.ctx.written, bun.String.init(specifier), path.text, null, false); + + if (parse_result.input_fd) |fd_| { + if (std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) { + _ = jsc_vm.bun_watcher.addFile( + fd_, + path.text, + this.hash, + options.Loader.fromAPI(this.loader), + .invalid, + this.package_json, + true, + ); + } + } + + resolved_source.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs; + + return resolved_source; + } + + return ResolvedSource{ + .allocator = null, + .source_code = bun.String.cloneLatin1(printer.ctx.getWritten()), + .specifier = String.init(specifier), + .source_url = String.init(path.text), + .is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs, + }; + } + + pub fn deinit(this: *AsyncModule) void { + this.promise.deinit(); + this.parse_result.deinit(); + this.arena.deinit(); + this.globalThis.bunVM().allocator.destroy(this.arena); + // bun.default_allocator.free(this.stmt_blocks); + // bun.default_allocator.free(this.expr_blocks); + + bun.default_allocator.free(this.string_buf); + } + + extern "c" fn Bun__onFulfillAsyncModule( + globalObject: *JSGlobalObject, + promiseValue: JSValue, + res: *jsc.ErrorableResolvedSource, + specifier: *bun.String, + referrer: *bun.String, + ) void; +}; + +const Dependency = @import("../install/dependency.zig"); +const Fs = @import("../fs.zig"); +const options = @import("../options.zig"); +const std = @import("std"); +const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; +const dumpSource = @import("./RuntimeTranspilerStore.zig").dumpSource; + +const Install = @import("../install/install.zig"); +const PackageManager = @import("../install/install.zig").PackageManager; + +const bun = @import("bun"); +const Async = bun.Async; +const Environment = bun.Environment; +const Output = bun.Output; +const StoredFileDescriptorType = bun.StoredFileDescriptorType; +const String = bun.String; +const logger = bun.logger; +const strings = bun.strings; +const ParseResult = bun.transpiler.ParseResult; +const api = bun.schema.api; + +const jsc = bun.jsc; +const JSGlobalObject = bun.jsc.JSGlobalObject; +const JSValue = bun.jsc.JSValue; +const ResolvedSource = bun.jsc.ResolvedSource; +const VirtualMachine = bun.jsc.VirtualMachine; +const ZigString = bun.jsc.ZigString; diff --git a/src/bun.js/HardcodedModule.zig b/src/bun.js/HardcodedModule.zig new file mode 100644 index 0000000000..698b400fb7 --- /dev/null +++ b/src/bun.js/HardcodedModule.zig @@ -0,0 +1,431 @@ +const string = []const u8; + +pub const HardcodedModule = enum { + bun, + @"abort-controller", + @"bun:app", + @"bun:ffi", + @"bun:jsc", + @"bun:main", + @"bun:test", + @"bun:wrap", + @"bun:sqlite", + @"node:assert", + @"node:assert/strict", + @"node:async_hooks", + @"node:buffer", + @"node:child_process", + @"node:console", + @"node:constants", + @"node:crypto", + @"node:dns", + @"node:dns/promises", + @"node:domain", + @"node:events", + @"node:fs", + @"node:fs/promises", + @"node:http", + @"node:https", + @"node:module", + @"node:net", + @"node:os", + @"node:path", + @"node:path/posix", + @"node:path/win32", + @"node:perf_hooks", + @"node:process", + @"node:querystring", + @"node:readline", + @"node:readline/promises", + @"node:stream", + @"node:stream/consumers", + @"node:stream/promises", + @"node:stream/web", + @"node:string_decoder", + @"node:test", + @"node:timers", + @"node:timers/promises", + 
@"node:tls", + @"node:tty", + @"node:url", + @"node:util", + @"node:util/types", + @"node:vm", + @"node:wasi", + @"node:zlib", + @"node:worker_threads", + @"node:punycode", + undici, + ws, + @"isomorphic-fetch", + @"node-fetch", + vercel_fetch, + @"utf-8-validate", + @"node:v8", + @"node:trace_events", + @"node:repl", + @"node:inspector", + @"node:http2", + @"node:diagnostics_channel", + @"node:dgram", + @"node:cluster", + @"node:_stream_duplex", + @"node:_stream_passthrough", + @"node:_stream_readable", + @"node:_stream_transform", + @"node:_stream_wrap", + @"node:_stream_writable", + @"node:_tls_common", + @"node:_http_agent", + @"node:_http_client", + @"node:_http_common", + @"node:_http_incoming", + @"node:_http_outgoing", + @"node:_http_server", + /// This is gated behind '--expose-internals' + @"bun:internal-for-testing", + + /// The module loader first uses `Aliases` to get a single string during + /// resolution, then maps that single string to the actual module. + /// Do not include aliases here; Those go in `Aliases`. + pub const map = bun.ComptimeStringMap(HardcodedModule, [_]struct { []const u8, HardcodedModule }{ + // Bun + .{ "bun", .bun }, + .{ "bun:app", .@"bun:app" }, + .{ "bun:ffi", .@"bun:ffi" }, + .{ "bun:jsc", .@"bun:jsc" }, + .{ "bun:main", .@"bun:main" }, + .{ "bun:test", .@"bun:test" }, + .{ "bun:sqlite", .@"bun:sqlite" }, + .{ "bun:wrap", .@"bun:wrap" }, + .{ "bun:internal-for-testing", .@"bun:internal-for-testing" }, + // Node.js + .{ "node:assert", .@"node:assert" }, + .{ "node:assert/strict", .@"node:assert/strict" }, + .{ "node:async_hooks", .@"node:async_hooks" }, + .{ "node:buffer", .@"node:buffer" }, + .{ "node:child_process", .@"node:child_process" }, + .{ "node:cluster", .@"node:cluster" }, + .{ "node:console", .@"node:console" }, + .{ "node:constants", .@"node:constants" }, + .{ "node:crypto", .@"node:crypto" }, + .{ "node:dgram", .@"node:dgram" }, + .{ "node:diagnostics_channel", .@"node:diagnostics_channel" }, + .{ "node:dns", .@"node:dns" }, + .{ "node:dns/promises", .@"node:dns/promises" }, + .{ "node:domain", .@"node:domain" }, + .{ "node:events", .@"node:events" }, + .{ "node:fs", .@"node:fs" }, + .{ "node:fs/promises", .@"node:fs/promises" }, + .{ "node:http", .@"node:http" }, + .{ "node:http2", .@"node:http2" }, + .{ "node:https", .@"node:https" }, + .{ "node:inspector", .@"node:inspector" }, + .{ "node:module", .@"node:module" }, + .{ "node:net", .@"node:net" }, + .{ "node:readline", .@"node:readline" }, + .{ "node:test", .@"node:test" }, + .{ "node:os", .@"node:os" }, + .{ "node:path", .@"node:path" }, + .{ "node:path/posix", .@"node:path/posix" }, + .{ "node:path/win32", .@"node:path/win32" }, + .{ "node:perf_hooks", .@"node:perf_hooks" }, + .{ "node:process", .@"node:process" }, + .{ "node:punycode", .@"node:punycode" }, + .{ "node:querystring", .@"node:querystring" }, + .{ "node:readline/promises", .@"node:readline/promises" }, + .{ "node:repl", .@"node:repl" }, + .{ "node:stream", .@"node:stream" }, + .{ "node:stream/consumers", .@"node:stream/consumers" }, + .{ "node:stream/promises", .@"node:stream/promises" }, + .{ "node:stream/web", .@"node:stream/web" }, + .{ "node:string_decoder", .@"node:string_decoder" }, + .{ "node:timers", .@"node:timers" }, + .{ "node:timers/promises", .@"node:timers/promises" }, + .{ "node:tls", .@"node:tls" }, + .{ "node:trace_events", .@"node:trace_events" }, + .{ "node:tty", .@"node:tty" }, + .{ "node:url", .@"node:url" }, + .{ "node:util", .@"node:util" }, + .{ "node:util/types", .@"node:util/types" }, + .{ 
"node:v8", .@"node:v8" }, + .{ "node:vm", .@"node:vm" }, + .{ "node:wasi", .@"node:wasi" }, + .{ "node:worker_threads", .@"node:worker_threads" }, + .{ "node:zlib", .@"node:zlib" }, + .{ "node:_stream_duplex", .@"node:_stream_duplex" }, + .{ "node:_stream_passthrough", .@"node:_stream_passthrough" }, + .{ "node:_stream_readable", .@"node:_stream_readable" }, + .{ "node:_stream_transform", .@"node:_stream_transform" }, + .{ "node:_stream_wrap", .@"node:_stream_wrap" }, + .{ "node:_stream_writable", .@"node:_stream_writable" }, + .{ "node:_tls_common", .@"node:_tls_common" }, + .{ "node:_http_agent", .@"node:_http_agent" }, + .{ "node:_http_client", .@"node:_http_client" }, + .{ "node:_http_common", .@"node:_http_common" }, + .{ "node:_http_incoming", .@"node:_http_incoming" }, + .{ "node:_http_outgoing", .@"node:_http_outgoing" }, + .{ "node:_http_server", .@"node:_http_server" }, + + .{ "node-fetch", HardcodedModule.@"node-fetch" }, + .{ "isomorphic-fetch", HardcodedModule.@"isomorphic-fetch" }, + .{ "undici", HardcodedModule.undici }, + .{ "ws", HardcodedModule.ws }, + .{ "@vercel/fetch", HardcodedModule.vercel_fetch }, + .{ "utf-8-validate", HardcodedModule.@"utf-8-validate" }, + .{ "abort-controller", HardcodedModule.@"abort-controller" }, + }); + + /// Contains the list of built-in modules from the perspective of the module + /// loader. This logic is duplicated for `isBuiltinModule` and the like. + pub const Alias = struct { + path: [:0]const u8, + tag: ImportRecord.Tag = .builtin, + node_builtin: bool = false, + node_only_prefix: bool = false, + + fn nodeEntry(comptime path: [:0]const u8) struct { string, Alias } { + return .{ + path, + .{ + .path = if (path.len > 5 and std.mem.eql(u8, path[0..5], "node:")) path else "node:" ++ path, + .node_builtin = true, + }, + }; + } + fn nodeEntryOnlyPrefix(comptime path: [:0]const u8) struct { string, Alias } { + return .{ + path, + .{ + .path = if (path.len > 5 and std.mem.eql(u8, path[0..5], "node:")) path else "node:" ++ path, + .node_builtin = true, + .node_only_prefix = true, + }, + }; + } + fn entry(comptime path: [:0]const u8) struct { string, Alias } { + return .{ path, .{ .path = path } }; + } + + // Applied to both --target=bun and --target=node + const common_alias_kvs = [_]struct { string, Alias }{ + nodeEntry("node:assert"), + nodeEntry("node:assert/strict"), + nodeEntry("node:async_hooks"), + nodeEntry("node:buffer"), + nodeEntry("node:child_process"), + nodeEntry("node:cluster"), + nodeEntry("node:console"), + nodeEntry("node:constants"), + nodeEntry("node:crypto"), + nodeEntry("node:dgram"), + nodeEntry("node:diagnostics_channel"), + nodeEntry("node:dns"), + nodeEntry("node:dns/promises"), + nodeEntry("node:domain"), + nodeEntry("node:events"), + nodeEntry("node:fs"), + nodeEntry("node:fs/promises"), + nodeEntry("node:http"), + nodeEntry("node:http2"), + nodeEntry("node:https"), + nodeEntry("node:inspector"), + nodeEntry("node:module"), + nodeEntry("node:net"), + nodeEntry("node:os"), + nodeEntry("node:path"), + nodeEntry("node:path/posix"), + nodeEntry("node:path/win32"), + nodeEntry("node:perf_hooks"), + nodeEntry("node:process"), + nodeEntry("node:punycode"), + nodeEntry("node:querystring"), + nodeEntry("node:readline"), + nodeEntry("node:readline/promises"), + nodeEntry("node:repl"), + nodeEntry("node:stream"), + nodeEntry("node:stream/consumers"), + nodeEntry("node:stream/promises"), + nodeEntry("node:stream/web"), + nodeEntry("node:string_decoder"), + nodeEntry("node:timers"), + nodeEntry("node:timers/promises"), + 
nodeEntry("node:tls"), + nodeEntry("node:trace_events"), + nodeEntry("node:tty"), + nodeEntry("node:url"), + nodeEntry("node:util"), + nodeEntry("node:util/types"), + nodeEntry("node:v8"), + nodeEntry("node:vm"), + nodeEntry("node:wasi"), + nodeEntry("node:worker_threads"), + nodeEntry("node:zlib"), + // New Node.js builtins only resolve from the prefixed one. + nodeEntryOnlyPrefix("node:test"), + + nodeEntry("assert"), + nodeEntry("assert/strict"), + nodeEntry("async_hooks"), + nodeEntry("buffer"), + nodeEntry("child_process"), + nodeEntry("cluster"), + nodeEntry("console"), + nodeEntry("constants"), + nodeEntry("crypto"), + nodeEntry("dgram"), + nodeEntry("diagnostics_channel"), + nodeEntry("dns"), + nodeEntry("dns/promises"), + nodeEntry("domain"), + nodeEntry("events"), + nodeEntry("fs"), + nodeEntry("fs/promises"), + nodeEntry("http"), + nodeEntry("http2"), + nodeEntry("https"), + nodeEntry("inspector"), + nodeEntry("module"), + nodeEntry("net"), + nodeEntry("os"), + nodeEntry("path"), + nodeEntry("path/posix"), + nodeEntry("path/win32"), + nodeEntry("perf_hooks"), + nodeEntry("process"), + nodeEntry("punycode"), + nodeEntry("querystring"), + nodeEntry("readline"), + nodeEntry("readline/promises"), + nodeEntry("repl"), + nodeEntry("stream"), + nodeEntry("stream/consumers"), + nodeEntry("stream/promises"), + nodeEntry("stream/web"), + nodeEntry("string_decoder"), + nodeEntry("timers"), + nodeEntry("timers/promises"), + nodeEntry("tls"), + nodeEntry("trace_events"), + nodeEntry("tty"), + nodeEntry("url"), + nodeEntry("util"), + nodeEntry("util/types"), + nodeEntry("v8"), + nodeEntry("vm"), + nodeEntry("wasi"), + nodeEntry("worker_threads"), + nodeEntry("zlib"), + + nodeEntry("node:_http_agent"), + nodeEntry("node:_http_client"), + nodeEntry("node:_http_common"), + nodeEntry("node:_http_incoming"), + nodeEntry("node:_http_outgoing"), + nodeEntry("node:_http_server"), + + nodeEntry("_http_agent"), + nodeEntry("_http_client"), + nodeEntry("_http_common"), + nodeEntry("_http_incoming"), + nodeEntry("_http_outgoing"), + nodeEntry("_http_server"), + + // sys is a deprecated alias for util + .{ "sys", .{ .path = "node:util", .node_builtin = true } }, + .{ "node:sys", .{ .path = "node:util", .node_builtin = true } }, + + // These are returned in builtinModules, but probably not many + // packages use them so we will just alias them. 
+ .{ "node:_stream_duplex", .{ .path = "node:_stream_duplex", .node_builtin = true } }, + .{ "node:_stream_passthrough", .{ .path = "node:_stream_passthrough", .node_builtin = true } }, + .{ "node:_stream_readable", .{ .path = "node:_stream_readable", .node_builtin = true } }, + .{ "node:_stream_transform", .{ .path = "node:_stream_transform", .node_builtin = true } }, + .{ "node:_stream_wrap", .{ .path = "node:_stream_wrap", .node_builtin = true } }, + .{ "node:_stream_writable", .{ .path = "node:_stream_writable", .node_builtin = true } }, + .{ "node:_tls_wrap", .{ .path = "node:tls", .node_builtin = true } }, + .{ "node:_tls_common", .{ .path = "node:_tls_common", .node_builtin = true } }, + .{ "_stream_duplex", .{ .path = "node:_stream_duplex", .node_builtin = true } }, + .{ "_stream_passthrough", .{ .path = "node:_stream_passthrough", .node_builtin = true } }, + .{ "_stream_readable", .{ .path = "node:_stream_readable", .node_builtin = true } }, + .{ "_stream_transform", .{ .path = "node:_stream_transform", .node_builtin = true } }, + .{ "_stream_wrap", .{ .path = "node:_stream_wrap", .node_builtin = true } }, + .{ "_stream_writable", .{ .path = "node:_stream_writable", .node_builtin = true } }, + .{ "_tls_wrap", .{ .path = "node:tls", .node_builtin = true } }, + .{ "_tls_common", .{ .path = "node:_tls_common", .node_builtin = true } }, + }; + + const bun_extra_alias_kvs = [_]struct { string, Alias }{ + .{ "bun", .{ .path = "bun", .tag = .bun } }, + .{ "bun:test", .{ .path = "bun:test" } }, + .{ "bun:app", .{ .path = "bun:app" } }, + .{ "bun:ffi", .{ .path = "bun:ffi" } }, + .{ "bun:jsc", .{ .path = "bun:jsc" } }, + .{ "bun:sqlite", .{ .path = "bun:sqlite" } }, + .{ "bun:wrap", .{ .path = "bun:wrap" } }, + .{ "bun:internal-for-testing", .{ .path = "bun:internal-for-testing" } }, + .{ "ffi", .{ .path = "bun:ffi" } }, + + // inspector/promises is not implemented, it is an alias of inspector + .{ "node:inspector/promises", .{ .path = "node:inspector", .node_builtin = true } }, + .{ "inspector/promises", .{ .path = "node:inspector", .node_builtin = true } }, + + // Thirdparty packages we override + .{ "@vercel/fetch", .{ .path = "@vercel/fetch" } }, + .{ "isomorphic-fetch", .{ .path = "isomorphic-fetch" } }, + .{ "node-fetch", .{ .path = "node-fetch" } }, + .{ "undici", .{ .path = "undici" } }, + .{ "utf-8-validate", .{ .path = "utf-8-validate" } }, + .{ "ws", .{ .path = "ws" } }, + .{ "ws/lib/websocket", .{ .path = "ws" } }, + + // Polyfills we force to native + .{ "abort-controller", .{ .path = "abort-controller" } }, + .{ "abort-controller/polyfill", .{ .path = "abort-controller" } }, + + // To force Next.js to not use bundled dependencies. 
+ .{ "next/dist/compiled/ws", .{ .path = "ws" } }, + .{ "next/dist/compiled/node-fetch", .{ .path = "node-fetch" } }, + .{ "next/dist/compiled/undici", .{ .path = "undici" } }, + }; + + const bun_test_extra_alias_kvs = [_]struct { string, Alias }{ + .{ "@jest/globals", .{ .path = "bun:test" } }, + .{ "vitest", .{ .path = "bun:test" } }, + }; + + const node_extra_alias_kvs = [_]struct { string, Alias }{ + nodeEntry("node:inspector/promises"), + nodeEntry("inspector/promises"), + }; + + const node_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ node_extra_alias_kvs); + pub const bun_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ bun_extra_alias_kvs); + const bun_test_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ bun_extra_alias_kvs ++ bun_test_extra_alias_kvs); + + const Cfg = struct { rewrite_jest_for_tests: bool = false }; + pub fn has(name: []const u8, target: options.Target, cfg: Cfg) bool { + return get(name, target, cfg) != null; + } + + pub fn get(name: []const u8, target: options.Target, cfg: Cfg) ?Alias { + if (target.isBun()) { + if (cfg.rewrite_jest_for_tests) { + return bun_test_aliases.get(name); + } else { + return bun_aliases.get(name); + } + } else if (target.isNode()) { + return node_aliases.get(name); + } + return null; + } + }; +}; + +const bun = @import("bun"); +const options = @import("../options.zig"); +const std = @import("std"); + +const ast = @import("../import_record.zig"); +const ImportRecord = ast.ImportRecord; diff --git a/src/bun.js/ModuleLoader.zig b/src/bun.js/ModuleLoader.zig index d1ce74545a..1cfaaedc6a 100644 --- a/src/bun.js/ModuleLoader.zig +++ b/src/bun.js/ModuleLoader.zig @@ -1,6 +1,9 @@ const ModuleLoader = @This(); pub const node_fallbacks = @import("../node_fallbacks.zig"); +pub const AsyncModule = @import("./AsyncModule.zig").AsyncModule; +pub const RuntimeTranspilerStore = @import("./RuntimeTranspilerStore.zig").RuntimeTranspilerStore; +pub const HardcodedModule = @import("./HardcodedModule.zig").HardcodedModule; transpile_source_code_arena: ?*bun.ArenaAllocator = null, eval_source: ?*logger.Source = null, @@ -66,745 +69,6 @@ pub fn resolveEmbeddedFile(vm: *VirtualMachine, input_path: []const u8, extname: return bun.path.joinAbs(bun.fs.FileSystem.instance.fs.tmpdirPath(), .auto, tmpfilename); } -pub const AsyncModule = struct { - // This is all the state used by the printer to print the module - parse_result: ParseResult, - promise: jsc.Strong.Optional = .empty, - path: Fs.Path, - specifier: string = "", - referrer: string = "", - string_buf: []u8 = &[_]u8{}, - fd: ?StoredFileDescriptorType = null, - package_json: ?*PackageJSON = null, - loader: api.Loader, - hash: u32 = std.math.maxInt(u32), - globalThis: *JSGlobalObject = undefined, - arena: *bun.ArenaAllocator, - - // This is the specific state for making it async - poll_ref: Async.KeepAlive = .{}, - any_task: jsc.AnyTask = undefined, - - pub const Id = u32; - - const PackageDownloadError = struct { - name: []const u8, - resolution: Install.Resolution, - err: anyerror, - url: []const u8, - }; - - const PackageResolveError = struct { - name: []const u8, - err: anyerror, - url: []const u8, - version: Dependency.Version, - }; - - pub const Queue = struct { - map: Map = .{}, - scheduled: u32 = 0, - concurrent_task_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), - - const DeferredDependencyError = struct { - dependency: Dependency, - root_dependency_id: Install.DependencyID, - err: anyerror, - }; - - pub const Map = 
std.ArrayListUnmanaged(AsyncModule); - - pub fn enqueue(this: *Queue, globalObject: *JSGlobalObject, opts: anytype) void { - debug("enqueue: {s}", .{opts.specifier}); - var module = AsyncModule.init(opts, globalObject) catch unreachable; - module.poll_ref.ref(this.vm()); - - this.map.append(this.vm().allocator, module) catch unreachable; - this.vm().packageManager().drainDependencyList(); - } - - pub fn onDependencyError(ctx: *anyopaque, dependency: Dependency, root_dependency_id: Install.DependencyID, err: anyerror) void { - var this = bun.cast(*Queue, ctx); - debug("onDependencyError: {s}", .{this.vm().packageManager().lockfile.str(&dependency.name)}); - - var modules: []AsyncModule = this.map.items; - var i: usize = 0; - outer: for (modules) |module_| { - var module = module_; - const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id); - for (root_dependency_ids, 0..) |dep, dep_i| { - if (dep != root_dependency_id) continue; - module.resolveError( - this.vm(), - module.parse_result.pending_imports.items(.import_record_id)[dep_i], - .{ - .name = this.vm().packageManager().lockfile.str(&dependency.name), - .err = err, - .url = "", - .version = dependency.version, - }, - ) catch unreachable; - continue :outer; - } - - modules[i] = module; - i += 1; - } - this.map.items.len = i; - } - pub fn onWakeHandler(ctx: *anyopaque, _: *PackageManager) void { - debug("onWake", .{}); - var this = bun.cast(*Queue, ctx); - this.vm().enqueueTaskConcurrent(jsc.ConcurrentTask.createFrom(this)); - } - - pub fn onPoll(this: *Queue) void { - debug("onPoll", .{}); - this.runTasks(); - this.pollModules(); - } - - pub fn runTasks(this: *Queue) void { - var pm = this.vm().packageManager(); - - if (Output.enable_ansi_colors_stderr) { - pm.startProgressBarIfNone(); - pm.runTasks( - *Queue, - this, - .{ - .onExtract = {}, - .onResolve = onResolve, - .onPackageManifestError = onPackageManifestError, - .onPackageDownloadError = onPackageDownloadError, - .progress_bar = true, - }, - true, - PackageManager.Options.LogLevel.default, - ) catch unreachable; - } else { - pm.runTasks( - *Queue, - this, - .{ - .onExtract = {}, - .onResolve = onResolve, - .onPackageManifestError = onPackageManifestError, - .onPackageDownloadError = onPackageDownloadError, - }, - true, - PackageManager.Options.LogLevel.default_no_progress, - ) catch unreachable; - } - } - - pub fn onResolve(_: *Queue) void { - debug("onResolve", .{}); - } - - pub fn onPackageManifestError( - this: *Queue, - name: []const u8, - err: anyerror, - url: []const u8, - ) void { - debug("onPackageManifestError: {s}", .{name}); - - var modules: []AsyncModule = this.map.items; - var i: usize = 0; - outer: for (modules) |module_| { - var module = module_; - const tags = module.parse_result.pending_imports.items(.tag); - for (tags, 0..) 
|tag, tag_i| { - if (tag == .resolve) { - const esms = module.parse_result.pending_imports.items(.esm); - const esm = esms[tag_i]; - const string_bufs = module.parse_result.pending_imports.items(.string_buf); - - if (!strings.eql(esm.name.slice(string_bufs[tag_i]), name)) continue; - - const versions = module.parse_result.pending_imports.items(.dependency); - - module.resolveError( - this.vm(), - module.parse_result.pending_imports.items(.import_record_id)[tag_i], - .{ - .name = name, - .err = err, - .url = url, - .version = versions[tag_i], - }, - ) catch unreachable; - continue :outer; - } - } - - modules[i] = module; - i += 1; - } - this.map.items.len = i; - } - - pub fn onPackageDownloadError( - this: *Queue, - package_id: Install.PackageID, - name: []const u8, - resolution: *const Install.Resolution, - err: anyerror, - url: []const u8, - ) void { - debug("onPackageDownloadError: {s}", .{name}); - - const resolution_ids = this.vm().packageManager().lockfile.buffers.resolutions.items; - var modules: []AsyncModule = this.map.items; - var i: usize = 0; - outer: for (modules) |module_| { - var module = module_; - const record_ids = module.parse_result.pending_imports.items(.import_record_id); - const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id); - for (root_dependency_ids, 0..) |dependency_id, import_id| { - if (resolution_ids[dependency_id] != package_id) continue; - module.downloadError( - this.vm(), - record_ids[import_id], - .{ - .name = name, - .resolution = resolution.*, - .err = err, - .url = url, - }, - ) catch unreachable; - continue :outer; - } - - modules[i] = module; - i += 1; - } - this.map.items.len = i; - } - - pub fn pollModules(this: *Queue) void { - var pm = this.vm().packageManager(); - if (pm.pending_tasks.load(.monotonic) > 0) return; - - var modules: []AsyncModule = this.map.items; - var i: usize = 0; - - for (modules) |mod| { - var module = mod; - var tags = module.parse_result.pending_imports.items(.tag); - const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id); - // var esms = module.parse_result.pending_imports.items(.esm); - // var versions = module.parse_result.pending_imports.items(.dependency); - var done_count: usize = 0; - for (tags, 0..) |tag, tag_i| { - const root_id = root_dependency_ids[tag_i]; - const resolution_ids = pm.lockfile.buffers.resolutions.items; - if (root_id >= resolution_ids.len) continue; - const package_id = resolution_ids[root_id]; - - switch (tag) { - .resolve => { - if (package_id == Install.invalid_package_id) { - continue; - } - - // if we get here, the package has already been resolved. - tags[tag_i] = .download; - }, - .download => { - if (package_id == Install.invalid_package_id) { - unreachable; - } - }, - .done => { - done_count += 1; - continue; - }, - } - - if (package_id == Install.invalid_package_id) { - continue; - } - - const package = pm.lockfile.packages.get(package_id); - bun.assert(package.resolution.tag != .root); - - var name_and_version_hash: ?u64 = null; - var patchfile_hash: ?u64 = null; - switch (pm.determinePreinstallState(package, pm.lockfile, &name_and_version_hash, &patchfile_hash)) { - .done => { - // we are only truly done if all the dependencies are done. - const current_tasks = pm.total_tasks; - // so if enqueuing all the dependencies produces no new tasks, we are done. 
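
Every Queue callback in this removed block filters `map.items` the same way: survivors are copied toward the front and `items.len` is truncated, so resolved or failed modules drop out in place, order is preserved, and nothing is reallocated. A minimal standalone sketch of that retain idiom, using only the standard library (the function name and element type are illustrative stand-ins, not Bun APIs):

```zig
const std = @import("std");

// Keep only the entries that are still pending; `0` stands in for "done".
// Survivors keep their relative order and no memory is (re)allocated.
fn retainPending(list: *std.ArrayListUnmanaged(u32)) void {
    var i: usize = 0;
    for (list.items) |item| {
        if (item == 0) continue; // finished: drop it
        list.items[i] = item;
        i += 1;
    }
    list.items.len = i;
}

test retainPending {
    var list: std.ArrayListUnmanaged(u32) = .{};
    defer list.deinit(std.testing.allocator);
    try list.appendSlice(std.testing.allocator, &.{ 1, 0, 2, 0, 3 });
    retainPending(&list);
    try std.testing.expectEqualSlices(u32, &.{ 1, 2, 3 }, list.items);
}
```
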
- pm.enqueueDependencyList(package.dependencies); - if (current_tasks == pm.total_tasks) { - tags[tag_i] = .done; - done_count += 1; - } - }, - .extracting => { - // we are extracting the package - // we need to wait for the next poll - continue; - }, - .extract => {}, - else => {}, - } - } - - if (done_count == tags.len) { - module.done(this.vm()); - } else { - modules[i] = module; - i += 1; - } - } - this.map.items.len = i; - if (i == 0) { - // ensure we always end the progress bar - this.vm().packageManager().endProgressBar(); - } - } - - pub fn vm(this: *Queue) *VirtualMachine { - return @alignCast(@fieldParentPtr("modules", this)); - } - }; - - pub fn init(opts: anytype, globalObject: *JSGlobalObject) !AsyncModule { - // var stmt_blocks = js_ast.Stmt.Data.toOwnedSlice(); - // var expr_blocks = js_ast.Expr.Data.toOwnedSlice(); - const this_promise = JSValue.createInternalPromise(globalObject); - const promise = jsc.Strong.Optional.create(this_promise, globalObject); - - var buf = bun.StringBuilder{}; - buf.count(opts.referrer); - buf.count(opts.specifier); - buf.count(opts.path.text); - - try buf.allocate(bun.default_allocator); - opts.promise_ptr.?.* = this_promise.asInternalPromise().?; - const referrer = buf.append(opts.referrer); - const specifier = buf.append(opts.specifier); - const path = Fs.Path.init(buf.append(opts.path.text)); - - return AsyncModule{ - .parse_result = opts.parse_result, - .promise = promise, - .path = path, - .specifier = specifier, - .referrer = referrer, - .fd = opts.fd, - .package_json = opts.package_json, - .loader = opts.loader.toAPI(), - .string_buf = buf.allocatedSlice(), - // .stmt_blocks = stmt_blocks, - // .expr_blocks = expr_blocks, - .globalThis = globalObject, - .arena = opts.arena, - }; - } - - pub fn done(this: *AsyncModule, jsc_vm: *VirtualMachine) void { - var clone = jsc_vm.allocator.create(AsyncModule) catch unreachable; - clone.* = this.*; - jsc_vm.modules.scheduled += 1; - clone.any_task = jsc.AnyTask.New(AsyncModule, onDone).init(clone); - jsc_vm.enqueueTask(jsc.Task.init(&clone.any_task)); - } - - pub fn onDone(this: *AsyncModule) void { - jsc.markBinding(@src()); - var jsc_vm = this.globalThis.bunVM(); - jsc_vm.modules.scheduled -= 1; - if (jsc_vm.modules.scheduled == 0) { - jsc_vm.packageManager().endProgressBar(); - } - var log = logger.Log.init(jsc_vm.allocator); - defer log.deinit(); - var errorable: jsc.ErrorableResolvedSource = undefined; - this.poll_ref.unref(jsc_vm); - outer: { - errorable = jsc.ErrorableResolvedSource.ok(this.resumeLoadingModule(&log) catch |err| { - switch (err) { - error.JSError => { - errorable = .err(error.JSError, this.globalThis.takeError(error.JSError)); - break :outer; - }, - else => { - VirtualMachine.processFetchLog( - this.globalThis, - bun.String.init(this.specifier), - bun.String.init(this.referrer), - &log, - &errorable, - err, - ); - break :outer; - }, - } - }); - } - - var spec = bun.String.init(ZigString.init(this.specifier).withEncoding()); - var ref = bun.String.init(ZigString.init(this.referrer).withEncoding()); - bun.jsc.fromJSHostCallGeneric(this.globalThis, @src(), Bun__onFulfillAsyncModule, .{ - this.globalThis, - this.promise.get().?, - &errorable, - &spec, - &ref, - }) catch {}; - this.deinit(); - jsc_vm.allocator.destroy(this); - } - - pub fn fulfill( - globalThis: *JSGlobalObject, - promise: JSValue, - resolved_source: *ResolvedSource, - err: ?anyerror, - specifier_: bun.String, - referrer_: bun.String, - log: *logger.Log, - ) bun.JSError!void { - jsc.markBinding(@src()); - var 
specifier = specifier_; - var referrer = referrer_; - var scope: jsc.CatchScope = undefined; - scope.init(globalThis, @src()); - defer { - specifier.deref(); - referrer.deref(); - scope.deinit(); - } - - var errorable: jsc.ErrorableResolvedSource = undefined; - if (err) |e| { - defer { - if (resolved_source.source_code_needs_deref) { - resolved_source.source_code_needs_deref = false; - resolved_source.source_code.deref(); - } - } - - if (e == error.JSError) { - errorable = jsc.ErrorableResolvedSource.err(error.JSError, globalThis.takeError(error.JSError)); - } else { - VirtualMachine.processFetchLog( - globalThis, - specifier, - referrer, - log, - &errorable, - e, - ); - } - } else { - errorable = jsc.ErrorableResolvedSource.ok(resolved_source.*); - } - log.deinit(); - - debug("fulfill: {any}", .{specifier}); - - try bun.jsc.fromJSHostCallGeneric(globalThis, @src(), Bun__onFulfillAsyncModule, .{ - globalThis, - promise, - &errorable, - &specifier, - &referrer, - }); - } - - pub fn resolveError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageResolveError) !void { - const globalThis = this.globalThis; - - const msg: []u8 = try switch (result.err) { - error.PackageManifestHTTP400 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 400 while resolving package '{s}' at '{s}'", - .{ result.name, result.url }, - ), - error.PackageManifestHTTP401 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 401 while resolving package '{s}' at '{s}'", - .{ result.name, result.url }, - ), - error.PackageManifestHTTP402 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 402 while resolving package '{s}' at '{s}'", - .{ result.name, result.url }, - ), - error.PackageManifestHTTP403 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 403 while resolving package '{s}' at '{s}'", - .{ result.name, result.url }, - ), - error.PackageManifestHTTP404 => std.fmt.allocPrint( - bun.default_allocator, - "Package '{s}' was not found", - .{result.name}, - ), - error.PackageManifestHTTP4xx => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 4xx while resolving package '{s}' at '{s}'", - .{ result.name, result.url }, - ), - error.PackageManifestHTTP5xx => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 5xx while resolving package '{s}' at '{s}'", - .{ result.name, result.url }, - ), - error.DistTagNotFound, error.NoMatchingVersion => brk: { - const prefix: []const u8 = if (result.err == error.NoMatchingVersion and result.version.tag == .npm and result.version.value.npm.version.isExact()) - "Version not found" - else if (result.version.tag == .npm and !result.version.value.npm.version.isExact()) - "No matching version found" - else - "No match found"; - - break :brk std.fmt.allocPrint( - bun.default_allocator, - "{s} '{s}' for package '{s}' (but package exists)", - .{ prefix, vm.packageManager().lockfile.str(&result.version.literal), result.name }, - ); - }, - else => |err| std.fmt.allocPrint( - bun.default_allocator, - "{s} resolving package '{s}' at '{s}'", - .{ bun.asByteSlice(@errorName(err)), result.name, result.url }, - ), - }; - - const name: []const u8 = switch (result.err) { - error.NoMatchingVersion => "PackageVersionNotFound", - error.DistTagNotFound => "PackageTagNotFound", - error.PackageManifestHTTP403 => "PackageForbidden", - error.PackageManifestHTTP404 => "PackageNotFound", - else => "PackageResolveError", - }; - - var error_instance = ZigString.init(msg).withEncoding().toErrorInstance(globalThis); - if (result.url.len > 0) - 
error_instance.put(globalThis, ZigString.static("url"), ZigString.init(result.url).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("name"), ZigString.init(name).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("pkg"), ZigString.init(result.name).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("specifier"), ZigString.init(this.specifier).withEncoding().toJS(globalThis)); - const location = logger.rangeData(&this.parse_result.source, this.parse_result.ast.import_records.at(import_record_id).range, "").location.?; - error_instance.put(globalThis, ZigString.static("sourceURL"), ZigString.init(this.parse_result.source.path.text).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("line"), JSValue.jsNumber(location.line)); - if (location.line_text) |line_text| { - error_instance.put(globalThis, ZigString.static("lineText"), ZigString.init(line_text).withEncoding().toJS(globalThis)); - } - error_instance.put(globalThis, ZigString.static("column"), JSValue.jsNumber(location.column)); - if (this.referrer.len > 0 and !strings.eqlComptime(this.referrer, "undefined")) { - error_instance.put(globalThis, ZigString.static("referrer"), ZigString.init(this.referrer).withEncoding().toJS(globalThis)); - } - - const promise_value = this.promise.swap(); - var promise = promise_value.asInternalPromise().?; - promise_value.ensureStillAlive(); - this.poll_ref.unref(vm); - this.deinit(); - promise.rejectAsHandled(globalThis, error_instance); - } - pub fn downloadError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageDownloadError) !void { - const globalThis = this.globalThis; - - const msg_args = .{ - result.name, - result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items, .any), - }; - - const msg: []u8 = try switch (result.err) { - error.TarballHTTP400 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 400 downloading package '{s}@{any}'", - msg_args, - ), - error.TarballHTTP401 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 401 downloading package '{s}@{any}'", - msg_args, - ), - error.TarballHTTP402 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 402 downloading package '{s}@{any}'", - msg_args, - ), - error.TarballHTTP403 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 403 downloading package '{s}@{any}'", - msg_args, - ), - error.TarballHTTP404 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 404 downloading package '{s}@{any}'", - msg_args, - ), - error.TarballHTTP4xx => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 4xx downloading package '{s}@{any}'", - msg_args, - ), - error.TarballHTTP5xx => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 5xx downloading package '{s}@{any}'", - msg_args, - ), - error.TarballFailedToExtract => std.fmt.allocPrint( - bun.default_allocator, - "Failed to extract tarball for package '{s}@{any}'", - msg_args, - ), - else => |err| std.fmt.allocPrint( - bun.default_allocator, - "{s} downloading package '{s}@{any}'", - .{ - bun.asByteSlice(@errorName(err)), - result.name, - result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items, .any), - }, - ), - }; - - const name: []const u8 = switch (result.err) { - error.TarballFailedToExtract => "PackageExtractionError", - error.TarballHTTP403 => "TarballForbiddenError", - error.TarballHTTP404 => "TarballNotFoundError", - else => "TarballDownloadError", - }; - - var error_instance = 
ZigString.init(msg).withEncoding().toErrorInstance(globalThis); - if (result.url.len > 0) - error_instance.put(globalThis, ZigString.static("url"), ZigString.init(result.url).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("name"), ZigString.init(name).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("pkg"), ZigString.init(result.name).withEncoding().toJS(globalThis)); - if (this.specifier.len > 0 and !strings.eqlComptime(this.specifier, "undefined")) { - error_instance.put(globalThis, ZigString.static("referrer"), ZigString.init(this.specifier).withEncoding().toJS(globalThis)); - } - - const location = logger.rangeData(&this.parse_result.source, this.parse_result.ast.import_records.at(import_record_id).range, "").location.?; - error_instance.put(globalThis, ZigString.static("specifier"), ZigString.init( - this.parse_result.ast.import_records.at(import_record_id).path.text, - ).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("sourceURL"), ZigString.init(this.parse_result.source.path.text).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("line"), JSValue.jsNumber(location.line)); - if (location.line_text) |line_text| { - error_instance.put(globalThis, ZigString.static("lineText"), ZigString.init(line_text).withEncoding().toJS(globalThis)); - } - error_instance.put(globalThis, ZigString.static("column"), JSValue.jsNumber(location.column)); - - const promise_value = this.promise.swap(); - var promise = promise_value.asInternalPromise().?; - promise_value.ensureStillAlive(); - this.poll_ref.unref(vm); - this.deinit(); - promise.rejectAsHandled(globalThis, error_instance); - } - - pub fn resumeLoadingModule(this: *AsyncModule, log: *logger.Log) !ResolvedSource { - debug("resumeLoadingModule: {s}", .{this.specifier}); - var parse_result = this.parse_result; - const path = this.path; - var jsc_vm = VirtualMachine.get(); - const specifier = this.specifier; - const old_log = jsc_vm.log; - - jsc_vm.transpiler.linker.log = log; - jsc_vm.transpiler.log = log; - jsc_vm.transpiler.resolver.log = log; - jsc_vm.packageManager().log = log; - defer { - jsc_vm.transpiler.linker.log = old_log; - jsc_vm.transpiler.log = old_log; - jsc_vm.transpiler.resolver.log = old_log; - jsc_vm.packageManager().log = old_log; - } - - // We _must_ link because: - // - node_modules bundle won't be properly - try jsc_vm.transpiler.linker.link( - path, - &parse_result, - jsc_vm.origin, - .absolute_path, - false, - true, - ); - this.parse_result = parse_result; - - var printer = VirtualMachine.source_code_printer.?.*; - printer.ctx.reset(); - - { - var mapper = jsc_vm.sourceMapHandler(&printer); - defer VirtualMachine.source_code_printer.?.* = printer; - _ = try jsc_vm.transpiler.printWithSourceMap( - parse_result, - @TypeOf(&printer), - &printer, - .esm_ascii, - mapper.get(), - ); - } - - if (comptime Environment.dump_source) { - dumpSource(jsc_vm, specifier, &printer); - } - - if (jsc_vm.isWatcherEnabled()) { - var resolved_source = jsc_vm.refCountedResolvedSource(printer.ctx.written, bun.String.init(specifier), path.text, null, false); - - if (parse_result.input_fd) |fd_| { - if (std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) { - _ = jsc_vm.bun_watcher.addFile( - fd_, - path.text, - this.hash, - options.Loader.fromAPI(this.loader), - .invalid, - this.package_json, - true, - ); - } - } - - resolved_source.is_commonjs_module = 
parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs; - - return resolved_source; - } - - return ResolvedSource{ - .allocator = null, - .source_code = bun.String.cloneLatin1(printer.ctx.getWritten()), - .specifier = String.init(specifier), - .source_url = String.init(path.text), - .is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs, - }; - } - - pub fn deinit(this: *AsyncModule) void { - this.promise.deinit(); - this.parse_result.deinit(); - this.arena.deinit(); - this.globalThis.bunVM().allocator.destroy(this.arena); - // bun.default_allocator.free(this.stmt_blocks); - // bun.default_allocator.free(this.expr_blocks); - - bun.default_allocator.free(this.string_buf); - } - - extern "c" fn Bun__onFulfillAsyncModule( - globalObject: *JSGlobalObject, - promiseValue: JSValue, - res: *jsc.ErrorableResolvedSource, - specifier: *bun.String, - referrer: *bun.String, - ) void; -}; - pub export fn Bun__getDefaultLoader(global: *JSGlobalObject, str: *const bun.String) api.Loader { var jsc_vm = global.bunVM(); const filename = str.toUTF8(jsc_vm.allocator); @@ -2025,586 +1289,6 @@ inline fn jsSyntheticModule(name: ResolvedSource.Tag, specifier: String) Resolve /// /// This can technically fail if concurrent access across processes happens, or permission issues. /// Errors here should always be ignored. -fn dumpSource(vm: *VirtualMachine, specifier: string, printer: anytype) void { - dumpSourceString(vm, specifier, printer.ctx.getWritten()); -} - -fn dumpSourceString(vm: *VirtualMachine, specifier: string, written: []const u8) void { - dumpSourceStringFailiable(vm, specifier, written) catch |e| { - Output.debugWarn("Failed to dump source string: {}", .{e}); - }; -} - -fn dumpSourceStringFailiable(vm: *VirtualMachine, specifier: string, written: []const u8) !void { - if (!Environment.isDebug) return; - if (bun.feature_flag.BUN_DEBUG_NO_DUMP.get()) return; - - const BunDebugHolder = struct { - pub var dir: ?std.fs.Dir = null; - pub var lock: bun.Mutex = .{}; - }; - - BunDebugHolder.lock.lock(); - defer BunDebugHolder.lock.unlock(); - - const dir = BunDebugHolder.dir orelse dir: { - const base_name = switch (Environment.os) { - else => "/tmp/bun-debug-src/", - .windows => brk: { - const temp = bun.fs.FileSystem.RealFS.platformTempDir(); - var win_temp_buffer: bun.PathBuffer = undefined; - @memcpy(win_temp_buffer[0..temp.len], temp); - const suffix = "\\bun-debug-src"; - @memcpy(win_temp_buffer[temp.len .. temp.len + suffix.len], suffix); - win_temp_buffer[temp.len + suffix.len] = 0; - break :brk win_temp_buffer[0 .. 
temp.len + suffix.len :0]; - }, - }; - const dir = try std.fs.cwd().makeOpenPath(base_name, .{}); - BunDebugHolder.dir = dir; - break :dir dir; - }; - - if (std.fs.path.dirname(specifier)) |dir_path| { - const root_len = switch (Environment.os) { - else => "/".len, - .windows => bun.path.windowsFilesystemRoot(dir_path).len, - }; - var parent = try dir.makeOpenPath(dir_path[root_len..], .{}); - defer parent.close(); - parent.writeFile(.{ - .sub_path = std.fs.path.basename(specifier), - .data = written, - }) catch |e| { - Output.debugWarn("Failed to dump source string: writeFile {}", .{e}); - return; - }; - if (vm.source_mappings.get(specifier)) |mappings| { - defer mappings.deref(); - const map_path = bun.handleOom(std.mem.concat(bun.default_allocator, u8, &.{ std.fs.path.basename(specifier), ".map" })); - defer bun.default_allocator.free(map_path); - const file = try parent.createFile(map_path, .{}); - defer file.close(); - - const source_file = parent.readFileAlloc( - bun.default_allocator, - specifier, - std.math.maxInt(u64), - ) catch ""; - defer bun.default_allocator.free(source_file); - - var bufw = std.io.bufferedWriter(file.writer()); - const w = bufw.writer(); - try w.print( - \\{{ - \\ "version": 3, - \\ "file": {}, - \\ "sourceRoot": "", - \\ "sources": [{}], - \\ "sourcesContent": [{}], - \\ "names": [], - \\ "mappings": "{}" - \\}} - , .{ - bun.fmt.formatJSONStringUTF8(std.fs.path.basename(specifier), .{}), - bun.fmt.formatJSONStringUTF8(specifier, .{}), - bun.fmt.formatJSONStringUTF8(source_file, .{}), - mappings.formatVLQs(), - }); - try bufw.flush(); - } - } else { - dir.writeFile(.{ - .sub_path = std.fs.path.basename(specifier), - .data = written, - }) catch return; - } -} - -fn setBreakPointOnFirstLine() bool { - const s = struct { - var set_break_point: bool = true; - }; - const ret = s.set_break_point; - s.set_break_point = false; - return ret; -} - -pub const RuntimeTranspilerStore = struct { - generation_number: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), - store: TranspilerJob.Store, - enabled: bool = true, - queue: Queue = Queue{}, - - pub const Queue = bun.UnboundedQueue(TranspilerJob, .next); - - pub fn init() RuntimeTranspilerStore { - return RuntimeTranspilerStore{ - .store = TranspilerJob.Store.init(bun.typedAllocator(TranspilerJob)), - }; - } - - pub fn runFromJSThread(this: *RuntimeTranspilerStore, event_loop: *jsc.EventLoop, global: *jsc.JSGlobalObject, vm: *jsc.VirtualMachine) void { - var batch = this.queue.popBatch(); - const jsc_vm = vm.jsc_vm; - var iter = batch.iterator(); - if (iter.next()) |job| { - // we run just one job first to see if there are more - job.runFromJSThread() catch |err| global.reportUncaughtExceptionFromError(err); - } else { - return; - } - while (iter.next()) |job| { - // if there are more, we need to drain the microtasks from the previous run - event_loop.drainMicrotasksWithGlobal(global, jsc_vm) catch return; - job.runFromJSThread() catch |err| global.reportUncaughtExceptionFromError(err); - } - - // immediately after this is called, the microtasks will be drained again. 
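
One behavioral change hides in this otherwise mechanical extraction: the new RuntimeTranspilerStore.zig (added further below) re-implements `setBreakPointOnFirstLine` with an atomic swap instead of the plain static bool removed here, so the flag can be claimed exactly once even with concurrent callers. A standalone sketch of that one-shot idiom, assuming nothing beyond the standard library:

```zig
const std = @import("std");

// One-shot flag: the first call returns true, every later call returns
// false. `swap` atomically stores `false` and returns the previous value,
// so two racing threads cannot both observe `true`.
fn takeOnce() bool {
    const s = struct {
        var armed = std.atomic.Value(bool).init(true);
    };
    return s.armed.swap(false, .seq_cst);
}

test takeOnce {
    try std.testing.expect(takeOnce());
    try std.testing.expect(!takeOnce());
}
```

The `.seq_cst` ordering matches what the new file uses; a weaker ordering would likely suffice for a debug-only latch, but matching the source keeps the sketch faithful.
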
- } - - pub fn transpile( - this: *RuntimeTranspilerStore, - vm: *VirtualMachine, - globalObject: *JSGlobalObject, - input_specifier: bun.String, - path: Fs.Path, - referrer: bun.String, - loader: bun.options.Loader, - package_json: ?*const PackageJSON, - ) *anyopaque { - var job: *TranspilerJob = this.store.get(); - const owned_path = Fs.Path.init(bun.default_allocator.dupe(u8, path.text) catch unreachable); - const promise = jsc.JSInternalPromise.create(globalObject); - - // NOTE: DirInfo should already be cached since module loading happens - // after module resolution, so this should be cheap - var resolved_source = ResolvedSource{}; - if (package_json) |pkg| { - switch (pkg.module_type) { - .cjs => { - resolved_source.tag = .package_json_type_commonjs; - resolved_source.is_commonjs_module = true; - }, - .esm => resolved_source.tag = .package_json_type_module, - .unknown => {}, - } - } - - job.* = TranspilerJob{ - .non_threadsafe_input_specifier = input_specifier, - .path = owned_path, - .globalThis = globalObject, - .non_threadsafe_referrer = referrer, - .vm = vm, - .log = logger.Log.init(bun.default_allocator), - .loader = loader, - .promise = .create(JSValue.fromCell(promise), globalObject), - .poll_ref = .{}, - .fetcher = TranspilerJob.Fetcher{ - .file = {}, - }, - .resolved_source = resolved_source, - }; - if (comptime Environment.allow_assert) - debug("transpile({s}, {s}, async)", .{ path.text, @tagName(job.loader) }); - job.schedule(); - return promise; - } - - pub const TranspilerJob = struct { - path: Fs.Path, - non_threadsafe_input_specifier: String, - non_threadsafe_referrer: String, - loader: options.Loader, - promise: jsc.Strong.Optional = .empty, - vm: *VirtualMachine, - globalThis: *JSGlobalObject, - fetcher: Fetcher, - poll_ref: Async.KeepAlive = .{}, - generation_number: u32 = 0, - log: logger.Log, - parse_error: ?anyerror = null, - resolved_source: ResolvedSource = ResolvedSource{}, - work_task: jsc.WorkPoolTask = .{ .callback = runFromWorkerThread }, - next: ?*TranspilerJob = null, - - pub const Store = bun.HiveArray(TranspilerJob, if (bun.heap_breakdown.enabled) 0 else 64).Fallback; - - pub const Fetcher = union(enum) { - virtual_module: bun.String, - file: void, - - pub fn deinit(this: *@This()) void { - if (this.* == .virtual_module) { - this.virtual_module.deref(); - } - } - }; - - pub fn deinit(this: *TranspilerJob) void { - bun.default_allocator.free(this.path.text); - - this.poll_ref.disable(); - this.fetcher.deinit(); - this.loader = options.Loader.file; - this.non_threadsafe_input_specifier.deref(); - this.non_threadsafe_referrer.deref(); - this.path = Fs.Path.empty; - this.log.deinit(); - this.promise.deinit(); - this.globalThis = undefined; - } - - threadlocal var ast_memory_store: ?*js_ast.ASTMemoryAllocator = null; - threadlocal var source_code_printer: ?*js_printer.BufferPrinter = null; - - pub fn dispatchToMainThread(this: *TranspilerJob) void { - this.vm.transpiler_store.queue.push(this); - this.vm.eventLoop().enqueueTaskConcurrent(jsc.ConcurrentTask.createFrom(&this.vm.transpiler_store)); - } - - pub fn runFromJSThread(this: *TranspilerJob) bun.JSError!void { - var vm = this.vm; - const promise = this.promise.swap(); - const globalThis = this.globalThis; - this.poll_ref.unref(vm); - - const referrer = this.non_threadsafe_referrer; - this.non_threadsafe_referrer = String.empty; - var log = this.log; - this.log = logger.Log.init(bun.default_allocator); - var resolved_source = this.resolved_source; - const specifier = brk: { - if (this.parse_error != 
null) { - break :brk bun.String.cloneUTF8(this.path.text); - } - - const out = this.non_threadsafe_input_specifier; - this.non_threadsafe_input_specifier = String.empty; - - bun.debugAssert(resolved_source.source_url.isEmpty()); - bun.debugAssert(resolved_source.specifier.isEmpty()); - resolved_source.source_url = out.createIfDifferent(this.path.text); - resolved_source.specifier = out.dupeRef(); - break :brk out; - }; - - const parse_error = this.parse_error; - - this.promise.deinit(); - this.deinit(); - - _ = vm.transpiler_store.store.put(this); - - try ModuleLoader.AsyncModule.fulfill(globalThis, promise, &resolved_source, parse_error, specifier, referrer, &log); - } - - pub fn schedule(this: *TranspilerJob) void { - this.poll_ref.ref(this.vm); - jsc.WorkPool.schedule(&this.work_task); - } - - pub fn runFromWorkerThread(work_task: *jsc.WorkPoolTask) void { - @as(*TranspilerJob, @fieldParentPtr("work_task", work_task)).run(); - } - - pub fn run(this: *TranspilerJob) void { - var arena = bun.ArenaAllocator.init(bun.default_allocator); - defer arena.deinit(); - const allocator = arena.allocator(); - - defer this.dispatchToMainThread(); - if (this.generation_number != this.vm.transpiler_store.generation_number.load(.monotonic)) { - this.parse_error = error.TranspilerJobGenerationMismatch; - return; - } - - if (ast_memory_store == null) { - ast_memory_store = bun.handleOom(bun.default_allocator.create(js_ast.ASTMemoryAllocator)); - ast_memory_store.?.* = js_ast.ASTMemoryAllocator{ - .allocator = allocator, - .previous = null, - }; - } - - var ast_scope = ast_memory_store.?.enter(allocator); - defer ast_scope.exit(); - - const path = this.path; - const specifier = this.path.text; - const loader = this.loader; - - var cache = jsc.RuntimeTranspilerCache{ - .output_code_allocator = allocator, - .sourcemap_allocator = bun.default_allocator, - }; - var log = logger.Log.init(allocator); - defer { - this.log = logger.Log.init(bun.default_allocator); - bun.handleOom(log.cloneToWithRecycled(&this.log, true)); - } - var vm = this.vm; - var transpiler: bun.Transpiler = undefined; - transpiler = vm.transpiler; - transpiler.setAllocator(allocator); - transpiler.setLog(&log); - transpiler.resolver.opts = transpiler.options; - transpiler.macro_context = null; - transpiler.linker.resolver = &transpiler.resolver; - - var fd: ?StoredFileDescriptorType = null; - var package_json: ?*PackageJSON = null; - const hash = bun.Watcher.getHash(path.text); - - switch (vm.bun_watcher) { - .hot, .watch => { - if (vm.bun_watcher.indexOf(hash)) |index| { - const watcher_fd = vm.bun_watcher.watchlist().items(.fd)[index]; - fd = if (watcher_fd.stdioTag() == null) watcher_fd else null; - package_json = vm.bun_watcher.watchlist().items(.package_json)[index]; - } - }, - else => {}, - } - - // this should be a cheap lookup because 24 bytes == 8 * 3 so it's read 3 machine words - const is_node_override = strings.hasPrefixComptime(specifier, node_fallbacks.import_path); - - const macro_remappings = if (vm.macro_mode or !vm.has_any_macro_remappings or is_node_override) - MacroRemap{} - else - transpiler.options.macro_remap; - - var fallback_source: logger.Source = undefined; - - // Usually, we want to close the input file automatically. 
- // - // If we're re-using the file descriptor from the fs watcher - // Do not close it because that will break the kqueue-based watcher - // - var should_close_input_file_fd = fd == null; - - var input_file_fd: StoredFileDescriptorType = .invalid; - - const is_main = vm.main.len == path.text.len and - vm.main_hash == hash and - strings.eqlLong(vm.main, path.text, false); - - const module_type: ModuleType = switch (this.resolved_source.tag) { - .package_json_type_commonjs => .cjs, - .package_json_type_module => .esm, - else => .unknown, - }; - - var parse_options = Transpiler.ParseOptions{ - .allocator = allocator, - .path = path, - .loader = loader, - .dirname_fd = .invalid, - .file_descriptor = fd, - .file_fd_ptr = &input_file_fd, - .file_hash = hash, - .macro_remappings = macro_remappings, - .jsx = transpiler.options.jsx, - .emit_decorator_metadata = transpiler.options.emit_decorator_metadata, - .virtual_source = null, - .dont_bundle_twice = true, - .allow_commonjs = true, - .inject_jest_globals = transpiler.options.rewrite_jest_for_tests, - .set_breakpoint_on_first_line = vm.debugger != null and - vm.debugger.?.set_breakpoint_on_first_line and - is_main and - setBreakPointOnFirstLine(), - .runtime_transpiler_cache = if (!jsc.RuntimeTranspilerCache.is_disabled) &cache else null, - .remove_cjs_module_wrapper = is_main and vm.module_loader.eval_source != null, - .module_type = module_type, - .allow_bytecode_cache = true, - }; - - defer { - if (should_close_input_file_fd and input_file_fd.isValid()) { - input_file_fd.close(); - input_file_fd = .invalid; - } - } - - if (is_node_override) { - if (node_fallbacks.contentsFromPath(specifier)) |code| { - const fallback_path = Fs.Path.initWithNamespace(specifier, "node"); - fallback_source = logger.Source{ .path = fallback_path, .contents = code }; - parse_options.virtual_source = &fallback_source; - } - } - - var parse_result: bun.transpiler.ParseResult = transpiler.parseMaybeReturnFileOnlyAllowSharedBuffer( - parse_options, - null, - false, - false, - ) orelse { - if (vm.isWatcherEnabled()) { - if (input_file_fd.isValid()) { - if (!is_node_override and std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) { - should_close_input_file_fd = false; - _ = vm.bun_watcher.addFile( - input_file_fd, - path.text, - hash, - loader, - .invalid, - package_json, - true, - ); - } - } - } - - this.parse_error = error.ParseError; - - return; - }; - - if (vm.isWatcherEnabled()) { - if (input_file_fd.isValid()) { - if (!is_node_override and - std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) - { - should_close_input_file_fd = false; - _ = vm.bun_watcher.addFile( - input_file_fd, - path.text, - hash, - loader, - .invalid, - package_json, - true, - ); - } - } - } - - if (cache.entry) |*entry| { - vm.source_mappings.putMappings(&parse_result.source, .{ - .list = .{ .items = @constCast(entry.sourcemap), .capacity = entry.sourcemap.len }, - .allocator = bun.default_allocator, - }) catch {}; - - if (comptime Environment.dump_source) { - dumpSourceString(vm, specifier, entry.output_code.byteSlice()); - } - - this.resolved_source = ResolvedSource{ - .allocator = null, - .source_code = switch (entry.output_code) { - .string => entry.output_code.string, - .utf8 => brk: { - const result = bun.String.cloneUTF8(entry.output_code.utf8); - cache.output_code_allocator.free(entry.output_code.utf8); - entry.output_code.utf8 = ""; - break :brk result; - }, - }, - .is_commonjs_module = entry.metadata.module_type == 
.cjs, - .tag = this.resolved_source.tag, - }; - - return; - } - - if (parse_result.already_bundled != .none) { - const bytecode_slice = parse_result.already_bundled.bytecodeSlice(); - this.resolved_source = ResolvedSource{ - .allocator = null, - .source_code = bun.String.cloneLatin1(parse_result.source.contents), - .already_bundled = true, - .bytecode_cache = if (bytecode_slice.len > 0) bytecode_slice.ptr else null, - .bytecode_cache_size = bytecode_slice.len, - .is_commonjs_module = parse_result.already_bundled.isCommonJS(), - .tag = this.resolved_source.tag, - }; - this.resolved_source.source_code.ensureHash(); - return; - } - - for (parse_result.ast.import_records.slice()) |*import_record_| { - var import_record: *bun.ImportRecord = import_record_; - - if (jsc.ModuleLoader.HardcodedModule.Alias.get(import_record.path.text, transpiler.options.target, .{ .rewrite_jest_for_tests = transpiler.options.rewrite_jest_for_tests })) |replacement| { - import_record.path.text = replacement.path; - import_record.tag = replacement.tag; - import_record.is_external_without_side_effects = true; - continue; - } - - if (strings.hasPrefixComptime(import_record.path.text, "bun:")) { - import_record.path = Fs.Path.init(import_record.path.text["bun:".len..]); - import_record.path.namespace = "bun"; - import_record.is_external_without_side_effects = true; - } - } - - if (source_code_printer == null) { - const writer = js_printer.BufferWriter.init(bun.default_allocator); - source_code_printer = bun.default_allocator.create(js_printer.BufferPrinter) catch unreachable; - source_code_printer.?.* = js_printer.BufferPrinter.init(writer); - source_code_printer.?.ctx.append_null_byte = false; - } - - var printer = source_code_printer.?.*; - printer.ctx.reset(); - - { - var mapper = vm.sourceMapHandler(&printer); - defer source_code_printer.?.* = printer; - _ = transpiler.printWithSourceMap( - parse_result, - @TypeOf(&printer), - &printer, - .esm_ascii, - mapper.get(), - ) catch |err| { - this.parse_error = err; - return; - }; - } - - if (comptime Environment.dump_source) { - dumpSource(this.vm, specifier, &printer); - } - - const source_code = brk: { - const written = printer.ctx.getWritten(); - - const result = cache.output_code orelse bun.String.cloneLatin1(written); - - if (written.len > 1024 * 1024 * 2 or vm.smol) { - printer.ctx.buffer.deinit(); - source_code_printer.?.* = printer; - } - - // In a benchmarking loading @babel/standalone 100 times: - // - // After ensureHash: - // 354.00 ms 4.2% 354.00 ms WTF::StringImpl::hashSlowCase() const - // - // Before ensureHash: - // 506.00 ms 6.1% 506.00 ms WTF::StringImpl::hashSlowCase() const - // - result.ensureHash(); - - break :brk result; - }; - this.resolved_source = ResolvedSource{ - .allocator = null, - .source_code = source_code, - .is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs, - .tag = this.resolved_source.tag, - }; - } - }; -}; - pub const FetchFlags = enum { transpile, print_source, @@ -2615,430 +1299,6 @@ pub const FetchFlags = enum { } }; -pub const HardcodedModule = enum { - bun, - @"abort-controller", - @"bun:app", - @"bun:ffi", - @"bun:jsc", - @"bun:main", - @"bun:test", - @"bun:wrap", - @"bun:sqlite", - @"node:assert", - @"node:assert/strict", - @"node:async_hooks", - @"node:buffer", - @"node:child_process", - @"node:console", - @"node:constants", - @"node:crypto", - @"node:dns", - @"node:dns/promises", - @"node:domain", - @"node:events", - @"node:fs", - @"node:fs/promises", - @"node:http", - 
@"node:https", - @"node:module", - @"node:net", - @"node:os", - @"node:path", - @"node:path/posix", - @"node:path/win32", - @"node:perf_hooks", - @"node:process", - @"node:querystring", - @"node:readline", - @"node:readline/promises", - @"node:stream", - @"node:stream/consumers", - @"node:stream/promises", - @"node:stream/web", - @"node:string_decoder", - @"node:test", - @"node:timers", - @"node:timers/promises", - @"node:tls", - @"node:tty", - @"node:url", - @"node:util", - @"node:util/types", - @"node:vm", - @"node:wasi", - @"node:zlib", - @"node:worker_threads", - @"node:punycode", - undici, - ws, - @"isomorphic-fetch", - @"node-fetch", - vercel_fetch, - @"utf-8-validate", - @"node:v8", - @"node:trace_events", - @"node:repl", - @"node:inspector", - @"node:http2", - @"node:diagnostics_channel", - @"node:dgram", - @"node:cluster", - @"node:_stream_duplex", - @"node:_stream_passthrough", - @"node:_stream_readable", - @"node:_stream_transform", - @"node:_stream_wrap", - @"node:_stream_writable", - @"node:_tls_common", - @"node:_http_agent", - @"node:_http_client", - @"node:_http_common", - @"node:_http_incoming", - @"node:_http_outgoing", - @"node:_http_server", - /// This is gated behind '--expose-internals' - @"bun:internal-for-testing", - - /// The module loader first uses `Aliases` to get a single string during - /// resolution, then maps that single string to the actual module. - /// Do not include aliases here; Those go in `Aliases`. - pub const map = bun.ComptimeStringMap(HardcodedModule, [_]struct { []const u8, HardcodedModule }{ - // Bun - .{ "bun", .bun }, - .{ "bun:app", .@"bun:app" }, - .{ "bun:ffi", .@"bun:ffi" }, - .{ "bun:jsc", .@"bun:jsc" }, - .{ "bun:main", .@"bun:main" }, - .{ "bun:test", .@"bun:test" }, - .{ "bun:sqlite", .@"bun:sqlite" }, - .{ "bun:wrap", .@"bun:wrap" }, - .{ "bun:internal-for-testing", .@"bun:internal-for-testing" }, - // Node.js - .{ "node:assert", .@"node:assert" }, - .{ "node:assert/strict", .@"node:assert/strict" }, - .{ "node:async_hooks", .@"node:async_hooks" }, - .{ "node:buffer", .@"node:buffer" }, - .{ "node:child_process", .@"node:child_process" }, - .{ "node:cluster", .@"node:cluster" }, - .{ "node:console", .@"node:console" }, - .{ "node:constants", .@"node:constants" }, - .{ "node:crypto", .@"node:crypto" }, - .{ "node:dgram", .@"node:dgram" }, - .{ "node:diagnostics_channel", .@"node:diagnostics_channel" }, - .{ "node:dns", .@"node:dns" }, - .{ "node:dns/promises", .@"node:dns/promises" }, - .{ "node:domain", .@"node:domain" }, - .{ "node:events", .@"node:events" }, - .{ "node:fs", .@"node:fs" }, - .{ "node:fs/promises", .@"node:fs/promises" }, - .{ "node:http", .@"node:http" }, - .{ "node:http2", .@"node:http2" }, - .{ "node:https", .@"node:https" }, - .{ "node:inspector", .@"node:inspector" }, - .{ "node:module", .@"node:module" }, - .{ "node:net", .@"node:net" }, - .{ "node:readline", .@"node:readline" }, - .{ "node:test", .@"node:test" }, - .{ "node:os", .@"node:os" }, - .{ "node:path", .@"node:path" }, - .{ "node:path/posix", .@"node:path/posix" }, - .{ "node:path/win32", .@"node:path/win32" }, - .{ "node:perf_hooks", .@"node:perf_hooks" }, - .{ "node:process", .@"node:process" }, - .{ "node:punycode", .@"node:punycode" }, - .{ "node:querystring", .@"node:querystring" }, - .{ "node:readline", .@"node:readline" }, - .{ "node:readline/promises", .@"node:readline/promises" }, - .{ "node:repl", .@"node:repl" }, - .{ "node:stream", .@"node:stream" }, - .{ "node:stream/consumers", .@"node:stream/consumers" }, - .{ "node:stream/promises", 
.@"node:stream/promises" }, - .{ "node:stream/web", .@"node:stream/web" }, - .{ "node:string_decoder", .@"node:string_decoder" }, - .{ "node:timers", .@"node:timers" }, - .{ "node:timers/promises", .@"node:timers/promises" }, - .{ "node:tls", .@"node:tls" }, - .{ "node:trace_events", .@"node:trace_events" }, - .{ "node:tty", .@"node:tty" }, - .{ "node:url", .@"node:url" }, - .{ "node:util", .@"node:util" }, - .{ "node:util/types", .@"node:util/types" }, - .{ "node:v8", .@"node:v8" }, - .{ "node:vm", .@"node:vm" }, - .{ "node:wasi", .@"node:wasi" }, - .{ "node:worker_threads", .@"node:worker_threads" }, - .{ "node:zlib", .@"node:zlib" }, - .{ "node:_stream_duplex", .@"node:_stream_duplex" }, - .{ "node:_stream_passthrough", .@"node:_stream_passthrough" }, - .{ "node:_stream_readable", .@"node:_stream_readable" }, - .{ "node:_stream_transform", .@"node:_stream_transform" }, - .{ "node:_stream_wrap", .@"node:_stream_wrap" }, - .{ "node:_stream_writable", .@"node:_stream_writable" }, - .{ "node:_tls_common", .@"node:_tls_common" }, - .{ "node:_http_agent", .@"node:_http_agent" }, - .{ "node:_http_client", .@"node:_http_client" }, - .{ "node:_http_common", .@"node:_http_common" }, - .{ "node:_http_incoming", .@"node:_http_incoming" }, - .{ "node:_http_outgoing", .@"node:_http_outgoing" }, - .{ "node:_http_server", .@"node:_http_server" }, - - .{ "node-fetch", HardcodedModule.@"node-fetch" }, - .{ "isomorphic-fetch", HardcodedModule.@"isomorphic-fetch" }, - .{ "undici", HardcodedModule.undici }, - .{ "ws", HardcodedModule.ws }, - .{ "@vercel/fetch", HardcodedModule.vercel_fetch }, - .{ "utf-8-validate", HardcodedModule.@"utf-8-validate" }, - .{ "abort-controller", HardcodedModule.@"abort-controller" }, - }); - - /// Contains the list of built-in modules from the perspective of the module - /// loader. This logic is duplicated for `isBuiltinModule` and the like. 
- pub const Alias = struct { - path: [:0]const u8, - tag: ImportRecord.Tag = .builtin, - node_builtin: bool = false, - node_only_prefix: bool = false, - - fn nodeEntry(path: [:0]const u8) struct { string, Alias } { - return .{ - path, - .{ - .path = if (path.len > 5 and std.mem.eql(u8, path[0..5], "node:")) path else "node:" ++ path, - .node_builtin = true, - }, - }; - } - fn nodeEntryOnlyPrefix(path: [:0]const u8) struct { string, Alias } { - return .{ - path, - .{ - .path = if (path.len > 5 and std.mem.eql(u8, path[0..5], "node:")) path else "node:" ++ path, - .node_builtin = true, - .node_only_prefix = true, - }, - }; - } - fn entry(path: [:0]const u8) struct { string, Alias } { - return .{ path, .{ .path = path } }; - } - - // Applied to both --target=bun and --target=node - const common_alias_kvs = [_]struct { string, Alias }{ - nodeEntry("node:assert"), - nodeEntry("node:assert/strict"), - nodeEntry("node:async_hooks"), - nodeEntry("node:buffer"), - nodeEntry("node:child_process"), - nodeEntry("node:cluster"), - nodeEntry("node:console"), - nodeEntry("node:constants"), - nodeEntry("node:crypto"), - nodeEntry("node:dgram"), - nodeEntry("node:diagnostics_channel"), - nodeEntry("node:dns"), - nodeEntry("node:dns/promises"), - nodeEntry("node:domain"), - nodeEntry("node:events"), - nodeEntry("node:fs"), - nodeEntry("node:fs/promises"), - nodeEntry("node:http"), - nodeEntry("node:http2"), - nodeEntry("node:https"), - nodeEntry("node:inspector"), - nodeEntry("node:module"), - nodeEntry("node:net"), - nodeEntry("node:os"), - nodeEntry("node:path"), - nodeEntry("node:path/posix"), - nodeEntry("node:path/win32"), - nodeEntry("node:perf_hooks"), - nodeEntry("node:process"), - nodeEntry("node:punycode"), - nodeEntry("node:querystring"), - nodeEntry("node:readline"), - nodeEntry("node:readline/promises"), - nodeEntry("node:repl"), - nodeEntry("node:stream"), - nodeEntry("node:stream/consumers"), - nodeEntry("node:stream/promises"), - nodeEntry("node:stream/web"), - nodeEntry("node:string_decoder"), - nodeEntry("node:timers"), - nodeEntry("node:timers/promises"), - nodeEntry("node:tls"), - nodeEntry("node:trace_events"), - nodeEntry("node:tty"), - nodeEntry("node:url"), - nodeEntry("node:util"), - nodeEntry("node:util/types"), - nodeEntry("node:v8"), - nodeEntry("node:vm"), - nodeEntry("node:wasi"), - nodeEntry("node:worker_threads"), - nodeEntry("node:zlib"), - // New Node.js builtins only resolve from the prefixed one. 
- nodeEntryOnlyPrefix("node:test"), - - nodeEntry("assert"), - nodeEntry("assert/strict"), - nodeEntry("async_hooks"), - nodeEntry("buffer"), - nodeEntry("child_process"), - nodeEntry("cluster"), - nodeEntry("console"), - nodeEntry("constants"), - nodeEntry("crypto"), - nodeEntry("dgram"), - nodeEntry("diagnostics_channel"), - nodeEntry("dns"), - nodeEntry("dns/promises"), - nodeEntry("domain"), - nodeEntry("events"), - nodeEntry("fs"), - nodeEntry("fs/promises"), - nodeEntry("http"), - nodeEntry("http2"), - nodeEntry("https"), - nodeEntry("inspector"), - nodeEntry("module"), - nodeEntry("net"), - nodeEntry("os"), - nodeEntry("path"), - nodeEntry("path/posix"), - nodeEntry("path/win32"), - nodeEntry("perf_hooks"), - nodeEntry("process"), - nodeEntry("punycode"), - nodeEntry("querystring"), - nodeEntry("readline"), - nodeEntry("readline/promises"), - nodeEntry("repl"), - nodeEntry("stream"), - nodeEntry("stream/consumers"), - nodeEntry("stream/promises"), - nodeEntry("stream/web"), - nodeEntry("string_decoder"), - nodeEntry("timers"), - nodeEntry("timers/promises"), - nodeEntry("tls"), - nodeEntry("trace_events"), - nodeEntry("tty"), - nodeEntry("url"), - nodeEntry("util"), - nodeEntry("util/types"), - nodeEntry("v8"), - nodeEntry("vm"), - nodeEntry("wasi"), - nodeEntry("worker_threads"), - nodeEntry("zlib"), - - nodeEntry("node:_http_agent"), - nodeEntry("node:_http_client"), - nodeEntry("node:_http_common"), - nodeEntry("node:_http_incoming"), - nodeEntry("node:_http_outgoing"), - nodeEntry("node:_http_server"), - - nodeEntry("_http_agent"), - nodeEntry("_http_client"), - nodeEntry("_http_common"), - nodeEntry("_http_incoming"), - nodeEntry("_http_outgoing"), - nodeEntry("_http_server"), - - // sys is a deprecated alias for util - .{ "sys", .{ .path = "node:util", .node_builtin = true } }, - .{ "node:sys", .{ .path = "node:util", .node_builtin = true } }, - - // These are returned in builtinModules, but probably not many - // packages use them so we will just alias them. 
- .{ "node:_stream_duplex", .{ .path = "node:_stream_duplex", .node_builtin = true } }, - .{ "node:_stream_passthrough", .{ .path = "node:_stream_passthrough", .node_builtin = true } }, - .{ "node:_stream_readable", .{ .path = "node:_stream_readable", .node_builtin = true } }, - .{ "node:_stream_transform", .{ .path = "node:_stream_transform", .node_builtin = true } }, - .{ "node:_stream_wrap", .{ .path = "node:_stream_wrap", .node_builtin = true } }, - .{ "node:_stream_writable", .{ .path = "node:_stream_writable", .node_builtin = true } }, - .{ "node:_tls_wrap", .{ .path = "node:tls", .node_builtin = true } }, - .{ "node:_tls_common", .{ .path = "node:_tls_common", .node_builtin = true } }, - .{ "_stream_duplex", .{ .path = "node:_stream_duplex", .node_builtin = true } }, - .{ "_stream_passthrough", .{ .path = "node:_stream_passthrough", .node_builtin = true } }, - .{ "_stream_readable", .{ .path = "node:_stream_readable", .node_builtin = true } }, - .{ "_stream_transform", .{ .path = "node:_stream_transform", .node_builtin = true } }, - .{ "_stream_wrap", .{ .path = "node:_stream_wrap", .node_builtin = true } }, - .{ "_stream_writable", .{ .path = "node:_stream_writable", .node_builtin = true } }, - .{ "_tls_wrap", .{ .path = "node:tls", .node_builtin = true } }, - .{ "_tls_common", .{ .path = "node:_tls_common", .node_builtin = true } }, - }; - - const bun_extra_alias_kvs = [_]struct { string, Alias }{ - .{ "bun", .{ .path = "bun", .tag = .bun } }, - .{ "bun:test", .{ .path = "bun:test" } }, - .{ "bun:app", .{ .path = "bun:app" } }, - .{ "bun:ffi", .{ .path = "bun:ffi" } }, - .{ "bun:jsc", .{ .path = "bun:jsc" } }, - .{ "bun:sqlite", .{ .path = "bun:sqlite" } }, - .{ "bun:wrap", .{ .path = "bun:wrap" } }, - .{ "bun:internal-for-testing", .{ .path = "bun:internal-for-testing" } }, - .{ "ffi", .{ .path = "bun:ffi" } }, - - // inspector/promises is not implemented, it is an alias of inspector - .{ "node:inspector/promises", .{ .path = "node:inspector", .node_builtin = true } }, - .{ "inspector/promises", .{ .path = "node:inspector", .node_builtin = true } }, - - // Thirdparty packages we override - .{ "@vercel/fetch", .{ .path = "@vercel/fetch" } }, - .{ "isomorphic-fetch", .{ .path = "isomorphic-fetch" } }, - .{ "node-fetch", .{ .path = "node-fetch" } }, - .{ "undici", .{ .path = "undici" } }, - .{ "utf-8-validate", .{ .path = "utf-8-validate" } }, - .{ "ws", .{ .path = "ws" } }, - .{ "ws/lib/websocket", .{ .path = "ws" } }, - - // Polyfills we force to native - .{ "abort-controller", .{ .path = "abort-controller" } }, - .{ "abort-controller/polyfill", .{ .path = "abort-controller" } }, - - // To force Next.js to not use bundled dependencies. 
- .{ "next/dist/compiled/ws", .{ .path = "ws" } }, - .{ "next/dist/compiled/node-fetch", .{ .path = "node-fetch" } }, - .{ "next/dist/compiled/undici", .{ .path = "undici" } }, - }; - - const bun_test_extra_alias_kvs = [_]struct { string, Alias }{ - .{ "@jest/globals", .{ .path = "bun:test" } }, - .{ "vitest", .{ .path = "bun:test" } }, - }; - - const node_extra_alias_kvs = [_]struct { string, Alias }{ - nodeEntry("node:inspector/promises"), - nodeEntry("inspector/promises"), - }; - - const node_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ node_extra_alias_kvs); - const bun_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ bun_extra_alias_kvs); - const bun_test_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ bun_extra_alias_kvs ++ bun_test_extra_alias_kvs); - - const Cfg = struct { rewrite_jest_for_tests: bool = false }; - pub fn has(name: []const u8, target: options.Target, cfg: Cfg) bool { - return get(name, target, cfg) != null; - } - - pub fn get(name: []const u8, target: options.Target, cfg: Cfg) ?Alias { - if (target.isBun()) { - if (cfg.rewrite_jest_for_tests) { - return bun_test_aliases.get(name); - } else { - return bun_aliases.get(name); - } - } else if (target.isNode()) { - return node_aliases.get(name); - } - return null; - } - }; -}; - /// Support embedded .node files export fn Bun__resolveEmbeddedNodeFile(vm: *VirtualMachine, in_out_str: *bun.String) bool { if (vm.standalone_module_graph == null) return false; @@ -3059,27 +1319,24 @@ const debug = Output.scoped(.ModuleLoader, .hidden); const string = []const u8; -const Dependency = @import("../install/dependency.zig"); const Fs = @import("../fs.zig"); const Runtime = @import("../runtime.zig"); +const ast = @import("../import_record.zig"); const node_module_module = @import("./bindings/NodeModuleModule.zig"); const std = @import("std"); const panic = std.debug.panic; -const ast = @import("../import_record.zig"); -const ImportRecord = ast.ImportRecord; - -const Install = @import("../install/install.zig"); -const PackageManager = @import("../install/install.zig").PackageManager; - const options = @import("../options.zig"); const ModuleType = options.ModuleType; const MacroRemap = @import("../resolver/package_json.zig").MacroMap; const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; +const dumpSource = @import("./RuntimeTranspilerStore.zig").dumpSource; +const dumpSourceString = @import("./RuntimeTranspilerStore.zig").dumpSourceString; +const setBreakPointOnFirstLine = @import("./RuntimeTranspilerStore.zig").setBreakPointOnFirstLine; + const bun = @import("bun"); -const Async = bun.Async; const Environment = bun.Environment; const MutableString = bun.MutableString; const Output = bun.Output; diff --git a/src/bun.js/RuntimeTranspilerStore.zig b/src/bun.js/RuntimeTranspilerStore.zig new file mode 100644 index 0000000000..695a3f10a1 --- /dev/null +++ b/src/bun.js/RuntimeTranspilerStore.zig @@ -0,0 +1,626 @@ +const debug = Output.scoped(.RuntimeTranspilerStore, .hidden); + +const string = []const u8; + +pub fn dumpSource(vm: *VirtualMachine, specifier: string, printer: anytype) void { + dumpSourceString(vm, specifier, printer.ctx.getWritten()); +} + +pub fn dumpSourceString(vm: *VirtualMachine, specifier: string, written: []const u8) void { + dumpSourceStringFailiable(vm, specifier, written) catch |e| { + Output.debugWarn("Failed to dump source string: {}", .{e}); + }; +} + +pub fn dumpSourceStringFailiable(vm: *VirtualMachine, specifier: string, written: []const u8) !void { + if 
(!Environment.isDebug) return; + if (bun.feature_flag.BUN_DEBUG_NO_DUMP.get()) return; + + const BunDebugHolder = struct { + pub var dir: ?std.fs.Dir = null; + pub var lock: bun.Mutex = .{}; + }; + + BunDebugHolder.lock.lock(); + defer BunDebugHolder.lock.unlock(); + + const dir = BunDebugHolder.dir orelse dir: { + const base_name = switch (Environment.os) { + else => "/tmp/bun-debug-src/", + .windows => brk: { + const temp = bun.fs.FileSystem.RealFS.platformTempDir(); + var win_temp_buffer: bun.PathBuffer = undefined; + @memcpy(win_temp_buffer[0..temp.len], temp); + const suffix = "\\bun-debug-src"; + @memcpy(win_temp_buffer[temp.len .. temp.len + suffix.len], suffix); + win_temp_buffer[temp.len + suffix.len] = 0; + break :brk win_temp_buffer[0 .. temp.len + suffix.len :0]; + }, + }; + const dir = try std.fs.cwd().makeOpenPath(base_name, .{}); + BunDebugHolder.dir = dir; + break :dir dir; + }; + + if (std.fs.path.dirname(specifier)) |dir_path| { + const root_len = switch (Environment.os) { + else => "/".len, + .windows => bun.path.windowsFilesystemRoot(dir_path).len, + }; + var parent = try dir.makeOpenPath(dir_path[root_len..], .{}); + defer parent.close(); + parent.writeFile(.{ + .sub_path = std.fs.path.basename(specifier), + .data = written, + }) catch |e| { + Output.debugWarn("Failed to dump source string: writeFile {}", .{e}); + return; + }; + if (vm.source_mappings.get(specifier)) |mappings| { + defer mappings.deref(); + const map_path = bun.handleOom(std.mem.concat(bun.default_allocator, u8, &.{ std.fs.path.basename(specifier), ".map" })); + defer bun.default_allocator.free(map_path); + const file = try parent.createFile(map_path, .{}); + defer file.close(); + + const source_file = parent.readFileAlloc( + bun.default_allocator, + specifier, + std.math.maxInt(u64), + ) catch ""; + defer bun.default_allocator.free(source_file); + + var bufw = std.io.bufferedWriter(file.writer()); + const w = bufw.writer(); + try w.print( + \\{{ + \\ "version": 3, + \\ "file": {}, + \\ "sourceRoot": "", + \\ "sources": [{}], + \\ "sourcesContent": [{}], + \\ "names": [], + \\ "mappings": "{}" + \\}} + , .{ + bun.fmt.formatJSONStringUTF8(std.fs.path.basename(specifier), .{}), + bun.fmt.formatJSONStringUTF8(specifier, .{}), + bun.fmt.formatJSONStringUTF8(source_file, .{}), + mappings.formatVLQs(), + }); + try bufw.flush(); + } + } else { + dir.writeFile(.{ + .sub_path = std.fs.path.basename(specifier), + .data = written, + }) catch return; + } +} + +pub fn setBreakPointOnFirstLine() bool { + const s = struct { + var set_break_point: std.atomic.Value(bool) = std.atomic.Value(bool).init(true); + }; + return s.set_break_point.swap(false, .seq_cst); +} + +pub const RuntimeTranspilerStore = struct { + generation_number: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), + store: TranspilerJob.Store, + enabled: bool = true, + queue: Queue = Queue{}, + + pub const Queue = bun.UnboundedQueue(TranspilerJob, .next); + + pub fn init() RuntimeTranspilerStore { + return RuntimeTranspilerStore{ + .store = TranspilerJob.Store.init(bun.typedAllocator(TranspilerJob)), + }; + } + + pub fn runFromJSThread(this: *RuntimeTranspilerStore, event_loop: *jsc.EventLoop, global: *jsc.JSGlobalObject, vm: *jsc.VirtualMachine) void { + var batch = this.queue.popBatch(); + const jsc_vm = vm.jsc_vm; + var iter = batch.iterator(); + if (iter.next()) |job| { + // we run just one job first to see if there are more + job.runFromJSThread() catch |err| global.reportUncaughtExceptionFromError(err); + } else { + return; + } + while 
(iter.next()) |job| { + // if there are more, we need to drain the microtasks from the previous run + event_loop.drainMicrotasksWithGlobal(global, jsc_vm) catch return; + job.runFromJSThread() catch |err| global.reportUncaughtExceptionFromError(err); + } + + // immediately after this is called, the microtasks will be drained again. + } + + pub fn transpile( + this: *RuntimeTranspilerStore, + vm: *VirtualMachine, + globalObject: *JSGlobalObject, + input_specifier: bun.String, + path: Fs.Path, + referrer: bun.String, + loader: bun.options.Loader, + package_json: ?*const PackageJSON, + ) *anyopaque { + var job: *TranspilerJob = this.store.get(); + const owned_path = Fs.Path.init(bun.default_allocator.dupe(u8, path.text) catch unreachable); + const promise = jsc.JSInternalPromise.create(globalObject); + + // NOTE: DirInfo should already be cached since module loading happens + // after module resolution, so this should be cheap + var resolved_source = ResolvedSource{}; + if (package_json) |pkg| { + switch (pkg.module_type) { + .cjs => { + resolved_source.tag = .package_json_type_commonjs; + resolved_source.is_commonjs_module = true; + }, + .esm => resolved_source.tag = .package_json_type_module, + .unknown => {}, + } + } + + job.* = TranspilerJob{ + .non_threadsafe_input_specifier = input_specifier, + .path = owned_path, + .globalThis = globalObject, + .non_threadsafe_referrer = referrer, + .vm = vm, + .log = logger.Log.init(bun.default_allocator), + .loader = loader, + .promise = .create(JSValue.fromCell(promise), globalObject), + .poll_ref = .{}, + .fetcher = TranspilerJob.Fetcher{ + .file = {}, + }, + .resolved_source = resolved_source, + .generation_number = this.generation_number.load(.seq_cst), + }; + if (comptime Environment.allow_assert) + debug("transpile({s}, {s}, async)", .{ path.text, @tagName(job.loader) }); + job.schedule(); + return promise; + } + + pub const TranspilerJob = struct { + path: Fs.Path, + non_threadsafe_input_specifier: String, + non_threadsafe_referrer: String, + loader: options.Loader, + promise: jsc.Strong.Optional = .empty, + vm: *VirtualMachine, + globalThis: *JSGlobalObject, + fetcher: Fetcher, + poll_ref: Async.KeepAlive = .{}, + generation_number: u32 = 0, + log: logger.Log, + parse_error: ?anyerror = null, + resolved_source: ResolvedSource = ResolvedSource{}, + work_task: jsc.WorkPoolTask = .{ .callback = runFromWorkerThread }, + next: ?*TranspilerJob = null, + + pub const Store = bun.HiveArray(TranspilerJob, if (bun.heap_breakdown.enabled) 0 else 64).Fallback; + + pub const Fetcher = union(enum) { + virtual_module: bun.String, + file: void, + + pub fn deinit(this: *@This()) void { + if (this.* == .virtual_module) { + this.virtual_module.deref(); + } + } + }; + + pub fn deinit(this: *TranspilerJob) void { + bun.default_allocator.free(this.path.text); + + this.poll_ref.disable(); + this.fetcher.deinit(); + this.loader = options.Loader.file; + this.non_threadsafe_input_specifier.deref(); + this.non_threadsafe_referrer.deref(); + this.path = Fs.Path.empty; + this.log.deinit(); + this.promise.deinit(); + this.globalThis = undefined; + } + + threadlocal var ast_memory_store: ?*js_ast.ASTMemoryAllocator = null; + threadlocal var source_code_printer: ?*js_printer.BufferPrinter = null; + + pub fn dispatchToMainThread(this: *TranspilerJob) void { + this.vm.transpiler_store.queue.push(this); + this.vm.eventLoop().enqueueTaskConcurrent(jsc.ConcurrentTask.createFrom(&this.vm.transpiler_store)); + } + + pub fn runFromJSThread(this: *TranspilerJob) bun.JSError!void { + 
var vm = this.vm; + const promise = this.promise.swap(); + const globalThis = this.globalThis; + this.poll_ref.unref(vm); + + const referrer = this.non_threadsafe_referrer; + this.non_threadsafe_referrer = String.empty; + var log = this.log; + this.log = logger.Log.init(bun.default_allocator); + var resolved_source = this.resolved_source; + const specifier = brk: { + if (this.parse_error != null) { + break :brk bun.String.cloneUTF8(this.path.text); + } + + const out = this.non_threadsafe_input_specifier; + this.non_threadsafe_input_specifier = String.empty; + + bun.debugAssert(resolved_source.source_url.isEmpty()); + bun.debugAssert(resolved_source.specifier.isEmpty()); + resolved_source.source_url = out.createIfDifferent(this.path.text); + resolved_source.specifier = out.dupeRef(); + break :brk out; + }; + + const parse_error = this.parse_error; + + this.promise.deinit(); + this.deinit(); + + _ = vm.transpiler_store.store.put(this); + + try AsyncModule.fulfill(globalThis, promise, &resolved_source, parse_error, specifier, referrer, &log); + } + + pub fn schedule(this: *TranspilerJob) void { + this.poll_ref.ref(this.vm); + jsc.WorkPool.schedule(&this.work_task); + } + + pub fn runFromWorkerThread(work_task: *jsc.WorkPoolTask) void { + @as(*TranspilerJob, @fieldParentPtr("work_task", work_task)).run(); + } + + pub fn run(this: *TranspilerJob) void { + var arena = bun.ArenaAllocator.init(bun.default_allocator); + defer arena.deinit(); + const allocator = arena.allocator(); + + defer this.dispatchToMainThread(); + if (this.generation_number != this.vm.transpiler_store.generation_number.load(.monotonic)) { + this.parse_error = error.TranspilerJobGenerationMismatch; + return; + } + + if (ast_memory_store == null) { + ast_memory_store = bun.handleOom(bun.default_allocator.create(js_ast.ASTMemoryAllocator)); + ast_memory_store.?.* = js_ast.ASTMemoryAllocator{ + .allocator = allocator, + .previous = null, + }; + } + + var ast_scope = ast_memory_store.?.enter(allocator); + defer ast_scope.exit(); + + const path = this.path; + const specifier = this.path.text; + const loader = this.loader; + + var cache = jsc.RuntimeTranspilerCache{ + .output_code_allocator = allocator, + .sourcemap_allocator = bun.default_allocator, + }; + var log = logger.Log.init(allocator); + defer { + this.log = logger.Log.init(bun.default_allocator); + bun.handleOom(log.cloneToWithRecycled(&this.log, true)); + } + var vm = this.vm; + var transpiler: bun.Transpiler = undefined; + transpiler = vm.transpiler; + transpiler.setAllocator(allocator); + transpiler.setLog(&log); + transpiler.resolver.opts = transpiler.options; + transpiler.macro_context = null; + transpiler.linker.resolver = &transpiler.resolver; + + var fd: ?StoredFileDescriptorType = null; + var package_json: ?*PackageJSON = null; + const hash = bun.Watcher.getHash(path.text); + + switch (vm.bun_watcher) { + .hot, .watch => { + if (vm.bun_watcher.indexOf(hash)) |index| { + const watcher_fd = vm.bun_watcher.watchlist().items(.fd)[index]; + fd = if (watcher_fd.stdioTag() == null) watcher_fd else null; + package_json = vm.bun_watcher.watchlist().items(.package_json)[index]; + } + }, + else => {}, + } + + // this should be a cheap lookup because 24 bytes == 8 * 3 so it's read 3 machine words + const is_node_override = strings.hasPrefixComptime(specifier, node_fallbacks.import_path); + + const macro_remappings = if (vm.macro_mode or !vm.has_any_macro_remappings or is_node_override) + MacroRemap{} + else + transpiler.options.macro_remap; + + var fallback_source: 
logger.Source = undefined; + + // Usually, we want to close the input file automatically. + // + // If we're re-using the file descriptor from the fs watcher + // Do not close it because that will break the kqueue-based watcher + // + var should_close_input_file_fd = fd == null; + + var input_file_fd: StoredFileDescriptorType = .invalid; + + const is_main = vm.main.len == path.text.len and + vm.main_hash == hash and + strings.eqlLong(vm.main, path.text, false); + + const module_type: ModuleType = switch (this.resolved_source.tag) { + .package_json_type_commonjs => .cjs, + .package_json_type_module => .esm, + else => .unknown, + }; + + var parse_options = Transpiler.ParseOptions{ + .allocator = allocator, + .path = path, + .loader = loader, + .dirname_fd = .invalid, + .file_descriptor = fd, + .file_fd_ptr = &input_file_fd, + .file_hash = hash, + .macro_remappings = macro_remappings, + .jsx = transpiler.options.jsx, + .emit_decorator_metadata = transpiler.options.emit_decorator_metadata, + .virtual_source = null, + .dont_bundle_twice = true, + .allow_commonjs = true, + .inject_jest_globals = transpiler.options.rewrite_jest_for_tests, + .set_breakpoint_on_first_line = vm.debugger != null and + vm.debugger.?.set_breakpoint_on_first_line and + is_main and + setBreakPointOnFirstLine(), + .runtime_transpiler_cache = if (!jsc.RuntimeTranspilerCache.is_disabled) &cache else null, + .remove_cjs_module_wrapper = is_main and vm.module_loader.eval_source != null, + .module_type = module_type, + .allow_bytecode_cache = true, + }; + + defer { + if (should_close_input_file_fd and input_file_fd.isValid()) { + input_file_fd.close(); + input_file_fd = .invalid; + } + } + + if (is_node_override) { + if (node_fallbacks.contentsFromPath(specifier)) |code| { + const fallback_path = Fs.Path.initWithNamespace(specifier, "node"); + fallback_source = logger.Source{ .path = fallback_path, .contents = code }; + parse_options.virtual_source = &fallback_source; + } + } + + var parse_result: bun.transpiler.ParseResult = transpiler.parseMaybeReturnFileOnlyAllowSharedBuffer( + parse_options, + null, + false, + false, + ) orelse { + if (vm.isWatcherEnabled()) { + if (input_file_fd.isValid()) { + if (!is_node_override and std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) { + should_close_input_file_fd = false; + _ = vm.bun_watcher.addFile( + input_file_fd, + path.text, + hash, + loader, + .invalid, + package_json, + true, + ); + } + } + } + + this.parse_error = error.ParseError; + + return; + }; + + if (vm.isWatcherEnabled()) { + if (input_file_fd.isValid()) { + if (!is_node_override and + std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) + { + should_close_input_file_fd = false; + _ = vm.bun_watcher.addFile( + input_file_fd, + path.text, + hash, + loader, + .invalid, + package_json, + true, + ); + } + } + } + + if (cache.entry) |*entry| { + vm.source_mappings.putMappings(&parse_result.source, .{ + .list = .{ .items = @constCast(entry.sourcemap), .capacity = entry.sourcemap.len }, + .allocator = bun.default_allocator, + }) catch {}; + + if (comptime Environment.dump_source) { + dumpSourceString(vm, specifier, entry.output_code.byteSlice()); + } + + this.resolved_source = ResolvedSource{ + .allocator = null, + .source_code = switch (entry.output_code) { + .string => entry.output_code.string, + .utf8 => brk: { + const result = bun.String.cloneUTF8(entry.output_code.utf8); + cache.output_code_allocator.free(entry.output_code.utf8); + entry.output_code.utf8 = 
""; + break :brk result; + }, + }, + .is_commonjs_module = entry.metadata.module_type == .cjs, + .tag = this.resolved_source.tag, + }; + + return; + } + + if (parse_result.already_bundled != .none) { + const bytecode_slice = parse_result.already_bundled.bytecodeSlice(); + this.resolved_source = ResolvedSource{ + .allocator = null, + .source_code = bun.String.cloneLatin1(parse_result.source.contents), + .already_bundled = true, + .bytecode_cache = if (bytecode_slice.len > 0) bytecode_slice.ptr else null, + .bytecode_cache_size = bytecode_slice.len, + .is_commonjs_module = parse_result.already_bundled.isCommonJS(), + .tag = this.resolved_source.tag, + }; + this.resolved_source.source_code.ensureHash(); + return; + } + + for (parse_result.ast.import_records.slice()) |*import_record_| { + var import_record: *bun.ImportRecord = import_record_; + + if (HardcodedModule.Alias.get(import_record.path.text, transpiler.options.target, .{ .rewrite_jest_for_tests = transpiler.options.rewrite_jest_for_tests })) |replacement| { + import_record.path.text = replacement.path; + import_record.tag = replacement.tag; + import_record.is_external_without_side_effects = true; + continue; + } + + if (strings.hasPrefixComptime(import_record.path.text, "bun:")) { + import_record.path = Fs.Path.init(import_record.path.text["bun:".len..]); + import_record.path.namespace = "bun"; + import_record.is_external_without_side_effects = true; + } + } + + if (source_code_printer == null) { + const writer = js_printer.BufferWriter.init(bun.default_allocator); + source_code_printer = bun.default_allocator.create(js_printer.BufferPrinter) catch unreachable; + source_code_printer.?.* = js_printer.BufferPrinter.init(writer); + source_code_printer.?.ctx.append_null_byte = false; + } + + var printer = source_code_printer.?.*; + printer.ctx.reset(); + + // Cap buffer size to prevent unbounded growth + const max_buffer_cap = 512 * 1024; + if (printer.ctx.buffer.list.capacity > max_buffer_cap) { + printer.ctx.buffer.deinit(); + const writer = js_printer.BufferWriter.init(bun.default_allocator); + source_code_printer.?.* = js_printer.BufferPrinter.init(writer); + source_code_printer.?.ctx.append_null_byte = false; + printer = source_code_printer.?.*; + } + + { + var mapper = vm.sourceMapHandler(&printer); + defer source_code_printer.?.* = printer; + _ = transpiler.printWithSourceMap( + parse_result, + @TypeOf(&printer), + &printer, + .esm_ascii, + mapper.get(), + ) catch |err| { + this.parse_error = err; + return; + }; + } + + if (comptime Environment.dump_source) { + dumpSource(this.vm, specifier, &printer); + } + + const source_code = brk: { + const written = printer.ctx.getWritten(); + + const result = cache.output_code orelse bun.String.cloneLatin1(written); + + if (written.len > 1024 * 1024 * 2 or vm.smol) { + printer.ctx.buffer.deinit(); + const writer = js_printer.BufferWriter.init(bun.default_allocator); + source_code_printer.?.* = js_printer.BufferPrinter.init(writer); + source_code_printer.?.ctx.append_null_byte = false; + } else { + source_code_printer.?.* = printer; + } + + // In a benchmarking loading @babel/standalone 100 times: + // + // After ensureHash: + // 354.00 ms 4.2% 354.00 ms WTF::StringImpl::hashSlowCase() const + // + // Before ensureHash: + // 506.00 ms 6.1% 506.00 ms WTF::StringImpl::hashSlowCase() const + // + result.ensureHash(); + + break :brk result; + }; + this.resolved_source = ResolvedSource{ + .allocator = null, + .source_code = source_code, + .is_commonjs_module = 
parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs, + .tag = this.resolved_source.tag, + }; + } + }; +}; + +const Fs = @import("../fs.zig"); +const node_fallbacks = @import("../node_fallbacks.zig"); +const std = @import("std"); +const AsyncModule = @import("./AsyncModule.zig").AsyncModule; +const HardcodedModule = @import("./HardcodedModule.zig").HardcodedModule; + +const options = @import("../options.zig"); +const ModuleType = options.ModuleType; + +const MacroRemap = @import("../resolver/package_json.zig").MacroMap; +const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; + +const bun = @import("bun"); +const Async = bun.Async; +const Environment = bun.Environment; +const Output = bun.Output; +const StoredFileDescriptorType = bun.StoredFileDescriptorType; +const String = bun.String; +const Transpiler = bun.Transpiler; +const js_ast = bun.ast; +const js_printer = bun.js_printer; +const logger = bun.logger; +const strings = bun.strings; + +const jsc = bun.jsc; +const JSGlobalObject = bun.jsc.JSGlobalObject; +const JSValue = bun.jsc.JSValue; +const ResolvedSource = bun.jsc.ResolvedSource; +const VirtualMachine = bun.jsc.VirtualMachine; From f58a0662367dcbc400d6aa2b56f82d326dbb550a Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 25 Oct 2025 21:34:24 -0700 Subject: [PATCH 006/102] Update CLAUDE.md --- CLAUDE.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CLAUDE.md b/CLAUDE.md index 5fa59d403c..986bff8ae9 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -76,7 +76,8 @@ test("my feature", async () => { - Use `normalizeBunSnapshot` to normalize snapshot output of the test. - NEVER write tests that check for no "panic" or "uncaught exception" or similar in the test output. That is NOT a valid test. - Use `tempDir` from `"harness"` to create a temporary directory. **Do not** use `tmpdirSync` or `fs.mkdtempSync` to create temporary directories. -- When spawning processes, tests should assert the output BEFORE asserting the exit code. This gives you a more useful error message on test failure. +- When spawning processes, tests should expect(stdout).toBe(...) BEFORE expect(exitCode).toBe(0). This gives you a more useful error message on test failure. +- **CRITICAL**: Do not write flaky tests. Do not use `setTimeout` in tests. Instead, `await` the condition to be met. You are not testing the TIME PASSING, you are testing the CONDITION. - **CRITICAL**: Verify your test fails with `USE_SYSTEM_BUN=1 bun test ` and passes with `bun bd test `. Your test is NOT VALID if it passes with `USE_SYSTEM_BUN=1`. ## Code Architecture From 4c00d8f0168b3e60350059251709768e41ced52c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 25 Oct 2025 22:03:34 -0700 Subject: [PATCH 007/102] deps: update elysia to 1.4.13 (#24085) ## What does this PR do? 
Updates elysia to version 1.4.13

Compare: https://github.com/elysiajs/elysia/compare/1.4.12...1.4.13

Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-vendor.yml)

Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
---
 test/vendor.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/vendor.json b/test/vendor.json
index 05ca430f3a..06a7d08a12 100644
--- a/test/vendor.json
+++ b/test/vendor.json
@@ -2,6 +2,6 @@
   {
     "package": "elysia",
     "repository": "https://github.com/elysiajs/elysia",
-    "tag": "1.4.12"
+    "tag": "1.4.13"
   }
 ]

From a75cef50798950a3801678761960226d1a7046db Mon Sep 17 00:00:00 2001
From: robobun
Date: Sun, 26 Oct 2025 01:28:27 -0700
Subject: [PATCH 008/102] Add comprehensive documentation for JSRef (#24095)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

## Summary
- Adds detailed documentation explaining JSRef's intended usage
- Includes a complete example showing common patterns
- Explains the three states (weak, strong, finalized)
- Provides guidelines on when to use strong vs weak references
- References real examples from the codebase (ServerWebSocket, UDPSocket, MySQLConnection, ValkeyClient)

## Motivation
JSRef is a critical type for managing JavaScript object references from native code, but it lacked comprehensive documentation explaining its usage patterns and lifecycle management. The new documentation makes it clearer how to properly use JSRef to:
- Safely maintain references to JS objects from native code
- Control whether references prevent garbage collection
- Manage the upgrade/downgrade pattern based on object activity

## Test plan
Documentation-only change, no functional changes.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-authored-by: Claude Bot
Co-authored-by: Claude
---
 src/bun.js/bindings/JSRef.zig | 91 +++++++++++++++++++++++++++++++++--
 1 file changed, 88 insertions(+), 3 deletions(-)

diff --git a/src/bun.js/bindings/JSRef.zig b/src/bun.js/bindings/JSRef.zig
index 08928aa73e..a90e0087a7 100644
--- a/src/bun.js/bindings/JSRef.zig
+++ b/src/bun.js/bindings/JSRef.zig
@@ -1,7 +1,92 @@
-/// Holds a reference to a JSValue.
+/// Holds a reference to a JSValue with lifecycle management.
+///
+/// JSRef is used to safely maintain a reference to a JavaScript object from native code,
+/// with explicit control over whether the reference keeps the object alive during garbage collection.
+///
+/// # Common Usage Pattern
+///
+/// JSRef is typically used in native objects that need to maintain a reference to their
+/// corresponding JavaScript wrapper object. 
The reference can be upgraded to "strong" when
+/// the native object has pending work or active connections, and downgraded to "weak" when idle:
+///
+/// ```zig
+/// const MyNativeObject = struct {
+///     this_value: jsc.JSRef = .empty(),
+///     connection: SomeConnection,
+///
+///     pub fn init(globalObject: *jsc.JSGlobalObject) *MyNativeObject {
+///         const this = MyNativeObject.new(.{});
+///         const this_value = this.toJS(globalObject);
+///         // Start with strong ref - object has pending work (initialization)
+///         this.this_value = .initStrong(this_value, globalObject);
+///         return this;
+///     }
+///
+///     fn updateReferenceType(this: *MyNativeObject, globalObject: *jsc.JSGlobalObject) void {
+///         if (this.connection.isActive()) {
+///             // Keep object alive while connection is active
+///             if (this.this_value.isNotEmpty() and this.this_value == .weak) {
+///                 this.this_value.upgrade(globalObject);
+///             }
+///         } else {
+///             // Allow GC when connection is idle
+///             if (this.this_value.isNotEmpty() and this.this_value == .strong) {
+///                 this.this_value.downgrade();
+///             }
+///         }
+///     }
+///
+///     pub fn onMessage(this: *MyNativeObject) void {
+///         // Safely retrieve the JSValue if still alive
+///         const this_value = this.this_value.tryGet() orelse return;
+///         // Use this_value...
+///     }
+///
+///     pub fn finalize(this: *MyNativeObject) void {
+///         // Called when JS object is being garbage collected
+///         this.this_value.finalize();
+///         this.cleanup();
+///     }
+/// };
+/// ```
+///
+/// # States
+///
+/// - **weak**: Holds a JSValue directly. Does NOT prevent garbage collection.
+///   The JSValue may become invalid if the object is collected.
+///   Use `tryGet()` to safely check if the value is still alive.
+///
+/// - **strong**: Holds a Strong reference that prevents garbage collection.
+///   The JavaScript object will stay alive as long as this reference exists.
+///   Must call `deinit()` or `finalize()` to release.
+///
+/// - **finalized**: The reference has been finalized (object was GC'd or explicitly cleaned up).
+///   Indicates the JSValue is no longer valid. `tryGet()` returns null.
+///
+/// # Key Methods
+///
+/// - `initWeak()` / `initStrong()`: Create a new JSRef in weak or strong mode
+/// - `tryGet()`: Safely retrieve the JSValue if still alive (returns null if finalized or empty)
+/// - `upgrade()`: Convert weak → strong to prevent GC
+/// - `downgrade()`: Convert strong → weak to allow GC (keeps the JSValue if still alive)
+/// - `finalize()`: Mark as finalized and release resources (typically called from GC finalizer)
+/// - `deinit()`: Release resources without marking as finalized
+///
+/// # When to Use Strong vs Weak
+///
+/// Use **strong** references when:
+/// - The native object has active operations (network connections, pending requests, timers)
+/// - You need to guarantee the JS object stays alive
+/// - You'll call methods on the JS object from callbacks
+///
+/// Use **weak** references when:
+/// - The native object is idle with no pending work
+/// - The JS object should be GC-able if no other references exist
+/// - You want to allow natural garbage collection
+///
+/// Common pattern: Start strong, downgrade to weak when idle, upgrade to strong when active.
+/// See ServerWebSocket, UDPSocket, MySQLConnection, and ValkeyClient for examples.
 ///
-/// This reference can be either weak (a JSValue) or may be strong, in which
-/// case it prevents the garbage collector from collecting the value.
pub const JSRef = union(enum) { weak: jsc.JSValue, strong: jsc.Strong.Optional, From b7ae21d0bcf27f4a56e4d52b185a1a1eb0923651 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 26 Oct 2025 14:29:27 -0700 Subject: [PATCH 009/102] Mark flaky test as TODO --- test/js/web/fetch/fetch.stream.test.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/test/js/web/fetch/fetch.stream.test.ts b/test/js/web/fetch/fetch.stream.test.ts index bdbd646b65..c52a39252e 100644 --- a/test/js/web/fetch/fetch.stream.test.ts +++ b/test/js/web/fetch/fetch.stream.test.ts @@ -28,7 +28,9 @@ const empty = Buffer.alloc(0); describe.concurrent("fetch() with streaming", () => { [-1, 0, 20, 50, 100].forEach(timeout => { - it(`should be able to fail properly when reading from readable stream with timeout ${timeout}`, async () => { + // This test is flaky. + // Sometimes, we don't throw if signal.abort(). We need to fix that. + it.todo(`should be able to fail properly when reading from readable stream with timeout ${timeout}`, async () => { using server = Bun.serve({ port: 0, async fetch(req) { From b280e8d326c09277c4d000cee713d523cf2c1983 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Mon, 27 Oct 2025 02:37:05 -0700 Subject: [PATCH 010/102] Enable more sanitizers in CI (#24117) ### What does this PR do? We were only enabling UBSAN in debug builds. This was probably a mistake. ### How did you verify your code works? --- cmake/targets/BuildBun.cmake | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 113c61fbff..b5adbc4d43 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -944,7 +944,7 @@ if(NOT WIN32) if (NOT ABI STREQUAL "musl") target_compile_options(${bun} PUBLIC -fsanitize=null - -fsanitize-recover=all + -fno-sanitize-recover=all -fsanitize=bounds -fsanitize=return -fsanitize=nullability-arg @@ -999,6 +999,20 @@ if(NOT WIN32) ) if(ENABLE_ASAN) + target_compile_options(${bun} PUBLIC + -fsanitize=null + -fno-sanitize-recover=all + -fsanitize=bounds + -fsanitize=return + -fsanitize=nullability-arg + -fsanitize=nullability-assign + -fsanitize=nullability-return + -fsanitize=returns-nonnull-attribute + -fsanitize=unreachable + ) + target_link_libraries(${bun} PRIVATE + -fsanitize=null + ) target_compile_options(${bun} PUBLIC -fsanitize=address) target_link_libraries(${bun} PUBLIC -fsanitize=address) endif() From 1e849b905a5d8261ebcaa226a40feb0d8ca8f817 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Mon, 27 Oct 2025 11:26:09 -0800 Subject: [PATCH 011/102] zig: bun.sourcemap -> bun.SourceMap (#23477) --- src/StandaloneModuleGraph.zig | 2 +- src/bake/DevServer.zig | 2 +- src/bake/DevServer/IncrementalGraph.zig | 6 ++--- src/bake/DevServer/PackedMap.zig | 2 +- src/bake/DevServer/SourceMapStore.zig | 2 +- src/bun.js/SavedSourceMap.zig | 6 ++--- src/bun.js/VirtualMachine.zig | 2 +- .../bindings/generated_classes_list.zig | 2 +- src/bun.js/virtual_machine_exports.zig | 6 ++--- src/bun.zig | 4 ++-- src/bundler/Chunk.zig | 24 +++++++++---------- src/bundler/LinkerContext.zig | 20 ++++++++-------- src/bundler/LinkerGraph.zig | 2 +- src/bundler/bundle_v2.zig | 16 ++++++------- src/bundler/linker_context/computeChunks.zig | 12 +++++----- .../linker_context/postProcessCSSChunk.zig | 2 +- .../linker_context/postProcessJSChunk.zig | 2 +- .../linker_context/writeOutputFilesToDisk.zig | 1 - src/cli/test_command.zig | 20 ++++++++-------- src/js_printer.zig | 2 +- 
src/sourcemap/CodeCoverage.zig | 2 +- src/sourcemap/JSSourceMap.zig | 10 ++++---- 22 files changed, 73 insertions(+), 74 deletions(-) diff --git a/src/StandaloneModuleGraph.zig b/src/StandaloneModuleGraph.zig index 49c659e0dd..97cdca6bd0 100644 --- a/src/StandaloneModuleGraph.zig +++ b/src/StandaloneModuleGraph.zig @@ -1551,7 +1551,7 @@ const w = std.os.windows; const bun = @import("bun"); const Environment = bun.Environment; const Output = bun.Output; -const SourceMap = bun.sourcemap; +const SourceMap = bun.SourceMap; const StringPointer = bun.StringPointer; const Syscall = bun.sys; const macho = bun.macho; diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index e85e874890..73756e46ef 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -4664,7 +4664,7 @@ fn extractPathnameFromUrl(url: []const u8) []const u8 { const bun = @import("bun"); const Environment = bun.Environment; const Output = bun.Output; -const SourceMap = bun.sourcemap; +const SourceMap = bun.SourceMap; const Watcher = bun.Watcher; const assert = bun.assert; const bake = bun.bake; diff --git a/src/bake/DevServer/IncrementalGraph.zig b/src/bake/DevServer/IncrementalGraph.zig index 837cdf2925..6cb775c573 100644 --- a/src/bake/DevServer/IncrementalGraph.zig +++ b/src/bake/DevServer/IncrementalGraph.zig @@ -2034,6 +2034,9 @@ const DynamicBitSetUnmanaged = bun.bit_set.DynamicBitSetUnmanaged; const Log = bun.logger.Log; const useAllFields = bun.meta.useAllFields; +const SourceMap = bun.SourceMap; +const VLQ = SourceMap.VLQ; + const DevServer = bake.DevServer; const ChunkKind = DevServer.ChunkKind; const DevAllocator = DevServer.DevAllocator; @@ -2059,9 +2062,6 @@ const Chunk = bun.bundle_v2.Chunk; const Owned = bun.ptr.Owned; const Shared = bun.ptr.Shared; -const SourceMap = bun.sourcemap; -const VLQ = SourceMap.VLQ; - const std = @import("std"); const ArrayListUnmanaged = std.ArrayListUnmanaged; const AutoArrayHashMapUnmanaged = std.AutoArrayHashMapUnmanaged; diff --git a/src/bake/DevServer/PackedMap.zig b/src/bake/DevServer/PackedMap.zig index 1fc9f75105..0821237651 100644 --- a/src/bake/DevServer/PackedMap.zig +++ b/src/bake/DevServer/PackedMap.zig @@ -114,7 +114,7 @@ pub const Shared = union(enum) { const bun = @import("bun"); const Environment = bun.Environment; -const SourceMap = bun.sourcemap; +const SourceMap = bun.SourceMap; const assert = bun.assert; const assert_eql = bun.assert_eql; const Chunk = bun.bundle_v2.Chunk; diff --git a/src/bake/DevServer/SourceMapStore.zig b/src/bake/DevServer/SourceMapStore.zig index a2de1f35bc..cd2a470130 100644 --- a/src/bake/DevServer/SourceMapStore.zig +++ b/src/bake/DevServer/SourceMapStore.zig @@ -544,7 +544,7 @@ pub fn getParsedSourceMap(store: *Self, script_id: Key, arena: Allocator, gpa: A const bun = @import("bun"); const Environment = bun.Environment; const Output = bun.Output; -const SourceMap = bun.sourcemap; +const SourceMap = bun.SourceMap; const StringJoiner = bun.StringJoiner; const assert = bun.assert; const bake = bun.bake; diff --git a/src/bun.js/SavedSourceMap.zig b/src/bun.js/SavedSourceMap.zig index 7ad60b459e..64567ac553 100644 --- a/src/bun.js/SavedSourceMap.zig +++ b/src/bun.js/SavedSourceMap.zig @@ -384,8 +384,8 @@ const Output = bun.Output; const js_printer = bun.js_printer; const logger = bun.logger; -const SourceMap = bun.sourcemap; -const BakeSourceProvider = bun.sourcemap.BakeSourceProvider; -const DevServerSourceProvider = bun.sourcemap.DevServerSourceProvider; +const SourceMap = bun.SourceMap; +const BakeSourceProvider = 
bun.SourceMap.BakeSourceProvider; +const DevServerSourceProvider = bun.SourceMap.DevServerSourceProvider; const ParsedSourceMap = SourceMap.ParsedSourceMap; const SourceProviderMap = SourceMap.SourceProviderMap; diff --git a/src/bun.js/VirtualMachine.zig b/src/bun.js/VirtualMachine.zig index 61f3fa5ae6..b5fe09398e 100644 --- a/src/bun.js/VirtualMachine.zig +++ b/src/bun.js/VirtualMachine.zig @@ -3711,7 +3711,7 @@ const Global = bun.Global; const MutableString = bun.MutableString; const Ordinal = bun.Ordinal; const Output = bun.Output; -const SourceMap = bun.sourcemap; +const SourceMap = bun.SourceMap; const String = bun.String; const Transpiler = bun.Transpiler; const Watcher = bun.Watcher; diff --git a/src/bun.js/bindings/generated_classes_list.zig b/src/bun.js/bindings/generated_classes_list.zig index 41705dbd11..f5e3655bc6 100644 --- a/src/bun.js/bindings/generated_classes_list.zig +++ b/src/bun.js/bindings/generated_classes_list.zig @@ -88,7 +88,7 @@ pub const Classes = struct { pub const RedisClient = api.Valkey; pub const BlockList = api.BlockList; pub const NativeZstd = api.NativeZstd; - pub const SourceMap = bun.sourcemap.JSSourceMap; + pub const SourceMap = bun.SourceMap.JSSourceMap; }; const bun = @import("bun"); diff --git a/src/bun.js/virtual_machine_exports.zig b/src/bun.js/virtual_machine_exports.zig index 63606ad9ed..c7f01734bc 100644 --- a/src/bun.js/virtual_machine_exports.zig +++ b/src/bun.js/virtual_machine_exports.zig @@ -224,10 +224,10 @@ const std = @import("std"); const bun = @import("bun"); const PluginRunner = bun.transpiler.PluginRunner; +const BakeSourceProvider = bun.SourceMap.BakeSourceProvider; +const DevServerSourceProvider = bun.SourceMap.DevServerSourceProvider; + const jsc = bun.jsc; const JSGlobalObject = jsc.JSGlobalObject; const JSValue = jsc.JSValue; const VirtualMachine = jsc.VirtualMachine; - -const BakeSourceProvider = bun.sourcemap.BakeSourceProvider; -const DevServerSourceProvider = bun.sourcemap.DevServerSourceProvider; diff --git a/src/bun.zig b/src/bun.zig index a1230783c9..fb4b98ba84 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -1488,8 +1488,8 @@ pub fn concat(comptime T: type, dest: []T, src: []const []const T) void { } pub const renamer = @import("./renamer.zig"); -// TODO: Rename to SourceMap as this is a struct. -pub const sourcemap = @import("./sourcemap/sourcemap.zig"); + +pub const SourceMap = @import("./sourcemap/sourcemap.zig"); /// Attempt to coerce some value into a byte slice. 
pub fn asByteSlice(buffer: anytype) []const u8 { diff --git a/src/bundler/Chunk.zig b/src/bundler/Chunk.zig index cdf6eec74c..a7f880bd32 100644 --- a/src/bundler/Chunk.zig +++ b/src/bundler/Chunk.zig @@ -29,7 +29,7 @@ pub const Chunk = struct { has_html_chunk: bool = false, is_browser_chunk_from_server_build: bool = false, - output_source_map: sourcemap.SourceMapPieces, + output_source_map: SourceMap.SourceMapPieces, intermediate_output: IntermediateOutput = .{ .empty = {} }, isolated_hash: u64 = std.math.maxInt(u64), @@ -116,7 +116,7 @@ pub const Chunk = struct { pub const CodeResult = struct { buffer: []u8, - shifts: []sourcemap.SourceMapShifts, + shifts: []SourceMap.SourceMapShifts, }; pub fn getSize(this: *const IntermediateOutput) usize { @@ -181,12 +181,12 @@ pub const Chunk = struct { const entry_point_chunks_for_scb = linker_graph.files.items(.entry_point_chunk_index); var shift = if (enable_source_map_shifts) - sourcemap.SourceMapShifts{ + SourceMap.SourceMapShifts{ .after = .{}, .before = .{}, }; var shifts = if (enable_source_map_shifts) - try std.ArrayList(sourcemap.SourceMapShifts).initCapacity(bun.default_allocator, pieces.len + 1); + try std.ArrayList(SourceMap.SourceMapShifts).initCapacity(bun.default_allocator, pieces.len + 1); if (enable_source_map_shifts) shifts.appendAssumeCapacity(shift); @@ -245,7 +245,7 @@ pub const Chunk = struct { } const debug_id_len = if (enable_source_map_shifts and FeatureFlags.source_map_debug_id) - std.fmt.count("\n//# debugId={}\n", .{bun.sourcemap.DebugIDFormatter{ .id = chunk.isolated_hash }}) + std.fmt.count("\n//# debugId={}\n", .{bun.SourceMap.DebugIDFormatter{ .id = chunk.isolated_hash }}) else 0; @@ -256,7 +256,7 @@ pub const Chunk = struct { const data = piece.data(); if (enable_source_map_shifts) { - var data_offset = sourcemap.LineColumnOffset{}; + var data_offset = SourceMap.LineColumnOffset{}; data_offset.advance(data); shift.before.add(data_offset); shift.after.add(data_offset); @@ -353,7 +353,7 @@ pub const Chunk = struct { remain = remain[(std.fmt.bufPrint( remain, "\n//# debugId={}\n", - .{bun.sourcemap.DebugIDFormatter{ .id = chunk.isolated_hash }}, + .{bun.SourceMap.DebugIDFormatter{ .id = chunk.isolated_hash }}, ) catch |err| switch (err) { error.NoSpaceLeft => std.debug.panic( "unexpected NoSpaceLeft error from bufPrint", @@ -370,7 +370,7 @@ pub const Chunk = struct { .shifts = if (enable_source_map_shifts) shifts.items else - &[_]sourcemap.SourceMapShifts{}, + &[_]SourceMap.SourceMapShifts{}, }; }, .joiner => |*joiner| { @@ -386,7 +386,7 @@ pub const Chunk = struct { const debug_id_fmt = std.fmt.allocPrint( graph.heap.allocator(), "\n//# debugId={}\n", - .{bun.sourcemap.DebugIDFormatter{ .id = chunk.isolated_hash }}, + .{bun.SourceMap.DebugIDFormatter{ .id = chunk.isolated_hash }}, ) catch |err| bun.handleOom(err); break :brk try joiner.doneWithEnd(allocator, debug_id_fmt); @@ -397,12 +397,12 @@ pub const Chunk = struct { return .{ .buffer = buffer, - .shifts = &[_]sourcemap.SourceMapShifts{}, + .shifts = &[_]SourceMap.SourceMapShifts{}, }; }, .empty => return .{ .buffer = "", - .shifts = &[_]sourcemap.SourceMapShifts{}, + .shifts = &[_]SourceMap.SourceMapShifts{}, }, } } @@ -651,10 +651,10 @@ const FeatureFlags = bun.FeatureFlags; const ImportKind = bun.ImportKind; const ImportRecord = bun.ImportRecord; const Output = bun.Output; +const SourceMap = bun.SourceMap; const StringJoiner = bun.StringJoiner; const default_allocator = bun.default_allocator; const renamer = bun.renamer; -const sourcemap = bun.sourcemap; const 
strings = bun.strings; const AutoBitSet = bun.bit_set.AutoBitSet; const BabyList = bun.collections.BabyList; diff --git a/src/bundler/LinkerContext.zig b/src/bundler/LinkerContext.zig index 8b5e8aba8f..200d3c3d40 100644 --- a/src/bundler/LinkerContext.zig +++ b/src/bundler/LinkerContext.zig @@ -129,7 +129,7 @@ pub const LinkerContext = struct { pub fn computeLineOffsets(this: *LinkerContext, alloc: std.mem.Allocator, source_index: Index.Int) void { debug("Computing LineOffsetTable: {d}", .{source_index}); - const line_offset_table: *bun.sourcemap.LineOffsetTable.List = &this.graph.files.items(.line_offset_table)[source_index]; + const line_offset_table: *bun.SourceMap.LineOffsetTable.List = &this.graph.files.items(.line_offset_table)[source_index]; const source: *const Logger.Source = &this.parse_graph.input_files.items(.source)[source_index]; const loader: options.Loader = this.parse_graph.input_files.items(.loader)[source_index]; @@ -142,7 +142,7 @@ pub const LinkerContext = struct { const approximate_line_count = this.graph.ast.items(.approximate_newline_count)[source_index]; - line_offset_table.* = bun.sourcemap.LineOffsetTable.generate( + line_offset_table.* = bun.SourceMap.LineOffsetTable.generate( alloc, source.contents, @@ -686,7 +686,7 @@ pub const LinkerContext = struct { results: std.MultiArrayList(CompileResultForSourceMap), chunk_abs_dir: string, can_have_shifts: bool, - ) !sourcemap.SourceMapPieces { + ) !SourceMap.SourceMapPieces { const trace = bun.perf.trace("Bundler.generateSourceMapForChunk"); defer trace.end(); @@ -776,7 +776,7 @@ pub const LinkerContext = struct { ); const mapping_start = j.len; - var prev_end_state = sourcemap.SourceMapState{}; + var prev_end_state = SourceMap.SourceMapState{}; var prev_column_offset: i32 = 0; const source_map_chunks = results.items(.source_map_chunk); const offsets = results.items(.generated_offset); @@ -784,7 +784,7 @@ pub const LinkerContext = struct { const mapping_source_index = source_id_map.get(current_source_index) orelse unreachable; // the pass above during printing of "sources" must add the index - var start_state = sourcemap.SourceMapState{ + var start_state = SourceMap.SourceMapState{ .source_index = mapping_source_index, .generated_line = offset.lines.zeroBased(), .generated_column = offset.columns.zeroBased(), @@ -794,7 +794,7 @@ pub const LinkerContext = struct { start_state.generated_column += prev_column_offset; } - try sourcemap.appendSourceMapChunk(&j, worker.allocator, prev_end_state, start_state, chunk.buffer.list.items); + try SourceMap.appendSourceMapChunk(&j, worker.allocator, prev_end_state, start_state, chunk.buffer.list.items); prev_end_state = chunk.end_state; prev_end_state.source_index = mapping_source_index; @@ -810,7 +810,7 @@ pub const LinkerContext = struct { if (comptime FeatureFlags.source_map_debug_id) { j.pushStatic("\",\n \"debugId\": \""); j.push( - try std.fmt.allocPrint(worker.allocator, "{}", .{bun.sourcemap.DebugIDFormatter{ .id = isolated_hash }}), + try std.fmt.allocPrint(worker.allocator, "{}", .{bun.SourceMap.DebugIDFormatter{ .id = isolated_hash }}), worker.allocator, ); j.pushStatic("\",\n \"names\": []\n}"); @@ -821,7 +821,7 @@ pub const LinkerContext = struct { const done = try j.done(worker.allocator); bun.assert(done[0] == '{'); - var pieces = sourcemap.SourceMapPieces.init(worker.allocator); + var pieces = SourceMap.SourceMapPieces.init(worker.allocator); if (can_have_shifts) { try pieces.prefix.appendSlice(done[0..mapping_start]); try 
pieces.mappings.appendSlice(done[mapping_start..mapping_end]); @@ -1411,7 +1411,7 @@ pub const LinkerContext = struct { const SubstituteChunkFinalPathResult = struct { j: StringJoiner, - shifts: []sourcemap.SourceMapShifts, + shifts: []SourceMap.SourceMapShifts, }; pub fn mangleLocalCss(c: *LinkerContext) void { @@ -2684,11 +2684,11 @@ const MultiArrayList = bun.MultiArrayList; const MutableString = bun.MutableString; const OOM = bun.OOM; const Output = bun.Output; +const SourceMap = bun.SourceMap; const StringJoiner = bun.StringJoiner; const bake = bun.bake; const base64 = bun.base64; const renamer = bun.renamer; -const sourcemap = bun.sourcemap; const strings = bun.strings; const sync = bun.threading; const AutoBitSet = bun.bit_set.AutoBitSet; diff --git a/src/bundler/LinkerGraph.zig b/src/bundler/LinkerGraph.zig index c160e96c28..8fd01500ae 100644 --- a/src/bundler/LinkerGraph.zig +++ b/src/bundler/LinkerGraph.zig @@ -458,7 +458,7 @@ pub const File = struct { /// a Source.Index to its output path inb reakOutputIntoPieces entry_point_chunk_index: u32 = std.math.maxInt(u32), - line_offset_table: bun.sourcemap.LineOffsetTable.List = .empty, + line_offset_table: bun.SourceMap.LineOffsetTable.List = .empty, quoted_source_contents: Owned(?[]u8) = .initNull(), pub fn isEntryPoint(this: *const File) bool { diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 9b5d912dd5..712a8a99b5 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -2868,7 +2868,7 @@ pub const BundleV2 = struct { .parts_in_chunk_in_order = js_part_ranges, }, }, - .output_source_map = sourcemap.SourceMapPieces.init(this.allocator()), + .output_source_map = SourceMap.SourceMapPieces.init(this.allocator()), }; // Then all the distinct CSS bundles (these are JS->CSS, not CSS->CSS) @@ -2886,7 +2886,7 @@ pub const BundleV2 = struct { .asts = try this.allocator().alloc(bun.css.BundlerStyleSheet, order.len), }, }, - .output_source_map = sourcemap.SourceMapPieces.init(this.allocator()), + .output_source_map = SourceMap.SourceMapPieces.init(this.allocator()), }; } @@ -2899,7 +2899,7 @@ pub const BundleV2 = struct { .is_entry_point = false, }, .content = .html, - .output_source_map = sourcemap.SourceMapPieces.init(this.allocator()), + .output_source_map = SourceMap.SourceMapPieces.init(this.allocator()), }; } @@ -4264,7 +4264,7 @@ pub const CompileResult = union(enum) { css: struct { result: bun.Maybe([]const u8, anyerror), source_index: Index.Int, - source_map: ?bun.sourcemap.Chunk = null, + source_map: ?bun.SourceMap.Chunk = null, }, html: struct { source_index: Index.Int, @@ -4295,7 +4295,7 @@ pub const CompileResult = union(enum) { }; } - pub fn sourceMapChunk(this: *const CompileResult) ?sourcemap.Chunk { + pub fn sourceMapChunk(this: *const CompileResult) ?SourceMap.Chunk { return switch (this.*) { .javascript => |r| switch (r.result) { .result => |r2| r2.source_map, @@ -4314,8 +4314,8 @@ pub const CompileResult = union(enum) { }; pub const CompileResultForSourceMap = struct { - source_map_chunk: sourcemap.Chunk, - generated_offset: sourcemap.LineColumnOffset, + source_map_chunk: SourceMap.Chunk, + generated_offset: SourceMap.LineColumnOffset, source_index: u32, }; @@ -4503,7 +4503,7 @@ pub const Part = js_ast.Part; pub const js_printer = @import("../js_printer.zig"); pub const js_ast = bun.ast; pub const linker = @import("../linker.zig"); -pub const sourcemap = bun.sourcemap; +pub const SourceMap = bun.SourceMap; pub const StringJoiner = bun.StringJoiner; pub const base64 = bun.base64; pub 
const Ref = bun.ast.Ref; diff --git a/src/bundler/linker_context/computeChunks.zig b/src/bundler/linker_context/computeChunks.zig index fd98457e4c..18e8910a6b 100644 --- a/src/bundler/linker_context/computeChunks.zig +++ b/src/bundler/linker_context/computeChunks.zig @@ -63,7 +63,7 @@ pub noinline fn computeChunks( }, .entry_bits = entry_bits.*, .content = .html, - .output_source_map = sourcemap.SourceMapPieces.init(this.allocator()), + .output_source_map = SourceMap.SourceMapPieces.init(this.allocator()), .is_browser_chunk_from_server_build = could_be_browser_target_from_server_build and ast_targets[source_index] == .browser, }; } @@ -97,7 +97,7 @@ pub noinline fn computeChunks( .asts = bun.handleOom(this.allocator().alloc(bun.css.BundlerStyleSheet, order.len)), }, }, - .output_source_map = sourcemap.SourceMapPieces.init(this.allocator()), + .output_source_map = SourceMap.SourceMapPieces.init(this.allocator()), .has_html_chunk = has_html_chunk, .is_browser_chunk_from_server_build = could_be_browser_target_from_server_build and ast_targets[source_index] == .browser, }; @@ -120,7 +120,7 @@ pub noinline fn computeChunks( .javascript = .{}, }, .has_html_chunk = has_html_chunk, - .output_source_map = sourcemap.SourceMapPieces.init(this.allocator()), + .output_source_map = SourceMap.SourceMapPieces.init(this.allocator()), .is_browser_chunk_from_server_build = could_be_browser_target_from_server_build and ast_targets[source_index] == .browser, }; @@ -173,7 +173,7 @@ pub noinline fn computeChunks( }, }, .files_with_parts_in_chunk = css_files_with_parts_in_chunk, - .output_source_map = sourcemap.SourceMapPieces.init(this.allocator()), + .output_source_map = SourceMap.SourceMapPieces.init(this.allocator()), .has_html_chunk = has_html_chunk, .is_browser_chunk_from_server_build = could_be_browser_target_from_server_build and ast_targets[source_index] == .browser, }; @@ -217,7 +217,7 @@ pub noinline fn computeChunks( .content = .{ .javascript = .{}, }, - .output_source_map = sourcemap.SourceMapPieces.init(this.allocator()), + .output_source_map = SourceMap.SourceMapPieces.init(this.allocator()), .is_browser_chunk_from_server_build = is_browser_chunk_from_server_build, }; } @@ -422,8 +422,8 @@ const std = @import("std"); const bun = @import("bun"); const BabyList = bun.BabyList; +const SourceMap = bun.SourceMap; const options = bun.options; -const sourcemap = bun.sourcemap; const AutoBitSet = bun.bit_set.AutoBitSet; const bundler = bun.bundle_v2; diff --git a/src/bundler/linker_context/postProcessCSSChunk.zig b/src/bundler/linker_context/postProcessCSSChunk.zig index c969b53c0e..1f2b6dc5b4 100644 --- a/src/bundler/linker_context/postProcessCSSChunk.zig +++ b/src/bundler/linker_context/postProcessCSSChunk.zig @@ -8,7 +8,7 @@ pub fn postProcessCSSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, ch }, }; - var line_offset: bun.sourcemap.LineColumnOffset.Optional = if (c.options.source_maps != .none) .{ .value = .{} } else .{ .null = {} }; + var line_offset: bun.SourceMap.LineColumnOffset.Optional = if (c.options.source_maps != .none) .{ .value = .{} } else .{ .null = {} }; var newline_before_comment = false; diff --git a/src/bundler/linker_context/postProcessJSChunk.zig b/src/bundler/linker_context/postProcessJSChunk.zig index 110b3870cd..d174286d16 100644 --- a/src/bundler/linker_context/postProcessJSChunk.zig +++ b/src/bundler/linker_context/postProcessJSChunk.zig @@ -110,7 +110,7 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu errdefer j.deinit(); const 
output_format = c.options.output_format; - var line_offset: bun.sourcemap.LineColumnOffset.Optional = if (c.options.source_maps != .none) .{ .value = .{} } else .{ .null = {} }; + var line_offset: bun.SourceMap.LineColumnOffset.Optional = if (c.options.source_maps != .none) .{ .value = .{} } else .{ .null = {} }; // Concatenate the generated JavaScript chunks together diff --git a/src/bundler/linker_context/writeOutputFilesToDisk.zig b/src/bundler/linker_context/writeOutputFilesToDisk.zig index e49fd8c7e1..5d4081a7e0 100644 --- a/src/bundler/linker_context/writeOutputFilesToDisk.zig +++ b/src/bundler/linker_context/writeOutputFilesToDisk.zig @@ -426,7 +426,6 @@ const base64 = bun.base64; const default_allocator = bun.default_allocator; const jsc = bun.jsc; const options = bun.options; -const sourcemap = bun.sourcemap; const strings = bun.strings; const bundler = bun.bundle_v2; diff --git a/src/cli/test_command.zig b/src/cli/test_command.zig index 8436e1394d..6466f14b88 100644 --- a/src/cli/test_command.zig +++ b/src/cli/test_command.zig @@ -960,7 +960,7 @@ pub const CommandLineReporter = struct { var map = coverage.ByteRangeMapping.map orelse return; var iter = map.valueIterator(); - var byte_ranges = try std.ArrayList(bun.sourcemap.coverage.ByteRangeMapping).initCapacity(bun.default_allocator, map.count()); + var byte_ranges = try std.ArrayList(bun.SourceMap.coverage.ByteRangeMapping).initCapacity(bun.default_allocator, map.count()); while (iter.next()) |entry| { byte_ranges.appendAssumeCapacity(entry.*); @@ -971,10 +971,10 @@ pub const CommandLineReporter = struct { } std.sort.pdq( - bun.sourcemap.coverage.ByteRangeMapping, + bun.SourceMap.coverage.ByteRangeMapping, byte_ranges.items, {}, - bun.sourcemap.coverage.ByteRangeMapping.isLessThan, + bun.SourceMap.coverage.ByteRangeMapping.isLessThan, ); try this.printCodeCoverage(vm, opts, byte_ranges.items, reporters, enable_ansi_colors); @@ -984,7 +984,7 @@ pub const CommandLineReporter = struct { _: *CommandLineReporter, vm: *jsc.VirtualMachine, opts: *TestCommand.CodeCoverageOptions, - byte_ranges: []bun.sourcemap.coverage.ByteRangeMapping, + byte_ranges: []bun.SourceMap.coverage.ByteRangeMapping, comptime reporters: TestCommand.Reporters, comptime enable_ansi_colors: bool, ) !void { @@ -1054,7 +1054,7 @@ pub const CommandLineReporter = struct { var console_buffer_buffer = console_buffer.bufferedWriter(); var console_writer = console_buffer_buffer.writer(); - var avg = bun.sourcemap.coverage.Fraction{ + var avg = bun.SourceMap.coverage.Fraction{ .functions = 0.0, .lines = 0.0, .stmts = 0.0, @@ -1185,7 +1185,7 @@ pub const CommandLineReporter = struct { avg.stmts /= avg_count; } - const failed = if (avg_count > 0) base_fraction else bun.sourcemap.coverage.Fraction{ + const failed = if (avg_count > 0) base_fraction else bun.SourceMap.coverage.Fraction{ .functions = 0, .lines = 0, .stmts = 0, @@ -1280,7 +1280,7 @@ pub const TestCommand = struct { skip_test_files: bool = !Environment.allow_assert, reporters: Reporters = .{ .text = true, .lcov = false }, reports_directory: string = "coverage", - fractions: bun.sourcemap.coverage.Fraction = .{}, + fractions: bun.SourceMap.coverage.Fraction = .{}, ignore_sourcemap: bool = false, enabled: bool = false, fail_on_low_coverage: bool = false, @@ -2010,12 +2010,12 @@ const strings = bun.strings; const uws = bun.uws; const HTTPThread = bun.http.HTTPThread; +const coverage = bun.SourceMap.coverage; +const CodeCoverageReport = coverage.Report; + const jsc = bun.jsc; const jest = jsc.Jest; const Snapshots = 
jsc.Snapshot.Snapshots; const TestRunner = jsc.Jest.TestRunner; const Test = TestRunner.Test; - -const coverage = bun.sourcemap.coverage; -const CodeCoverageReport = coverage.Report; diff --git a/src/js_printer.zig b/src/js_printer.zig index c510555bba..15fee9f6eb 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -390,7 +390,7 @@ pub const Options = struct { allocator: std.mem.Allocator = default_allocator, source_map_allocator: ?std.mem.Allocator = null, source_map_handler: ?SourceMapHandler = null, - source_map_builder: ?*bun.sourcemap.Chunk.Builder = null, + source_map_builder: ?*bun.SourceMap.Chunk.Builder = null, css_import_behavior: api.CssInJsBehavior = api.CssInJsBehavior.facade, target: options.Target = .browser, diff --git a/src/sourcemap/CodeCoverage.zig b/src/sourcemap/CodeCoverage.zig index bc4a8ee4be..b6d6899ce4 100644 --- a/src/sourcemap/CodeCoverage.zig +++ b/src/sourcemap/CodeCoverage.zig @@ -726,7 +726,7 @@ const std = @import("std"); const bun = @import("bun"); const Bitset = bun.bit_set.DynamicBitSetUnmanaged; -const LineOffsetTable = bun.sourcemap.LineOffsetTable; +const LineOffsetTable = bun.SourceMap.LineOffsetTable; const Output = bun.Output; const prettyFmt = Output.prettyFmt; diff --git a/src/sourcemap/JSSourceMap.zig b/src/sourcemap/JSSourceMap.zig index 0414359019..6162eda416 100644 --- a/src/sourcemap/JSSourceMap.zig +++ b/src/sourcemap/JSSourceMap.zig @@ -2,7 +2,7 @@ /// const JSSourceMap = @This(); -sourcemap: *bun.sourcemap.ParsedSourceMap, +sourcemap: *bun.SourceMap.ParsedSourceMap, sources: []bun.String = &.{}, names: []bun.String = &.{}, @@ -136,7 +136,7 @@ pub fn constructor( } // Parse the VLQ mappings - const parse_result = bun.sourcemap.Mapping.parse( + const parse_result = bun.SourceMap.Mapping.parse( bun.default_allocator, mappings_str.slice(), null, // estimated_mapping_count @@ -156,7 +156,7 @@ pub fn constructor( }; const source_map = bun.new(JSSourceMap, .{ - .sourcemap = bun.new(bun.sourcemap.ParsedSourceMap, mapping_list), + .sourcemap = bun.new(bun.SourceMap.ParsedSourceMap, mapping_list), .sources = sources.items, .names = names.items, }); @@ -200,7 +200,7 @@ fn getLineColumn(globalObject: *JSGlobalObject, callFrame: *CallFrame) bun.JSErr }; } -fn mappingNameToJS(this: *const JSSourceMap, globalObject: *JSGlobalObject, mapping: *const bun.sourcemap.Mapping) bun.JSError!JSValue { +fn mappingNameToJS(this: *const JSSourceMap, globalObject: *JSGlobalObject, mapping: *const bun.SourceMap.Mapping) bun.JSError!JSValue { const name_index = mapping.nameIndex(); if (name_index >= 0) { if (this.sourcemap.mappings.getName(name_index)) |name| { @@ -215,7 +215,7 @@ fn mappingNameToJS(this: *const JSSourceMap, globalObject: *JSGlobalObject, mapp return .js_undefined; } -fn sourceNameToJS(this: *const JSSourceMap, globalObject: *JSGlobalObject, mapping: *const bun.sourcemap.Mapping) bun.JSError!JSValue { +fn sourceNameToJS(this: *const JSSourceMap, globalObject: *JSGlobalObject, mapping: *const bun.SourceMap.Mapping) bun.JSError!JSValue { const source_index = mapping.sourceIndex(); if (source_index >= 0 and source_index < @as(i32, @intCast(this.sources.len))) { return this.sources[@intCast(source_index)].toJS(globalObject); From 2afafbfa23b42f7eb1877cbeae012cd0855674ed Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Mon, 27 Oct 2025 11:26:21 -0800 Subject: [PATCH 012/102] zig: remove Location.suggestion (#23478) --- src/api/schema.zig | 5 ----- src/bundler/ParseTask.zig | 1 - src/logger.zig | 10 +--------- 3 files changed, 1 insertion(+), 15 
deletions(-) diff --git a/src/api/schema.zig b/src/api/schema.zig index 8e28eb94fd..cbb4274055 100644 --- a/src/api/schema.zig +++ b/src/api/schema.zig @@ -2335,9 +2335,6 @@ pub const api = struct { /// line_text line_text: []const u8, - /// suggestion - suggestion: []const u8, - /// offset offset: u32 = 0, @@ -2349,7 +2346,6 @@ pub const api = struct { this.line = try reader.readValue(i32); this.column = try reader.readValue(i32); this.line_text = try reader.readValue([]const u8); - this.suggestion = try reader.readValue([]const u8); this.offset = try reader.readValue(u32); return this; } @@ -2360,7 +2356,6 @@ pub const api = struct { try writer.writeInt(this.line); try writer.writeInt(this.column); try writer.writeValue(@TypeOf(this.line_text), this.line_text); - try writer.writeValue(@TypeOf(this.suggestion), this.suggestion); try writer.writeInt(this.offset); } }; diff --git a/src/bundler/ParseTask.zig b/src/bundler/ParseTask.zig index 60241db5dd..a20b43cb53 100644 --- a/src/bundler/ParseTask.zig +++ b/src/bundler/ParseTask.zig @@ -833,7 +833,6 @@ const OnBeforeParsePlugin = struct { @max(this.column, -1), @max(this.column_end - this.column, 0), if (source_line_text.len > 0) bun.handleOom(allocator.dupe(u8, source_line_text)) else null, - null, ); var msg = Logger.Msg{ .data = .{ .location = location, .text = bun.handleOom(allocator.dupe(u8, this.message())) } }; switch (this.level) { diff --git a/src/logger.zig b/src/logger.zig index c778cc9cad..ab70c6b1c5 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -86,8 +86,6 @@ pub const Location = struct { length: usize = 0, /// Text on the line, avoiding the need to refetch the source code line_text: ?string = null, - // TODO: remove this unused field - suggestion: ?string = null, // TODO: document or remove offset: usize = 0, @@ -96,7 +94,6 @@ pub const Location = struct { cost += this.file.len; cost += this.namespace.len; if (this.line_text) |text| cost += text.len; - if (this.suggestion) |text| cost += text.len; return cost; } @@ -104,7 +101,6 @@ pub const Location = struct { builder.count(this.file); builder.count(this.namespace); if (this.line_text) |text| builder.count(text); - if (this.suggestion) |text| builder.count(text); } pub fn clone(this: Location, allocator: std.mem.Allocator) !Location { @@ -115,7 +111,6 @@ pub const Location = struct { .column = this.column, .length = this.length, .line_text = if (this.line_text != null) try allocator.dupe(u8, this.line_text.?) else null, - .suggestion = if (this.suggestion != null) try allocator.dupe(u8, this.suggestion.?) else null, .offset = this.offset, }; } @@ -128,7 +123,6 @@ pub const Location = struct { .column = this.column, .length = this.length, .line_text = if (this.line_text != null) string_builder.append(this.line_text.?) else null, - .suggestion = if (this.suggestion != null) string_builder.append(this.suggestion.?) else null, .offset = this.offset, }; } @@ -140,7 +134,6 @@ pub const Location = struct { .line = this.line, .column = this.column, .line_text = this.line_text orelse "", - .suggestion = this.suggestion orelse "", .offset = @as(u32, @truncate(this.offset)), }; } @@ -148,7 +141,7 @@ pub const Location = struct { // don't really know what's safe to deinit here! 
pub fn deinit(_: *Location, _: std.mem.Allocator) void {} - pub fn init(file: string, namespace: string, line: i32, column: i32, length: u32, line_text: ?string, suggestion: ?string) Location { + pub fn init(file: string, namespace: string, line: i32, column: i32, length: u32, line_text: ?string) Location { return Location{ .file = file, .namespace = namespace, @@ -156,7 +149,6 @@ pub const Location = struct { .column = column, .length = length, .line_text = line_text, - .suggestion = suggestion, .offset = length, }; } From 64bfd8b938fc92922481cc8e4c40e0c66c54c9cc Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Mon, 27 Oct 2025 11:49:41 -0800 Subject: [PATCH 013/102] Revert "deps: update elysia to 1.4.13" (#24133) --- test/vendor.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/vendor.json b/test/vendor.json index 06a7d08a12..05ca430f3a 100644 --- a/test/vendor.json +++ b/test/vendor.json @@ -2,6 +2,6 @@ { "package": "elysia", "repository": "https://github.com/elysiajs/elysia", - "tag": "1.4.13" + "tag": "1.4.12" } ] From f3ed784a6b356b6552964f042ad532f81e95feda Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Mon, 27 Oct 2025 12:11:00 -0800 Subject: [PATCH 014/102] scripts: teach machine.mjs how to spawn a freebsd image on aws (#24109) exploratory look into https://github.com/oven-sh/bun/issues/1524. this still leaves that far off from being closed, but it's an important first step. this is important because this script is used to spawn our base images for CI and will provide boxes for local testing. not sure how far i'll get, but a rough "road to freebsd" map for anyone reading: - [x] this - [ ] ensure `bootstrap.sh` can run successfully - [ ] ensure WebKit can build from source - [ ] ensure other dependencies can build from source - [ ] add freebsd to our WebKit fork releases - [ ] add freebsd to our Zig fork releases - [ ] ensure bun can build from source - [ ] run `[build images]` and add freebsd to CI - [ ] fix runtime test failures --- package.json | 1 + scripts/machine.mjs | 25 +++++++++++++++++++++++-- scripts/utils.mjs | 12 +++++++----- 3 files changed, 31 insertions(+), 7 deletions(-) diff --git a/package.json b/package.json index c0fcee4b5f..b5d18cf725 100644 --- a/package.json +++ b/package.json @@ -89,6 +89,7 @@ "machine:linux:alpine": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=alpine --release=3.22", "machine:linux:amazonlinux": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=amazonlinux --release=2023", "machine:windows:2019": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=windows --release=2019", + "machine:freebsd": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.large --os=freebsd --release=14.3", "sync-webkit-source": "bun ./scripts/sync-webkit-source.ts" } } diff --git a/scripts/machine.mjs b/scripts/machine.mjs index 5ea5cdae66..5c7998aa80 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -389,6 +389,9 @@ const aws = { owner = "amazon"; name = `Windows_Server-${release || "*"}-English-Full-Base-*`; } + } else if (os === "freebsd") { + owner = "782442783595"; // upstream member of FreeBSD team, likely Colin Percival + name = `FreeBSD ${release}-STABLE-${{ "aarch64": "arm64", "x64": "amd64" }[arch] ?? 
"amd64"}-* UEFI-PREFERRED cloud-init UFS`; } if (!name) { @@ -400,6 +403,7 @@ const aws = { "owner-alias": owner, "name": name, }); + // console.table(baseImages.map(v => v.Name)); if (!baseImages.length) { throw new Error(`No base image found: ${inspect(options)}`); @@ -425,6 +429,8 @@ const aws = { } const { ImageId, Name, RootDeviceName, BlockDeviceMappings } = image; + // console.table({ os, arch, instanceType, Name, ImageId }); + const blockDeviceMappings = BlockDeviceMappings.map(device => { const { DeviceName } = device; if (DeviceName === RootDeviceName) { @@ -620,6 +626,7 @@ const aws = { * @property {SshKey[]} [sshKeys] * @property {string} [username] * @property {string} [password] + * @property {Os} [os] */ /** @@ -648,6 +655,7 @@ function getCloudInit(cloudInit) { const authorizedKeys = cloudInit["sshKeys"]?.map(({ publicKey }) => publicKey) || []; let sftpPath = "/usr/lib/openssh/sftp-server"; + let shell = "/bin/bash"; switch (cloudInit["distro"]) { case "alpine": sftpPath = "/usr/lib/ssh/sftp-server"; @@ -658,6 +666,18 @@ function getCloudInit(cloudInit) { sftpPath = "/usr/libexec/openssh/sftp-server"; break; } + switch (cloudInit["os"]) { + case "linux": + case "windows": + // handled above + break; + case "freebsd": + sftpPath = "/usr/libexec/openssh/sftp-server"; + shell = "/bin/csh"; + break; + default: + throw new Error(`Unsupported os: ${cloudInit["os"]}`); + } let users; if (username === "root") { @@ -671,7 +691,7 @@ function getCloudInit(cloudInit) { users: - name: ${username} sudo: ALL=(ALL) NOPASSWD:ALL - shell: /bin/bash + shell: ${shell} ssh_authorized_keys: ${authorizedKeys.map(key => ` - ${key}`).join("\n")} @@ -1050,7 +1070,7 @@ function getCloud(name) { } /** - * @typedef {"linux" | "darwin" | "windows"} Os + * @typedef {"linux" | "darwin" | "windows" | "freebsd"} Os * @typedef {"aarch64" | "x64"} Arch * @typedef {"macos" | "windowsserver" | "debian" | "ubuntu" | "alpine" | "amazonlinux"} Distro */ @@ -1204,6 +1224,7 @@ async function main() { }; let { detached, bootstrap, ci, os, arch, distro, release, features } = options; + if (os === "freebsd") bootstrap = false; let name = `${os}-${arch}-${(release || "").replace(/\./g, "")}`; diff --git a/scripts/utils.mjs b/scripts/utils.mjs index 604227f9cd..c9ad28be53 100755 --- a/scripts/utils.mjs +++ b/scripts/utils.mjs @@ -1538,7 +1538,7 @@ export function parseNumber(value) { /** * @param {string} string - * @returns {"darwin" | "linux" | "windows"} + * @returns {"darwin" | "linux" | "windows" | "freebsd"} */ export function parseOs(string) { if (/darwin|apple|mac/i.test(string)) { @@ -1550,6 +1550,9 @@ export function parseOs(string) { if (/win/i.test(string)) { return "windows"; } + if (/freebsd/i.test(string)) { + return "freebsd"; + } throw new Error(`Unsupported operating system: ${string}`); } @@ -1900,22 +1903,21 @@ export function getUsernameForDistro(distro) { if (/windows/i.test(distro)) { return "administrator"; } - if (/alpine|centos/i.test(distro)) { return "root"; } - if (/debian/i.test(distro)) { return "admin"; } - if (/ubuntu/i.test(distro)) { return "ubuntu"; } - if (/amazon|amzn|al\d+|rhel/i.test(distro)) { return "ec2-user"; } + if (/freebsd/i.test(distro)) { + return "root"; + } throw new Error(`Unsupported distro: ${distro}`); } From 6580b563b00c55270a68f42374afe5c2ff36d14d Mon Sep 17 00:00:00 2001 From: robobun Date: Mon, 27 Oct 2025 14:19:38 -0700 Subject: [PATCH 015/102] Refactor Subprocess to use JSRef instead of hasPendingActivity (#24090) MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Refactors `Subprocess` to use explicit strong/weak reference management via `JSRef` instead of the `hasPendingActivity` mechanism that relies on JSC's internal `WeakHandleOwner`. ## Changes ### Core Refactoring - **JSRef.zig**: Added `update()` method to update references in-place - **subprocess.zig**: Changed `this_jsvalue: JSValue` to `this_value: JSRef` - **subprocess.zig**: Renamed `hasPendingActivityNonThreadsafe()` to `computeHasPendingActivity()` - **subprocess.zig**: Updated `updateHasPendingActivity()` to upgrade/downgrade `JSRef` based on pending activity - **subprocess.zig**: Removed `hasPendingActivity()` C callback function - **subprocess.zig**: Updated `finalize()` to call `this_value.finalize()` - **BunObject.classes.ts**: Set `hasPendingActivity: false` for Subprocess - **Writable.zig**: Updated references from `this_jsvalue` to `this_value.tryGet()` - **ipc.zig**: Updated references from `this_jsvalue` to `this_value.tryGet()` ## How It Works **Before**: Used `hasPendingActivity: true` which created a `JSC::Weak` reference with a `JSC::WeakHandleOwner` that kept the object alive as long as the C callback returned true. **After**: Uses `JSRef` with explicit lifecycle management: 1. Starts with a **weak** reference when subprocess is created 2. Immediately calls `updateHasPendingActivity()` after creation 3. **Upgrades to strong** reference when `computeHasPendingActivity()` returns true: - Subprocess hasn't exited - Has active stdio streams - Has active IPC connection 4. **Downgrades to weak** reference when all activity completes 5. GC can collect the subprocess once it's weak and no other references exist ## Benefits - Explicit control over subprocess lifecycle instead of relying on JSC's internal mechanisms - Clearer semantics: strong reference = "keep alive", weak reference = "can be GC'd" - Removes dependency on `WeakHandleOwner` callback overhead ## Testing - ✅ `test/js/bun/spawn/spawn.ipc.test.ts` - All 4 tests pass - ✅ `test/js/bun/spawn/spawn-stress.test.ts` - All tests pass (100 iterations) - ⚠️ `test/js/bun/spawn/spawnSync.test.ts` - 3/6 pass (3 pre-existing timing-based failures unrelated to this change) Manual testing confirms: - Subprocess is kept alive without user reference while running - Subprocess can be GC'd after completion - IPC keeps subprocess alive correctly - No crashes or memory leaks 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --------- Co-authored-by: Claude Bot Co-authored-by: Claude Co-authored-by: Jarred Sumner --- src/bun.js/api/BunObject.classes.ts | 1 - src/bun.js/api/bun/subprocess.zig | 50 ++++++++++++---------- src/bun.js/api/bun/subprocess/Writable.zig | 8 ++-- src/bun.js/bindings/JSRef.zig | 17 ++++++++ src/bun.js/ipc.zig | 2 +- 5 files changed, 49 insertions(+), 29 deletions(-) diff --git a/src/bun.js/api/BunObject.classes.ts b/src/bun.js/api/BunObject.classes.ts index 455c750f5a..b55a31d629 100644 --- a/src/bun.js/api/BunObject.classes.ts +++ b/src/bun.js/api/BunObject.classes.ts @@ -46,7 +46,6 @@ export default [ construct: true, noConstructor: true, finalize: true, - hasPendingActivity: true, configurable: false, memoryCost: true, klass: {}, diff --git a/src/bun.js/api/bun/subprocess.zig b/src/bun.js/api/bun/subprocess.zig index c0f0024e06..3ec2bd7cfb 100644 --- a/src/bun.js/api/bun/subprocess.zig +++ b/src/bun.js/api/bun/subprocess.zig @@ -28,8 +28,7 @@ observable_getters: std.enums.EnumSet(enum { stdio, }) = .{}, closed: 
std.enums.EnumSet(StdioKind) = .{}, -has_pending_activity: std.atomic.Value(bool) = std.atomic.Value(bool).init(true), -this_jsvalue: jsc.JSValue = .zero, +this_value: jsc.JSRef = jsc.JSRef.empty(), /// `null` indicates all of the IPC data is uninitialized. ipc_data: ?IPC.SendQueue, @@ -169,7 +168,7 @@ pub fn hasExited(this: *const Subprocess) bool { return this.process.hasExited(); } -pub fn hasPendingActivityNonThreadsafe(this: *const Subprocess) bool { +pub fn computeHasPendingActivity(this: *const Subprocess) bool { if (this.ipc_data != null) { return true; } @@ -186,16 +185,19 @@ pub fn hasPendingActivityNonThreadsafe(this: *const Subprocess) bool { } pub fn updateHasPendingActivity(this: *Subprocess) void { + if (this.flags.is_sync) return; + + const has_pending = this.computeHasPendingActivity(); if (comptime Environment.isDebug) { - log("updateHasPendingActivity() {any} -> {any}", .{ - this.has_pending_activity.raw, - this.hasPendingActivityNonThreadsafe(), - }); + log("updateHasPendingActivity() -> {any}", .{has_pending}); + } + + // Upgrade or downgrade the reference based on pending activity + if (has_pending) { + this.this_value.upgrade(this.globalThis); + } else { + this.this_value.downgrade(); } - this.has_pending_activity.store( - this.hasPendingActivityNonThreadsafe(), - .monotonic, - ); } pub fn hasPendingActivityStdio(this: *const Subprocess) bool { @@ -247,10 +249,6 @@ pub fn onCloseIO(this: *Subprocess, kind: StdioKind) void { } } -pub fn hasPendingActivity(this: *Subprocess) callconv(.C) bool { - return this.has_pending_activity.load(.acquire); -} - pub fn jsRef(this: *Subprocess) void { this.process.enableKeepingEventLoopAlive(); @@ -406,7 +404,9 @@ pub fn kill( globalThis: *JSGlobalObject, callframe: *jsc.CallFrame, ) bun.JSError!JSValue { - this.this_jsvalue = callframe.this(); + // Safe: this method can only be called while the object is alive (reachable from JS) + // The finalizer only runs when the object becomes unreachable + this.this_value.update(globalThis, callframe.this()); const arguments = callframe.arguments_old(1); // If signal is 0, then no actual signal is sent, but error checking @@ -606,7 +606,7 @@ fn consumeOnDisconnectCallback(this_jsvalue: JSValue, globalThis: *jsc.JSGlobalO pub fn onProcessExit(this: *Subprocess, process: *Process, status: bun.spawn.Status, rusage: *const Rusage) void { log("onProcessExit()", .{}); - const this_jsvalue = this.this_jsvalue; + const this_jsvalue = this.this_value.tryGet() orelse .zero; const globalThis = this.globalThis; const jsc_vm = globalThis.bunVM(); this_jsvalue.ensureStillAlive(); @@ -809,11 +809,11 @@ pub fn finalize(this: *Subprocess) callconv(.C) void { // Ensure any code which references the "this" value doesn't attempt to // access it after it's been freed We cannot call any methods which // access GC'd values during the finalizer - this.this_jsvalue = .zero; + this.this_value.finalize(); this.clearAbortSignal(); - bun.assert(!this.hasPendingActivity() or jsc.VirtualMachine.get().isShuttingDown()); + bun.assert(!this.computeHasPendingActivity() or jsc.VirtualMachine.get().isShuttingDown()); this.finalizeStreams(); this.process.detach(); @@ -1567,7 +1567,11 @@ pub fn spawnMaybeSync( subprocess.toJS(globalThis) else JSValue.zero; - subprocess.this_jsvalue = out; + if (out != .zero) { + subprocess.this_value.setWeak(out); + // Immediately upgrade to strong if there's pending activity to prevent premature GC + subprocess.updateHasPendingActivity(); + } var send_exit_notification = false; @@ -1703,7 +1707,7 
@@ pub fn spawnMaybeSync( defer { jsc_vm.uwsLoop().internal_loop_data.jsc_vm = old_vm; } - while (subprocess.hasPendingActivityNonThreadsafe()) { + while (subprocess.computeHasPendingActivity()) { if (subprocess.stdin == .buffer) { subprocess.stdin.buffer.watch(); } @@ -1778,7 +1782,7 @@ pub fn handleIPCMessage( }, .data => |data| { IPC.log("Received IPC message from child", .{}); - const this_jsvalue = this.this_jsvalue; + const this_jsvalue = this.this_value.tryGet() orelse .zero; defer this_jsvalue.ensureStillAlive(); if (this_jsvalue != .zero) { if (jsc.Codegen.JSSubprocess.ipcCallbackGetCached(this_jsvalue)) |cb| { @@ -1801,7 +1805,7 @@ pub fn handleIPCMessage( pub fn handleIPCClose(this: *Subprocess) void { IPClog("Subprocess#handleIPCClose", .{}); - const this_jsvalue = this.this_jsvalue; + const this_jsvalue = this.this_value.tryGet() orelse .zero; defer this_jsvalue.ensureStillAlive(); const globalThis = this.globalThis; this.updateHasPendingActivity(); diff --git a/src/bun.js/api/bun/subprocess/Writable.zig b/src/bun.js/api/bun/subprocess/Writable.zig index 47e61ec1b4..dde982beef 100644 --- a/src/bun.js/api/bun/subprocess/Writable.zig +++ b/src/bun.js/api/bun/subprocess/Writable.zig @@ -54,8 +54,8 @@ pub const Writable = union(enum) { pub fn onClose(this: *Writable, _: ?bun.sys.Error) void { const process: *Subprocess = @fieldParentPtr("stdin", this); - if (process.this_jsvalue != .zero) { - if (js.stdinGetCached(process.this_jsvalue)) |existing_value| { + if (process.this_value.tryGet()) |this_jsvalue| { + if (js.stdinGetCached(this_jsvalue)) |existing_value| { jsc.WebCore.FileSink.JSSink.setDestroyCallback(existing_value, 0); } } @@ -270,8 +270,8 @@ pub const Writable = union(enum) { pub fn finalize(this: *Writable) void { const subprocess: *Subprocess = @fieldParentPtr("stdin", this); - if (subprocess.this_jsvalue != .zero) { - if (jsc.Codegen.JSSubprocess.stdinGetCached(subprocess.this_jsvalue)) |existing_value| { + if (subprocess.this_value.tryGet()) |this_jsvalue| { + if (jsc.Codegen.JSSubprocess.stdinGetCached(this_jsvalue)) |existing_value| { jsc.WebCore.FileSink.JSSink.setDestroyCallback(existing_value, 0); } } diff --git a/src/bun.js/bindings/JSRef.zig b/src/bun.js/bindings/JSRef.zig index a90e0087a7..a8e8516570 100644 --- a/src/bun.js/bindings/JSRef.zig +++ b/src/bun.js/bindings/JSRef.zig @@ -201,6 +201,23 @@ pub const JSRef = union(enum) { this.deinit(); this.* = .{ .finalized = {} }; } + + pub fn update(this: *@This(), globalThis: *jsc.JSGlobalObject, value: JSValue) void { + switch (this.*) { + .weak => { + bun.debugAssert(!value.isEmptyOrUndefinedOrNull()); + this.weak = value; + }, + .strong => { + if (this.strong.get() != value) { + this.strong.set(globalThis, value); + } + }, + .finalized => { + bun.debugAssert(false); + }, + } + } }; const bun = @import("bun"); diff --git a/src/bun.js/ipc.zig b/src/bun.js/ipc.zig index 3d961afa7a..d6ef0f2158 100644 --- a/src/bun.js/ipc.zig +++ b/src/bun.js/ipc.zig @@ -1087,7 +1087,7 @@ fn handleIPCMessage(send_queue: *SendQueue, message: DecodedIPCMessage, globalTh const fd: bun.FD = bun.take(&send_queue.incoming_fd).?; const target: bun.jsc.JSValue = switch (send_queue.owner) { - .subprocess => |subprocess| subprocess.this_jsvalue, + .subprocess => |subprocess| subprocess.this_value.tryGet() orelse .zero, .virtual_machine => bun.jsc.JSValue.null, }; From 668eba0eb855fbcbdc9200360ad35d0c9d62884c Mon Sep 17 00:00:00 2001 From: robobun Date: Mon, 27 Oct 2025 15:24:38 -0700 Subject: [PATCH 016/102] fix(node:http): Fix 
ServerResponse.writableNeedDrain causing stream pause (#24137) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Fixes #19111 This PR fixes a bug where `fs.createReadStream().pipe(ServerResponse)` would fail to transfer data when ServerResponse had no handle (standalone usage). This affected Vite's static file serving and other middleware adapters using the connect-to-web pattern. ## Root Cause The bug was in the `ServerResponse.writableNeedDrain` getter at line 1529 of `_http_server.ts`: ```typescript return !this.destroyed && !this.finished && (this[kHandle]?.bufferedAmount ?? 1) !== 0; ``` When `ServerResponse` had no handle (which is common in middleware scenarios), the nullish coalescing operator defaulted `bufferedAmount` to **1** instead of **0**. This caused `writableNeedDrain` to always return `true`. ## Impact When `pipe()` checks `dest.writableNeedDrain === true`, it immediately pauses the source stream to handle backpressure. With the bug, standalone ServerResponse instances always appeared to need draining, causing piped streams to pause and never resume. ## Fix Changed the default value from `1` to `0`: ```typescript return !this.destroyed && !this.finished && (this[kHandle]?.bufferedAmount ?? 0) !== 0; ``` ## Test Plan - ✅ Added regression test in `test/regression/issue/19111.test.ts` - ✅ Verified fix with actual Vite middleware reproduction - ✅ Confirmed behavior matches Node.js Generated with [Claude Code](https://claude.com/claude-code) --------- Co-authored-by: Claude Bot Co-authored-by: Claude --- src/js/node/_http_server.ts | 2 +- test/regression/issue/19111.test.ts | 99 +++++++++++++++++++++++++++++ 2 files changed, 100 insertions(+), 1 deletion(-) create mode 100644 test/regression/issue/19111.test.ts diff --git a/src/js/node/_http_server.ts b/src/js/node/_http_server.ts index 81d627e5f5..6ffcaaf6ad 100644 --- a/src/js/node/_http_server.ts +++ b/src/js/node/_http_server.ts @@ -1526,7 +1526,7 @@ ServerResponse.prototype._implicitHeader = function () { Object.defineProperty(ServerResponse.prototype, "writableNeedDrain", { get() { - return !this.destroyed && !this.finished && (this[kHandle]?.bufferedAmount ?? 1) !== 0; + return !this.destroyed && !this.finished && (this[kHandle]?.bufferedAmount ?? 
0) !== 0; }, }); diff --git a/test/regression/issue/19111.test.ts b/test/regression/issue/19111.test.ts new file mode 100644 index 0000000000..43e9446904 --- /dev/null +++ b/test/regression/issue/19111.test.ts @@ -0,0 +1,99 @@ +// https://github.com/oven-sh/bun/issues/19111 +// stream.Readable's `readable` event not firing in Bun 1.2.6+ +import assert from "node:assert"; +import { IncomingMessage, ServerResponse } from "node:http"; +import { PassThrough, Readable } from "node:stream"; +import { test } from "node:test"; + +// Helper to create mock IncomingMessage +function createMockIncomingMessage(url: string): IncomingMessage { + return Object.assign(Readable.from([]), { + url, + method: "GET", + headers: {}, + }) as IncomingMessage; +} + +// Focused regression test: Standalone ServerResponse.writableNeedDrain should be false +test("Standalone ServerResponse.writableNeedDrain is false", () => { + const mockReq = createMockIncomingMessage("/need-drain"); + const res = new ServerResponse(mockReq); + + // Regression for #19111: previously true due to defaulting bufferedAmount to 1 + assert.strictEqual(res.writableNeedDrain, false); +}); + +// Helper function for connect-to-web pattern +function createServerResponse(incomingMessage: IncomingMessage) { + const res = new ServerResponse(incomingMessage); + const passThrough = new PassThrough(); + let resolved = false; + + const onReadable = new Promise<{ + readable: Readable; + headers: Record; + statusCode: number; + }>((resolve, reject) => { + const handleReadable = () => { + if (resolved) return; + resolved = true; + resolve({ + readable: passThrough, + headers: res.getHeaders(), + statusCode: res.statusCode, + }); + }; + + const handleError = (err: Error) => { + reject(err); + }; + + passThrough.once("readable", handleReadable); + passThrough.once("end", handleReadable); + passThrough.once("error", handleError); + res.once("error", handleError); + }); + + res.once("finish", () => { + passThrough.end(); + }); + + passThrough.on("drain", () => { + res.emit("drain"); + }); + + res.write = passThrough.write.bind(passThrough); + res.end = (passThrough as any).end.bind(passThrough); + + res.writeHead = function writeHead(statusCode: number, statusMessage?: string | any, headers?: any): ServerResponse { + res.statusCode = statusCode; + if (typeof statusMessage === "object") { + headers = statusMessage; + statusMessage = undefined; + } + if (headers) { + Object.entries(headers).forEach(([key, value]) => { + if (value !== undefined) { + res.setHeader(key, value); + } + }); + } + return res; + }; + + return { res, onReadable }; +} + +test("Readable.pipe(ServerResponse) flows without stalling (regression for #19111)", async () => { + const mockReq = createMockIncomingMessage("/pipe"); + const { res, onReadable } = createServerResponse(mockReq); + + // Pipe a readable source into ServerResponse; should not stall + const src = Readable.from(["Hello, ", "world!"]); + res.writeHead(200, { "Content-Type": "text/plain" }); + src.pipe(res); + + const out = await onReadable; + assert.strictEqual(out.statusCode, 200); + assert.strictEqual(out.headers["content-type"], "text/plain"); +}); From a0a69ee146b39b9ee80dffaaa9ad79cf87f7dff8 Mon Sep 17 00:00:00 2001 From: Felipe Cardozo Date: Mon, 27 Oct 2025 22:31:33 -0300 Subject: [PATCH 017/102] fix: body already used error to throw TypeError (#24114) Should fix https://github.com/oven-sh/bun/issues/24104 ### What does this PR do? This PR is changing `ERR_BODY_ALREADY_USED` to be TypeError instead of Error. 
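To make the behavior change concrete, here is a minimal sketch (illustration only, not part of the diff); the double read is a hypothetical repro mirroring the new test below: ```typescript // Reading a body twice should now reject with a TypeError instead of a plain Error. const res = new Response("a"); await res.text(); // first read consumes the body try { await res.text(); // second read fails with ERR_BODY_ALREADY_USED } catch (err) { console.log(err instanceof TypeError); // true after this change console.log((err as Error).message); // "Body already used" } ```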
### How did you verify your code works? A test case was added to verify that a body-consuming call correctly throws a TypeError after another such call on the same Request, confirming the fix addresses the issue. --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- src/bun.js/bindings/ErrorCode.ts | 2 +- test/js/web/fetch/body-mixin-errors.test.ts | 25 +++++++++++++-------- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/src/bun.js/bindings/ErrorCode.ts b/src/bun.js/bindings/ErrorCode.ts index 31a3c28bac..e87e171e6e 100644 --- a/src/bun.js/bindings/ErrorCode.ts +++ b/src/bun.js/bindings/ErrorCode.ts @@ -20,7 +20,7 @@ const errors: ErrorCodeMapping = [ ["ERR_ASSERTION", Error], ["ERR_ASYNC_CALLBACK", TypeError], ["ERR_ASYNC_TYPE", TypeError], - ["ERR_BODY_ALREADY_USED", Error], + ["ERR_BODY_ALREADY_USED", TypeError], ["ERR_BORINGSSL", Error], ["ERR_ZSTD", Error], ["ERR_BROTLI_INVALID_PARAM", RangeError], diff --git a/test/js/web/fetch/body-mixin-errors.test.ts b/test/js/web/fetch/body-mixin-errors.test.ts index b7568e4dc4..5fce3d4a6c 100644 --- a/test/js/web/fetch/body-mixin-errors.test.ts +++ b/test/js/web/fetch/body-mixin-errors.test.ts @@ -1,17 +1,24 @@ import { describe, expect, it } from "bun:test"; describe("body-mixin-errors", () => { - it("should fail when bodyUsed", async () => { - var res = new Response("a"); - expect(res.bodyUsed).toBe(false); - await res.text(); - expect(res.bodyUsed).toBe(true); + it.concurrent.each([ + ["Response", () => new Response("a"), (b: Response | Request) => b.text()], + [ + "Request", + () => new Request("https://example.com", { body: "{}", method: "POST" }), + (b: Response | Request) => b.json(), + ], + ])("should throw TypeError when body already used on %s", async (type, createBody, secondCall) => { + const body = createBody(); + await body.text(); try { - await res.text(); - throw new Error("should not get here"); + await secondCall(body); + expect.unreachable("body is already used"); } catch (e: any) { - expect(e.message).toBe("Body already used"); + } catch (err: any) { + expect(err.name).toBe("TypeError"); + expect(err.message).toBe("Body already used"); + expect(err instanceof TypeError).toBe(true); } }); }); From 523fc14d76454b8569542e54aa8c9793e22536e1 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Mon, 27 Oct 2025 18:58:02 -0700 Subject: [PATCH 018/102] Deflake websocket test --- test/js/web/websocket/websocket.test.js | 118 ++++++++++++------------ 1 file changed, 59 insertions(+), 59 deletions(-) diff --git a/test/js/web/websocket/websocket.test.js b/test/js/web/websocket/websocket.test.js index e40a2d17ac..7caee1a279 100644 --- a/test/js/web/websocket/websocket.test.js +++ b/test/js/web/websocket/websocket.test.js @@ -512,65 +512,6 @@ describe.concurrent("WebSocket", () => { await Promise.all([promise, promise2]); }); - it("instances should be finalized when GC'd", async () => { - let current_websocket_count = 0; - let initial_websocket_count = 0; - function getWebSocketCount() { - Bun.gc(true); - const objectTypeCounts = require("bun:jsc").heapStats().objectTypeCounts || { - WebSocket: 0, - }; - return objectTypeCounts.WebSocket || 0; - } - - async function run() { - using server = Bun.serve({ - port: 0, - fetch(req, server) { - return server.upgrade(req); - }, - websocket: { - open() {}, - data() {}, - message() {}, - drain() {}, - }, - }); - - function onOpen(sock, resolve) { - sock.addEventListener("close", resolve, { once: true }); - sock.close(); - } - - function 
openAndCloseWS() { - const { promise, resolve } = Promise.withResolvers(); - const sock = new WebSocket(server.url.href.replace("http", "ws")); - sock.addEventListener("open", onOpen.bind(undefined, sock, resolve), { - once: true, - }); - - return promise; - } - - for (let i = 0; i < 1000; i++) { - await openAndCloseWS(); - if (i % 100 === 0) { - if (initial_websocket_count === 0) { - initial_websocket_count = getWebSocketCount(); - } - } - } - } - await run(); - - // wait next tick to run the last time - await Bun.sleep(100); - current_websocket_count = getWebSocketCount(); - console.log({ current_websocket_count, initial_websocket_count }); - // expect that current and initial websocket be close to the same (normaly 1 or 2 difference) - expect(Math.abs(current_websocket_count - initial_websocket_count)).toBeLessThanOrEqual(50); - }); - it("should be able to send big messages", async () => { using serve = Bun.serve({ port: 0, @@ -865,3 +806,62 @@ it.concurrent("#16995", async () => { socket.close(); } }); + +it.serial("instances should be finalized when GC'd", async () => { + let current_websocket_count = 0; + let initial_websocket_count = 0; + function getWebSocketCount() { + Bun.gc(true); + const objectTypeCounts = require("bun:jsc").heapStats().objectTypeCounts || { + WebSocket: 0, + }; + return objectTypeCounts.WebSocket || 0; + } + + async function run() { + using server = Bun.serve({ + port: 0, + fetch(req, server) { + return server.upgrade(req); + }, + websocket: { + open() {}, + data() {}, + message() {}, + drain() {}, + }, + }); + + function onOpen(sock, resolve) { + sock.addEventListener("close", resolve, { once: true }); + sock.close(); + } + + function openAndCloseWS() { + const { promise, resolve } = Promise.withResolvers(); + const sock = new WebSocket(server.url.href.replace("http", "ws")); + sock.addEventListener("open", onOpen.bind(undefined, sock, resolve), { + once: true, + }); + + return promise; + } + + for (let i = 0; i < 1000; i++) { + await openAndCloseWS(); + if (i % 100 === 0) { + if (initial_websocket_count === 0) { + initial_websocket_count = getWebSocketCount(); + } + } + } + } + await run(); + + // wait for the next tick so the final close callbacks run + await Bun.sleep(100); + current_websocket_count = getWebSocketCount(); + console.log({ current_websocket_count, initial_websocket_count }); + // expect the current and initial websocket counts to be close to the same (normally a difference of 1 or 2) + expect(Math.abs(current_websocket_count - initial_websocket_count)).toBeLessThanOrEqual(50); +}); From eb77bdd28662fce35a40aa2b5f50589aa4d070a4 Mon Sep 17 00:00:00 2001 From: robobun Date: Tue, 28 Oct 2025 00:05:16 -0700 Subject: [PATCH 019/102] Refactor: Split sourcemap.zig into separate struct files (#24141) MIME-Version: 1.0 Content-Type: text/plain; 
## Changes - **Extracted `ParsedSourceMap` struct** to `src/sourcemap/ParsedSourceMap.zig` - Made `SourceContentPtr` and related methods public - Made `standaloneModuleGraphData` public for external access - **Extracted `Chunk` struct** to `src/sourcemap/Chunk.zig` - Added import for `appendMappingToBuffer` from parent module - Includes all nested types: `VLQSourceMap`, `NewBuilder`, `Builder` - **Extracted `Mapping` struct** to `src/sourcemap/Mapping.zig` - Added necessary imports: `assert`, `ParseResult`, `debug` - Includes nested types: `MappingWithoutName`, `List`, `Lookup` - **Updated `src/sourcemap/sourcemap.zig`** - Replaced struct definitions with imports: `@import("./StructName.zig")` - Maintained all public APIs All structs now follow the `const StructName = @This()` pattern for top-level declarations. ## Testing - ✅ Compiled successfully with `bun bd` - ✅ All existing functionality preserved - ✅ No API changes - fully backwards compatible ## Before - Single 2000+ line file with multiple large structs - Difficult to navigate and maintain ## After - Modular structure with separate files for each major struct - Easier to find and modify specific functionality - Better code organization 🤖 Generated with [Claude Code](https://claude.com/claude-code) --------- Co-authored-by: Claude Bot Co-authored-by: Claude Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- src/sourcemap/Chunk.zig | 373 ++++++++++ src/sourcemap/Mapping.zig | 599 ++++++++++++++++ src/sourcemap/ParsedSourceMap.zig | 166 +++++ src/sourcemap/sourcemap.zig | 1101 +---------------------------- 4 files changed, 1141 insertions(+), 1098 deletions(-) create mode 100644 src/sourcemap/Chunk.zig create mode 100644 src/sourcemap/Mapping.zig create mode 100644 src/sourcemap/ParsedSourceMap.zig diff --git a/src/sourcemap/Chunk.zig b/src/sourcemap/Chunk.zig new file mode 100644 index 0000000000..59f236c5b5 --- /dev/null +++ b/src/sourcemap/Chunk.zig @@ -0,0 +1,373 @@ +const Chunk = @This(); + +buffer: MutableString, + +mappings_count: usize = 0, + +/// This end state will be used to rewrite the start of the following source +/// map chunk so that the delta-encoded VLQ numbers are preserved. +end_state: SourceMapState = .{}, + +/// There probably isn't a source mapping at the end of the file (nor should +/// there be) but if we're appending another source map chunk after this one, +/// we'll need to know how many characters were in the last line we generated. 
+final_generated_column: i32 = 0, + +/// ignore empty chunks +should_ignore: bool = true, + +pub fn initEmpty() Chunk { + return .{ + .buffer = MutableString.initEmpty(bun.default_allocator), + .mappings_count = 0, + .end_state = .{}, + .final_generated_column = 0, + .should_ignore = true, + }; +} + +pub fn deinit(this: *Chunk) void { + this.buffer.deinit(); +} + +pub fn printSourceMapContents( + chunk: Chunk, + source: *const Logger.Source, + mutable: *MutableString, + include_sources_contents: bool, + comptime ascii_only: bool, +) !void { + try printSourceMapContentsAtOffset( + chunk, + source, + mutable, + include_sources_contents, + 0, + ascii_only, + ); +} + +pub fn printSourceMapContentsAtOffset( + chunk: Chunk, + source: *const Logger.Source, + mutable: *MutableString, + include_sources_contents: bool, + offset: usize, + comptime ascii_only: bool, +) !void { + // attempt to pre-allocate + + var filename_buf: bun.PathBuffer = undefined; + var filename = source.path.text; + if (strings.hasPrefix(source.path.text, FileSystem.instance.top_level_dir)) { + filename = filename[FileSystem.instance.top_level_dir.len - 1 ..]; + } else if (filename.len > 0 and filename[0] != '/') { + filename_buf[0] = '/'; + @memcpy(filename_buf[1..][0..filename.len], filename); + filename = filename_buf[0 .. filename.len + 1]; + } + + mutable.growIfNeeded( + filename.len + 2 + (source.contents.len * @as(usize, @intFromBool(include_sources_contents))) + (chunk.buffer.list.items.len - offset) + 32 + 39 + 29 + 22 + 20, + ) catch unreachable; + try mutable.append("{\n \"version\":3,\n \"sources\": ["); + + try JSPrinter.quoteForJSON(filename, mutable, ascii_only); + + if (include_sources_contents) { + try mutable.append("],\n \"sourcesContent\": ["); + try JSPrinter.quoteForJSON(source.contents, mutable, ascii_only); + } + + try mutable.append("],\n \"mappings\": "); + try JSPrinter.quoteForJSON(chunk.buffer.list.items[offset..], mutable, ascii_only); + try mutable.append(", \"names\": []\n}"); +} + +// TODO: remove the indirection by having generic functions for SourceMapFormat and NewBuilder. 
Source maps are always VLQ +pub fn SourceMapFormat(comptime Type: type) type { + return struct { + ctx: Type, + const Format = @This(); + + pub fn init(allocator: std.mem.Allocator, prepend_count: bool) Format { + return .{ .ctx = Type.init(allocator, prepend_count) }; + } + + pub inline fn appendLineSeparator(this: *Format) anyerror!void { + try this.ctx.appendLineSeparator(); + } + + pub inline fn append(this: *Format, current_state: SourceMapState, prev_state: SourceMapState) anyerror!void { + try this.ctx.append(current_state, prev_state); + } + + pub inline fn shouldIgnore(this: Format) bool { + return this.ctx.shouldIgnore(); + } + + pub inline fn getBuffer(this: Format) MutableString { + return this.ctx.getBuffer(); + } + + pub inline fn takeBuffer(this: *Format) MutableString { + return this.ctx.takeBuffer(); + } + + pub inline fn getCount(this: Format) usize { + return this.ctx.getCount(); + } + }; +} + +pub const VLQSourceMap = struct { + data: MutableString, + count: usize = 0, + offset: usize = 0, + approximate_input_line_count: usize = 0, + + pub fn init(allocator: std.mem.Allocator, prepend_count: bool) VLQSourceMap { + var map = VLQSourceMap{ + .data = MutableString.initEmpty(allocator), + }; + + // For bun.js, we store the number of mappings and how many bytes the final list is at the beginning of the array + if (prepend_count) { + map.offset = 24; + map.data.append(&([_]u8{0} ** 24)) catch unreachable; + } + + return map; + } + + pub fn appendLineSeparator(this: *VLQSourceMap) anyerror!void { + try this.data.appendChar(';'); + } + + pub fn append(this: *VLQSourceMap, current_state: SourceMapState, prev_state: SourceMapState) anyerror!void { + const last_byte: u8 = if (this.data.list.items.len > this.offset) + this.data.list.items[this.data.list.items.len - 1] + else + 0; + + appendMappingToBuffer(&this.data, last_byte, prev_state, current_state); + this.count += 1; + } + + pub fn shouldIgnore(this: VLQSourceMap) bool { + return this.count == 0; + } + + pub fn getBuffer(this: VLQSourceMap) MutableString { + return this.data; + } + + pub fn takeBuffer(this: *VLQSourceMap) MutableString { + defer this.data = .initEmpty(this.data.allocator); + return this.data; + } + + pub fn getCount(this: VLQSourceMap) usize { + return this.count; + } +}; + +pub fn NewBuilder(comptime SourceMapFormatType: type) type { + return struct { + const ThisBuilder = @This(); + source_map: SourceMapper, + line_offset_tables: LineOffsetTable.List = .{}, + prev_state: SourceMapState = SourceMapState{}, + last_generated_update: u32 = 0, + generated_column: i32 = 0, + prev_loc: Logger.Loc = Logger.Loc.Empty, + has_prev_state: bool = false, + + line_offset_table_byte_offset_list: []const u32 = &.{}, + + // This is a workaround for a bug in the popular "source-map" library: + // https://github.com/mozilla/source-map/issues/261. The library will + // sometimes return null when querying a source map unless every line + // starts with a mapping at column zero. + // + // The workaround is to replicate the previous mapping if a line ends + // up not starting with a mapping. This is done lazily because we want + // to avoid replicating the previous mapping if we don't need to. 
+ line_starts_with_mapping: bool = false, + cover_lines_without_mappings: bool = false, + + approximate_input_line_count: usize = 0, + + /// When generating sourcemappings for bun, we store a count of how many mappings there were + prepend_count: bool = false, + + pub const SourceMapper = SourceMapFormat(SourceMapFormatType); + + pub noinline fn generateChunk(b: *ThisBuilder, output: []const u8) Chunk { + b.updateGeneratedLineAndColumn(output); + var buffer = b.source_map.getBuffer(); + if (b.prepend_count) { + buffer.list.items[0..8].* = @as([8]u8, @bitCast(buffer.list.items.len)); + buffer.list.items[8..16].* = @as([8]u8, @bitCast(b.source_map.getCount())); + buffer.list.items[16..24].* = @as([8]u8, @bitCast(b.approximate_input_line_count)); + } + return Chunk{ + .buffer = b.source_map.takeBuffer(), + .mappings_count = b.source_map.getCount(), + .end_state = b.prev_state, + .final_generated_column = b.generated_column, + .should_ignore = b.source_map.shouldIgnore(), + }; + } + + // Scan over the printed text since the last source mapping and update the + // generated line and column numbers + pub fn updateGeneratedLineAndColumn(b: *ThisBuilder, output: []const u8) void { + const slice = output[b.last_generated_update..]; + var needs_mapping = b.cover_lines_without_mappings and !b.line_starts_with_mapping and b.has_prev_state; + + var i: usize = 0; + const n = @as(usize, @intCast(slice.len)); + var c: i32 = 0; + while (i < n) { + const len = strings.wtf8ByteSequenceLengthWithInvalid(slice[i]); + c = strings.decodeWTF8RuneT(slice[i..].ptr[0..4], len, i32, strings.unicode_replacement); + i += @as(usize, len); + + switch (c) { + 14...127 => { + if (strings.indexOfNewlineOrNonASCII(slice, @as(u32, @intCast(i)))) |j| { + b.generated_column += @as(i32, @intCast((@as(usize, j) - i) + 1)); + i = j; + continue; + } else { + b.generated_column += @as(i32, @intCast(slice[i..].len)) + 1; + i = n; + break; + } + }, + '\r', '\n', 0x2028, 0x2029 => { + // windows newline + if (c == '\r') { + const newline_check = b.last_generated_update + i + 1; + if (newline_check < output.len and output[newline_check] == '\n') { + continue; + } + } + + // If we're about to move to the next line and the previous line didn't have + // any mappings, add a mapping at the start of the previous line. 
+ if (needs_mapping) { + b.appendMappingWithoutRemapping(.{ + .generated_line = b.prev_state.generated_line, + .generated_column = 0, + .source_index = b.prev_state.source_index, + .original_line = b.prev_state.original_line, + .original_column = b.prev_state.original_column, + }); + } + + b.prev_state.generated_line += 1; + b.prev_state.generated_column = 0; + b.generated_column = 0; + b.source_map.appendLineSeparator() catch unreachable; + + // This new line doesn't have a mapping yet + b.line_starts_with_mapping = false; + + needs_mapping = b.cover_lines_without_mappings and !b.line_starts_with_mapping and b.has_prev_state; + }, + + else => { + // Mozilla's "source-map" library counts columns using UTF-16 code units + b.generated_column += @as(i32, @intFromBool(c > 0xFFFF)) + 1; + }, + } + } + + b.last_generated_update = @as(u32, @truncate(output.len)); + } + + pub fn appendMapping(b: *ThisBuilder, current_state: SourceMapState) void { + b.appendMappingWithoutRemapping(current_state); + } + + pub fn appendMappingWithoutRemapping(b: *ThisBuilder, current_state: SourceMapState) void { + b.source_map.append(current_state, b.prev_state) catch unreachable; + b.prev_state = current_state; + b.has_prev_state = true; + } + + pub fn addSourceMapping(b: *ThisBuilder, loc: Logger.Loc, output: []const u8) void { + if ( + // don't insert mappings for same location twice + b.prev_loc.eql(loc) or + // exclude generated code from source + loc.start == Logger.Loc.Empty.start) + return; + + b.prev_loc = loc; + const list = b.line_offset_tables; + + // We have no sourcemappings. + // This happens for example when importing an asset which does not support sourcemaps + // like a png or a jpg + // + // import foo from "./foo.png"; + // + if (list.len == 0) { + return; + } + + const original_line = LineOffsetTable.findLine(b.line_offset_table_byte_offset_list, loc); + const line = list.get(@as(usize, @intCast(@max(original_line, 0)))); + + // Use the line to compute the column + var original_column = loc.start - @as(i32, @intCast(line.byte_offset_to_start_of_line)); + if (line.columns_for_non_ascii.len > 0 and original_column >= @as(i32, @intCast(line.byte_offset_to_first_non_ascii))) { + original_column = line.columns_for_non_ascii.slice()[@as(u32, @intCast(original_column)) - line.byte_offset_to_first_non_ascii]; + } + + b.updateGeneratedLineAndColumn(output); + + // If this line doesn't start with a mapping and we're about to add a mapping + // that's not at the start, insert a mapping first so the line starts with one. 
+ if (b.cover_lines_without_mappings and !b.line_starts_with_mapping and b.generated_column > 0 and b.has_prev_state) { + b.appendMappingWithoutRemapping(.{ + .generated_line = b.prev_state.generated_line, + .generated_column = 0, + .source_index = b.prev_state.source_index, + .original_line = b.prev_state.original_line, + .original_column = b.prev_state.original_column, + }); + } + + b.appendMapping(.{ + .generated_line = b.prev_state.generated_line, + .generated_column = @max(b.generated_column, 0), + .source_index = b.prev_state.source_index, + .original_line = @max(original_line, 0), + .original_column = @max(original_column, 0), + }); + + // This line now has a mapping on it, so don't insert another one + b.line_starts_with_mapping = true; + } + }; +} + +pub const Builder = NewBuilder(VLQSourceMap); + +const std = @import("std"); + +const SourceMap = @import("./sourcemap.zig"); +const LineOffsetTable = SourceMap.LineOffsetTable; +const SourceMapState = SourceMap.SourceMapState; +const appendMappingToBuffer = SourceMap.appendMappingToBuffer; + +const bun = @import("bun"); +const JSPrinter = bun.js_printer; +const Logger = bun.logger; +const MutableString = bun.MutableString; +const strings = bun.strings; +const FileSystem = bun.fs.FileSystem; diff --git a/src/sourcemap/Mapping.zig b/src/sourcemap/Mapping.zig new file mode 100644 index 0000000000..bbd8f0ede6 --- /dev/null +++ b/src/sourcemap/Mapping.zig @@ -0,0 +1,599 @@ +const Mapping = @This(); + +const debug = bun.Output.scoped(.SourceMap, .visible); + +generated: LineColumnOffset, +original: LineColumnOffset, +source_index: i32, +name_index: i32 = -1, + +/// Optimization: if we don't care about the "names" column, then don't store the names. +pub const MappingWithoutName = struct { + generated: LineColumnOffset, + original: LineColumnOffset, + source_index: i32, + + pub fn toNamed(this: *const MappingWithoutName) Mapping { + return .{ + .generated = this.generated, + .original = this.original, + .source_index = this.source_index, + .name_index = -1, + }; + } +}; + +pub const List = struct { + impl: Value = .{ .without_names = .{} }, + names: []const bun.Semver.String = &[_]bun.Semver.String{}, + names_buffer: bun.ByteList = .{}, + + pub const Value = union(enum) { + without_names: bun.MultiArrayList(MappingWithoutName), + with_names: bun.MultiArrayList(Mapping), + + pub fn memoryCost(this: *const Value) usize { + return switch (this.*) { + .without_names => |*list| list.memoryCost(), + .with_names => |*list| list.memoryCost(), + }; + } + + pub fn ensureTotalCapacity(this: *Value, allocator: std.mem.Allocator, count: usize) !void { + switch (this.*) { + inline else => |*list| try list.ensureTotalCapacity(allocator, count), + } + } + }; + + fn ensureWithNames(this: *List, allocator: std.mem.Allocator) !void { + if (this.impl == .with_names) return; + + var without_names = this.impl.without_names; + var with_names = bun.MultiArrayList(Mapping){}; + try with_names.ensureTotalCapacity(allocator, without_names.len); + defer without_names.deinit(allocator); + + with_names.len = without_names.len; + var old_slices = without_names.slice(); + var new_slices = with_names.slice(); + + @memcpy(new_slices.items(.generated), old_slices.items(.generated)); + @memcpy(new_slices.items(.original), old_slices.items(.original)); + @memcpy(new_slices.items(.source_index), old_slices.items(.source_index)); + @memset(new_slices.items(.name_index), -1); + + this.impl = .{ .with_names = with_names }; + } + + fn findIndexFromGenerated(line_column_offsets: 
[]const LineColumnOffset, line: bun.Ordinal, column: bun.Ordinal) ?usize { + var count = line_column_offsets.len; + var index: usize = 0; + while (count > 0) { + const step = count / 2; + const i: usize = index + step; + const mapping = line_column_offsets[i]; + if (mapping.lines.zeroBased() < line.zeroBased() or (mapping.lines.zeroBased() == line.zeroBased() and mapping.columns.zeroBased() <= column.zeroBased())) { + index = i + 1; + count -|= step + 1; + } else { + count = step; + } + } + + if (index > 0) { + if (line_column_offsets[index - 1].lines.zeroBased() == line.zeroBased()) { + return index - 1; + } + } + + return null; + } + + pub fn findIndex(this: *const List, line: bun.Ordinal, column: bun.Ordinal) ?usize { + switch (this.impl) { + inline else => |*list| { + if (findIndexFromGenerated(list.items(.generated), line, column)) |i| { + return i; + } + }, + } + + return null; + } + + const SortContext = struct { + generated: []const LineColumnOffset, + pub fn lessThan(ctx: SortContext, a_index: usize, b_index: usize) bool { + const a = ctx.generated[a_index]; + const b = ctx.generated[b_index]; + + return a.lines.zeroBased() < b.lines.zeroBased() or (a.lines.zeroBased() == b.lines.zeroBased() and a.columns.zeroBased() <= b.columns.zeroBased()); + } + }; + + pub fn sort(this: *List) void { + switch (this.impl) { + .without_names => |*list| list.sort(SortContext{ .generated = list.items(.generated) }), + .with_names => |*list| list.sort(SortContext{ .generated = list.items(.generated) }), + } + } + + pub fn append(this: *List, allocator: std.mem.Allocator, mapping: *const Mapping) !void { + switch (this.impl) { + .without_names => |*list| { + try list.append(allocator, .{ + .generated = mapping.generated, + .original = mapping.original, + .source_index = mapping.source_index, + }); + }, + .with_names => |*list| { + try list.append(allocator, mapping.*); + }, + } + } + + pub fn find(this: *const List, line: bun.Ordinal, column: bun.Ordinal) ?Mapping { + switch (this.impl) { + inline else => |*list, tag| { + if (findIndexFromGenerated(list.items(.generated), line, column)) |i| { + if (tag == .without_names) { + return list.get(i).toNamed(); + } else { + return list.get(i); + } + } + }, + } + + return null; + } + pub fn generated(self: *const List) []const LineColumnOffset { + return switch (self.impl) { + inline else => |*list| list.items(.generated), + }; + } + + pub fn original(self: *const List) []const LineColumnOffset { + return switch (self.impl) { + inline else => |*list| list.items(.original), + }; + } + + pub fn sourceIndex(self: *const List) []const i32 { + return switch (self.impl) { + inline else => |*list| list.items(.source_index), + }; + } + + pub fn nameIndex(self: *const List) []const i32 { + return switch (self.impl) { + inline else => |*list| list.items(.name_index), + }; + } + + pub fn deinit(self: *List, allocator: std.mem.Allocator) void { + switch (self.impl) { + inline else => |*list| list.deinit(allocator), + } + + self.names_buffer.deinit(allocator); + allocator.free(self.names); + } + + pub fn getName(this: *List, index: i32) ?[]const u8 { + if (index < 0) return null; + const i: usize = @intCast(index); + + if (i >= this.names.len) return null; + + if (this.impl == .with_names) { + const str: *const bun.Semver.String = &this.names[i]; + return str.slice(this.names_buffer.slice()); + } + + return null; + } + + pub fn memoryCost(this: *const List) usize { + return this.impl.memoryCost() + this.names_buffer.memoryCost() + + (this.names.len * 
@sizeOf(bun.Semver.String)); + } + + pub fn ensureTotalCapacity(this: *List, allocator: std.mem.Allocator, count: usize) !void { + try this.impl.ensureTotalCapacity(allocator, count); + } +}; + +pub const Lookup = struct { + mapping: Mapping, + source_map: ?*ParsedSourceMap = null, + /// Owned by default_allocator always + /// use `getSourceCode` to access this as a Slice + prefetched_source_code: ?[]const u8, + + name: ?[]const u8 = null, + + /// This creates a bun.String if the source remap *changes* the source url, + /// which is only possible if the executed file differs from the source file: + /// + /// - `bun build --sourcemap`, it is another file on disk + /// - `bun build --compile --sourcemap`, it is an embedded file. + pub fn displaySourceURLIfNeeded(lookup: Lookup, base_filename: []const u8) ?bun.String { + const source_map = lookup.source_map orelse return null; + // See doc comment on `external_source_names` + if (source_map.external_source_names.len == 0) + return null; + if (lookup.mapping.source_index >= source_map.external_source_names.len) + return null; + + const name = source_map.external_source_names[@intCast(lookup.mapping.source_index)]; + + if (source_map.is_standalone_module_graph) { + return bun.String.cloneUTF8(name); + } + + if (std.fs.path.isAbsolute(base_filename)) { + const dir = bun.path.dirname(base_filename, .auto); + return bun.String.cloneUTF8(bun.path.joinAbs(dir, .auto, name)); + } + + return bun.String.init(name); + } + + /// Only valid if `lookup.source_map.isExternal()` + /// This has the possibility of invoking a call to the filesystem. + /// + /// This data is freed after printed on the assumption that printing + /// errors to the console are rare (this isnt used for error.stack) + pub fn getSourceCode(lookup: Lookup, base_filename: []const u8) ?bun.jsc.ZigString.Slice { + const bytes = bytes: { + if (lookup.prefetched_source_code) |code| { + break :bytes code; + } + + const source_map = lookup.source_map orelse return null; + assert(source_map.isExternal()); + + const provider = source_map.underlying_provider.provider() orelse + return null; + + const index = lookup.mapping.source_index; + + // Standalone module graph source maps are stored (in memory) compressed. + // They are decompressed on demand. 
+ if (source_map.is_standalone_module_graph) { + const serialized = source_map.standaloneModuleGraphData(); + if (index >= source_map.external_source_names.len) + return null; + + const code = serialized.sourceFileContents(@intCast(index)); + + return bun.jsc.ZigString.Slice.fromUTF8NeverFree(code orelse return null); + } + + if (provider.getSourceMap( + base_filename, + source_map.underlying_provider.load_hint, + .{ .source_only = @intCast(index) }, + )) |parsed| + if (parsed.source_contents) |contents| + break :bytes contents; + + if (index >= source_map.external_source_names.len) + return null; + + const name = source_map.external_source_names[@intCast(index)]; + + var buf: bun.PathBuffer = undefined; + const normalized = bun.path.joinAbsStringBufZ( + bun.path.dirname(base_filename, .auto), + &buf, + &.{name}, + .loose, + ); + switch (bun.sys.File.readFrom( + std.fs.cwd(), + normalized, + bun.default_allocator, + )) { + .result => |r| break :bytes r, + .err => return null, + } + }; + + return bun.jsc.ZigString.Slice.init(bun.default_allocator, bytes); + } +}; + +pub inline fn generatedLine(mapping: *const Mapping) i32 { + return mapping.generated.lines.zeroBased(); +} + +pub inline fn generatedColumn(mapping: *const Mapping) i32 { + return mapping.generated.columns.zeroBased(); +} + +pub inline fn sourceIndex(mapping: *const Mapping) i32 { + return mapping.source_index; +} + +pub inline fn originalLine(mapping: *const Mapping) i32 { + return mapping.original.lines.zeroBased(); +} + +pub inline fn originalColumn(mapping: *const Mapping) i32 { + return mapping.original.columns.zeroBased(); +} + +pub inline fn nameIndex(mapping: *const Mapping) i32 { + return mapping.name_index; +} + +pub fn parse( + allocator: std.mem.Allocator, + bytes: []const u8, + estimated_mapping_count: ?usize, + sources_count: i32, + input_line_count: usize, + options: struct { + allow_names: bool = false, + sort: bool = false, + }, +) ParseResult { + debug("parse mappings ({d} bytes)", .{bytes.len}); + + var mapping = Mapping.List{}; + errdefer mapping.deinit(allocator); + + if (estimated_mapping_count) |count| { + mapping.ensureTotalCapacity(allocator, count) catch { + return .{ + .fail = .{ + .msg = "Out of memory", + .err = error.OutOfMemory, + .loc = .{}, + }, + }; + }; + } + + var generated = LineColumnOffset{ .lines = bun.Ordinal.start, .columns = bun.Ordinal.start }; + var original = LineColumnOffset{ .lines = bun.Ordinal.start, .columns = bun.Ordinal.start }; + var name_index: i32 = 0; + var source_index: i32 = 0; + var needs_sort = false; + var remain = bytes; + var has_names = false; + while (remain.len > 0) { + if (remain[0] == ';') { + generated.columns = bun.Ordinal.start; + + while (strings.hasPrefixComptime( + remain, + comptime [_]u8{';'} ** (@sizeOf(usize) / 2), + )) { + generated.lines = generated.lines.addScalar(@sizeOf(usize) / 2); + remain = remain[@sizeOf(usize) / 2 ..]; + } + + while (remain.len > 0 and remain[0] == ';') { + generated.lines = generated.lines.addScalar(1); + remain = remain[1..]; + } + + if (remain.len == 0) { + break; + } + } + + // Read the generated column + const generated_column_delta = decodeVLQ(remain, 0); + + if (generated_column_delta.start == 0) { + return .{ + .fail = .{ + .msg = "Missing generated column value", + .err = error.MissingGeneratedColumnValue, + .value = generated.columns.zeroBased(), + .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, + }, + }; + } + + needs_sort = needs_sort or generated_column_delta.value < 0; + + generated.columns = 
generated.columns.addScalar(generated_column_delta.value); + if (generated.columns.zeroBased() < 0) { + return .{ + .fail = .{ + .msg = "Invalid generated column value", + .err = error.InvalidGeneratedColumnValue, + .value = generated.columns.zeroBased(), + .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, + }, + }; + } + + remain = remain[generated_column_delta.start..]; + + // According to the specification, it's valid for a mapping to have 1, + // 4, or 5 variable-length fields. Having one field means there's no + // original location information, which is pretty useless. Just ignore + // those entries. + if (remain.len == 0) + break; + + switch (remain[0]) { + ',' => { + remain = remain[1..]; + continue; + }, + ';' => { + continue; + }, + else => {}, + } + + // Read the original source + const source_index_delta = decodeVLQ(remain, 0); + if (source_index_delta.start == 0) { + return .{ + .fail = .{ + .msg = "Invalid source index delta", + .err = error.InvalidSourceIndexDelta, + .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, + }, + }; + } + source_index += source_index_delta.value; + + if (source_index < 0 or source_index > sources_count) { + return .{ + .fail = .{ + .msg = "Invalid source index value", + .err = error.InvalidSourceIndexValue, + .value = source_index, + .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, + }, + }; + } + remain = remain[source_index_delta.start..]; + + // Read the original line + const original_line_delta = decodeVLQ(remain, 0); + if (original_line_delta.start == 0) { + return .{ + .fail = .{ + .msg = "Missing original line", + .err = error.MissingOriginalLine, + .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, + }, + }; + } + + original.lines = original.lines.addScalar(original_line_delta.value); + if (original.lines.zeroBased() < 0) { + return .{ + .fail = .{ + .msg = "Invalid original line value", + .err = error.InvalidOriginalLineValue, + .value = original.lines.zeroBased(), + .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, + }, + }; + } + remain = remain[original_line_delta.start..]; + + // Read the original column + const original_column_delta = decodeVLQ(remain, 0); + if (original_column_delta.start == 0) { + return .{ + .fail = .{ + .msg = "Missing original column value", + .err = error.MissingOriginalColumnValue, + .value = original.columns.zeroBased(), + .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, + }, + }; + } + + original.columns = original.columns.addScalar(original_column_delta.value); + if (original.columns.zeroBased() < 0) { + return .{ + .fail = .{ + .msg = "Invalid original column value", + .err = error.InvalidOriginalColumnValue, + .value = original.columns.zeroBased(), + .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, + }, + }; + } + remain = remain[original_column_delta.start..]; + + if (remain.len > 0) { + switch (remain[0]) { + ',' => { + // 4 column, but there's more on this line. + remain = remain[1..]; + }, + // 4 column, and there's no more on this line. 
+ ';' => {}, + + // 5th column: the name + else => |c| { + // Read the name index + const name_index_delta = decodeVLQ(remain, 0); + if (name_index_delta.start == 0) { + return .{ + .fail = .{ + .msg = "Invalid name index delta", + .err = error.InvalidNameIndexDelta, + .value = @intCast(c), + .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, + }, + }; + } + remain = remain[name_index_delta.start..]; + + if (options.allow_names) { + name_index += name_index_delta.value; + if (!has_names) { + mapping.ensureWithNames(allocator) catch { + return .{ + .fail = .{ + .msg = "Out of memory", + .err = error.OutOfMemory, + .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, + }, + }; + }; + } + has_names = true; + } + + if (remain.len > 0) { + switch (remain[0]) { + // There's more on this line. + ',' => { + remain = remain[1..]; + }, + // That's the end of the line. + ';' => {}, + else => {}, + } + } + }, + } + } + mapping.append(allocator, &.{ + .generated = generated, + .original = original, + .source_index = source_index, + .name_index = name_index, + }) catch |err| bun.handleOom(err); + } + + if (needs_sort and options.sort) { + mapping.sort(); + } + + return .{ .success = .{ + .ref_count = .init(), + .mappings = mapping, + .input_line_count = input_line_count, + } }; +} + +const std = @import("std"); + +const SourceMap = @import("./sourcemap.zig"); +const LineColumnOffset = SourceMap.LineColumnOffset; +const ParseResult = SourceMap.ParseResult; +const ParsedSourceMap = SourceMap.ParsedSourceMap; +const decodeVLQ = SourceMap.VLQ.decode; + +const bun = @import("bun"); +const assert = bun.assert; +const strings = bun.strings; diff --git a/src/sourcemap/ParsedSourceMap.zig b/src/sourcemap/ParsedSourceMap.zig new file mode 100644 index 0000000000..b774d00f03 --- /dev/null +++ b/src/sourcemap/ParsedSourceMap.zig @@ -0,0 +1,166 @@ +const ParsedSourceMap = @This(); + +const RefCount = bun.ptr.ThreadSafeRefCount(@This(), "ref_count", deinit, .{}); +pub const ref = RefCount.ref; +pub const deref = RefCount.deref; + +/// ParsedSourceMap can be acquired by different threads via the thread-safe +/// source map store (SavedSourceMap), so the reference count must be thread-safe. +ref_count: RefCount, + +input_line_count: usize = 0, +mappings: Mapping.List = .{}, + +/// If this is empty, this implies that the source code is a single file +/// transpiled on-demand. If there are items, then it means this is a file +/// loaded without transpilation but with external sources. This array +/// maps `source_index` to the correct filename. +external_source_names: []const []const u8 = &.{}, +/// In order to load source contents from a source-map after the fact, +/// a handle to the underlying source provider is stored. Within this pointer, +/// a flag is stored if it is known to be an inline or external source map. +/// +/// Source contents are large, we don't preserve them in memory. 
This has +/// the downside of repeatedly re-decoding sourcemaps if multiple errors +/// are emitted (specifically with Bun.inspect / unhandled; the ones that +/// rely on source contents) +underlying_provider: SourceContentPtr = .none, + +is_standalone_module_graph: bool = false, + +const SourceProviderKind = enum(u2) { zig, bake, dev_server }; +const AnySourceProvider = union(enum) { + zig: *SourceProviderMap, + bake: *BakeSourceProvider, + dev_server: *DevServerSourceProvider, + + pub fn ptr(this: AnySourceProvider) *anyopaque { + return switch (this) { + .zig => @ptrCast(this.zig), + .bake => @ptrCast(this.bake), + .dev_server => @ptrCast(this.dev_server), + }; + } + + pub fn getSourceMap( + this: AnySourceProvider, + source_filename: []const u8, + load_hint: SourceMapLoadHint, + result: ParseUrlResultHint, + ) ?SourceMap.ParseUrl { + return switch (this) { + .zig => this.zig.getSourceMap(source_filename, load_hint, result), + .bake => this.bake.getSourceMap(source_filename, load_hint, result), + .dev_server => this.dev_server.getSourceMap(source_filename, load_hint, result), + }; + } +}; + +pub const SourceContentPtr = packed struct(u64) { + load_hint: SourceMapLoadHint, + kind: SourceProviderKind, + data: u60, + + pub const none: SourceContentPtr = .{ .load_hint = .none, .kind = .zig, .data = 0 }; + + pub fn fromProvider(p: *SourceProviderMap) SourceContentPtr { + return .{ .load_hint = .none, .data = @intCast(@intFromPtr(p)), .kind = .zig }; + } + + pub fn fromBakeProvider(p: *BakeSourceProvider) SourceContentPtr { + return .{ .load_hint = .none, .data = @intCast(@intFromPtr(p)), .kind = .bake }; + } + + pub fn fromDevServerProvider(p: *DevServerSourceProvider) SourceContentPtr { + return .{ .load_hint = .none, .data = @intCast(@intFromPtr(p)), .kind = .dev_server }; + } + + pub fn provider(sc: SourceContentPtr) ?AnySourceProvider { + switch (sc.kind) { + .zig => return .{ .zig = @ptrFromInt(sc.data) }, + .bake => return .{ .bake = @ptrFromInt(sc.data) }, + .dev_server => return .{ .dev_server = @ptrFromInt(sc.data) }, + } + } +}; + +pub fn isExternal(psm: *ParsedSourceMap) bool { + return psm.external_source_names.len != 0; +} + +fn deinit(this: *ParsedSourceMap) void { + const allocator = bun.default_allocator; + + this.mappings.deinit(allocator); + + if (this.external_source_names.len > 0) { + for (this.external_source_names) |name| + allocator.free(name); + allocator.free(this.external_source_names); + } + + bun.destroy(this); +} + +pub fn standaloneModuleGraphData(this: *ParsedSourceMap) *bun.StandaloneModuleGraph.SerializedSourceMap.Loaded { + bun.assert(this.is_standalone_module_graph); + return @ptrFromInt(this.underlying_provider.data); +} + +pub fn memoryCost(this: *const ParsedSourceMap) usize { + return @sizeOf(ParsedSourceMap) + this.mappings.memoryCost() + this.external_source_names.len * @sizeOf([]const u8); +} + +pub fn writeVLQs(map: *const ParsedSourceMap, writer: anytype) !void { + var last_col: i32 = 0; + var last_src: i32 = 0; + var last_ol: i32 = 0; + var last_oc: i32 = 0; + var current_line: i32 = 0; + for ( + map.mappings.generated(), + map.mappings.original(), + map.mappings.sourceIndex(), + 0.., + ) |gen, orig, source_index, i| { + if (current_line != gen.lines.zeroBased()) { + assert(gen.lines.zeroBased() > current_line); + const inc = gen.lines.zeroBased() - current_line; + try writer.writeByteNTimes(';', @intCast(inc)); + current_line = gen.lines.zeroBased(); + last_col = 0; + } else if (i != 0) { + try writer.writeByte(','); + } + try 
VLQ.encode(gen.columns.zeroBased() - last_col).writeTo(writer); + last_col = gen.columns.zeroBased(); + try VLQ.encode(source_index - last_src).writeTo(writer); + last_src = source_index; + try VLQ.encode(orig.lines.zeroBased() - last_ol).writeTo(writer); + last_ol = orig.lines.zeroBased(); + try VLQ.encode(orig.columns.zeroBased() - last_oc).writeTo(writer); + last_oc = orig.columns.zeroBased(); + } +} + +pub fn formatVLQs(map: *const ParsedSourceMap) std.fmt.Formatter(formatVLQsImpl) { + return .{ .data = map }; +} + +fn formatVLQsImpl(map: *const ParsedSourceMap, comptime _: []const u8, _: std.fmt.FormatOptions, w: anytype) !void { + try map.writeVLQs(w); +} + +const std = @import("std"); + +const SourceMap = @import("./sourcemap.zig"); +const BakeSourceProvider = SourceMap.BakeSourceProvider; +const DevServerSourceProvider = SourceMap.DevServerSourceProvider; +const Mapping = SourceMap.Mapping; +const ParseUrlResultHint = SourceMap.ParseUrlResultHint; +const SourceMapLoadHint = SourceMap.SourceMapLoadHint; +const SourceProviderMap = SourceMap.SourceProviderMap; +const VLQ = SourceMap.VLQ; + +const bun = @import("bun"); +const assert = bun.assert; diff --git a/src/sourcemap/sourcemap.zig b/src/sourcemap/sourcemap.zig index 5e9f6ff1f2..f452d59d07 100644 --- a/src/sourcemap/sourcemap.zig +++ b/src/sourcemap/sourcemap.zig @@ -250,591 +250,7 @@ pub fn parseJSON( } /// Corresponds to a segment in the "mappings" field of a sourcemap -pub const Mapping = struct { - generated: LineColumnOffset, - original: LineColumnOffset, - source_index: i32, - name_index: i32 = -1, - - /// Optimization: if we don't care about the "names" column, then don't store the names. - pub const MappingWithoutName = struct { - generated: LineColumnOffset, - original: LineColumnOffset, - source_index: i32, - - pub fn toNamed(this: *const MappingWithoutName) Mapping { - return .{ - .generated = this.generated, - .original = this.original, - .source_index = this.source_index, - .name_index = -1, - }; - } - }; - - pub const List = struct { - impl: Value = .{ .without_names = .{} }, - names: []const bun.Semver.String = &[_]bun.Semver.String{}, - names_buffer: bun.ByteList = .{}, - - pub const Value = union(enum) { - without_names: bun.MultiArrayList(MappingWithoutName), - with_names: bun.MultiArrayList(Mapping), - - pub fn memoryCost(this: *const Value) usize { - return switch (this.*) { - .without_names => |*list| list.memoryCost(), - .with_names => |*list| list.memoryCost(), - }; - } - - pub fn ensureTotalCapacity(this: *Value, allocator: std.mem.Allocator, count: usize) !void { - switch (this.*) { - inline else => |*list| try list.ensureTotalCapacity(allocator, count), - } - } - }; - - fn ensureWithNames(this: *List, allocator: std.mem.Allocator) !void { - if (this.impl == .with_names) return; - - var without_names = this.impl.without_names; - var with_names = bun.MultiArrayList(Mapping){}; - try with_names.ensureTotalCapacity(allocator, without_names.len); - defer without_names.deinit(allocator); - - with_names.len = without_names.len; - var old_slices = without_names.slice(); - var new_slices = with_names.slice(); - - @memcpy(new_slices.items(.generated), old_slices.items(.generated)); - @memcpy(new_slices.items(.original), old_slices.items(.original)); - @memcpy(new_slices.items(.source_index), old_slices.items(.source_index)); - @memset(new_slices.items(.name_index), -1); - - this.impl = .{ .with_names = with_names }; - } - - fn findIndexFromGenerated(line_column_offsets: []const LineColumnOffset, line: bun.Ordinal, 
column: bun.Ordinal) ?usize { - var count = line_column_offsets.len; - var index: usize = 0; - while (count > 0) { - const step = count / 2; - const i: usize = index + step; - const mapping = line_column_offsets[i]; - if (mapping.lines.zeroBased() < line.zeroBased() or (mapping.lines.zeroBased() == line.zeroBased() and mapping.columns.zeroBased() <= column.zeroBased())) { - index = i + 1; - count -|= step + 1; - } else { - count = step; - } - } - - if (index > 0) { - if (line_column_offsets[index - 1].lines.zeroBased() == line.zeroBased()) { - return index - 1; - } - } - - return null; - } - - pub fn findIndex(this: *const List, line: bun.Ordinal, column: bun.Ordinal) ?usize { - switch (this.impl) { - inline else => |*list| { - if (findIndexFromGenerated(list.items(.generated), line, column)) |i| { - return i; - } - }, - } - - return null; - } - - const SortContext = struct { - generated: []const LineColumnOffset, - pub fn lessThan(ctx: SortContext, a_index: usize, b_index: usize) bool { - const a = ctx.generated[a_index]; - const b = ctx.generated[b_index]; - - return a.lines.zeroBased() < b.lines.zeroBased() or (a.lines.zeroBased() == b.lines.zeroBased() and a.columns.zeroBased() <= b.columns.zeroBased()); - } - }; - - pub fn sort(this: *List) void { - switch (this.impl) { - .without_names => |*list| list.sort(SortContext{ .generated = list.items(.generated) }), - .with_names => |*list| list.sort(SortContext{ .generated = list.items(.generated) }), - } - } - - pub fn append(this: *List, allocator: std.mem.Allocator, mapping: *const Mapping) !void { - switch (this.impl) { - .without_names => |*list| { - try list.append(allocator, .{ - .generated = mapping.generated, - .original = mapping.original, - .source_index = mapping.source_index, - }); - }, - .with_names => |*list| { - try list.append(allocator, mapping.*); - }, - } - } - - pub fn find(this: *const List, line: bun.Ordinal, column: bun.Ordinal) ?Mapping { - switch (this.impl) { - inline else => |*list, tag| { - if (findIndexFromGenerated(list.items(.generated), line, column)) |i| { - if (tag == .without_names) { - return list.get(i).toNamed(); - } else { - return list.get(i); - } - } - }, - } - - return null; - } - pub fn generated(self: *const List) []const LineColumnOffset { - return switch (self.impl) { - inline else => |*list| list.items(.generated), - }; - } - - pub fn original(self: *const List) []const LineColumnOffset { - return switch (self.impl) { - inline else => |*list| list.items(.original), - }; - } - - pub fn sourceIndex(self: *const List) []const i32 { - return switch (self.impl) { - inline else => |*list| list.items(.source_index), - }; - } - - pub fn nameIndex(self: *const List) []const i32 { - return switch (self.impl) { - inline else => |*list| list.items(.name_index), - }; - } - - pub fn deinit(self: *List, allocator: std.mem.Allocator) void { - switch (self.impl) { - inline else => |*list| list.deinit(allocator), - } - - self.names_buffer.deinit(allocator); - allocator.free(self.names); - } - - pub fn getName(this: *List, index: i32) ?[]const u8 { - if (index < 0) return null; - const i: usize = @intCast(index); - - if (i >= this.names.len) return null; - - if (this.impl == .with_names) { - const str: *const bun.Semver.String = &this.names[i]; - return str.slice(this.names_buffer.slice()); - } - - return null; - } - - pub fn memoryCost(this: *const List) usize { - return this.impl.memoryCost() + this.names_buffer.memoryCost() + - (this.names.len * @sizeOf(bun.Semver.String)); - } - - pub fn 
ensureTotalCapacity(this: *List, allocator: std.mem.Allocator, count: usize) !void { - try this.impl.ensureTotalCapacity(allocator, count); - } - }; - - pub const Lookup = struct { - mapping: Mapping, - source_map: ?*ParsedSourceMap = null, - /// Owned by default_allocator always - /// use `getSourceCode` to access this as a Slice - prefetched_source_code: ?[]const u8, - - name: ?[]const u8 = null, - - /// This creates a bun.String if the source remap *changes* the source url, - /// which is only possible if the executed file differs from the source file: - /// - /// - `bun build --sourcemap`, it is another file on disk - /// - `bun build --compile --sourcemap`, it is an embedded file. - pub fn displaySourceURLIfNeeded(lookup: Lookup, base_filename: []const u8) ?bun.String { - const source_map = lookup.source_map orelse return null; - // See doc comment on `external_source_names` - if (source_map.external_source_names.len == 0) - return null; - if (lookup.mapping.source_index >= source_map.external_source_names.len) - return null; - - const name = source_map.external_source_names[@intCast(lookup.mapping.source_index)]; - - if (source_map.is_standalone_module_graph) { - return bun.String.cloneUTF8(name); - } - - if (std.fs.path.isAbsolute(base_filename)) { - const dir = bun.path.dirname(base_filename, .auto); - return bun.String.cloneUTF8(bun.path.joinAbs(dir, .auto, name)); - } - - return bun.String.init(name); - } - - /// Only valid if `lookup.source_map.isExternal()` - /// This has the possibility of invoking a call to the filesystem. - /// - /// This data is freed after printed on the assumption that printing - /// errors to the console are rare (this isnt used for error.stack) - pub fn getSourceCode(lookup: Lookup, base_filename: []const u8) ?bun.jsc.ZigString.Slice { - const bytes = bytes: { - if (lookup.prefetched_source_code) |code| { - break :bytes code; - } - - const source_map = lookup.source_map orelse return null; - assert(source_map.isExternal()); - - const provider = source_map.underlying_provider.provider() orelse - return null; - - const index = lookup.mapping.source_index; - - // Standalone module graph source maps are stored (in memory) compressed. - // They are decompressed on demand. 
- if (source_map.is_standalone_module_graph) { - const serialized = source_map.standaloneModuleGraphData(); - if (index >= source_map.external_source_names.len) - return null; - - const code = serialized.sourceFileContents(@intCast(index)); - - return bun.jsc.ZigString.Slice.fromUTF8NeverFree(code orelse return null); - } - - if (provider.getSourceMap( - base_filename, - source_map.underlying_provider.load_hint, - .{ .source_only = @intCast(index) }, - )) |parsed| - if (parsed.source_contents) |contents| - break :bytes contents; - - if (index >= source_map.external_source_names.len) - return null; - - const name = source_map.external_source_names[@intCast(index)]; - - var buf: bun.PathBuffer = undefined; - const normalized = bun.path.joinAbsStringBufZ( - bun.path.dirname(base_filename, .auto), - &buf, - &.{name}, - .loose, - ); - switch (bun.sys.File.readFrom( - std.fs.cwd(), - normalized, - bun.default_allocator, - )) { - .result => |r| break :bytes r, - .err => return null, - } - }; - - return bun.jsc.ZigString.Slice.init(bun.default_allocator, bytes); - } - }; - - pub inline fn generatedLine(mapping: *const Mapping) i32 { - return mapping.generated.lines.zeroBased(); - } - - pub inline fn generatedColumn(mapping: *const Mapping) i32 { - return mapping.generated.columns.zeroBased(); - } - - pub inline fn sourceIndex(mapping: *const Mapping) i32 { - return mapping.source_index; - } - - pub inline fn originalLine(mapping: *const Mapping) i32 { - return mapping.original.lines.zeroBased(); - } - - pub inline fn originalColumn(mapping: *const Mapping) i32 { - return mapping.original.columns.zeroBased(); - } - - pub inline fn nameIndex(mapping: *const Mapping) i32 { - return mapping.name_index; - } - - pub fn parse( - allocator: std.mem.Allocator, - bytes: []const u8, - estimated_mapping_count: ?usize, - sources_count: i32, - input_line_count: usize, - options: struct { - allow_names: bool = false, - sort: bool = false, - }, - ) ParseResult { - debug("parse mappings ({d} bytes)", .{bytes.len}); - - var mapping = Mapping.List{}; - errdefer mapping.deinit(allocator); - - if (estimated_mapping_count) |count| { - mapping.ensureTotalCapacity(allocator, count) catch { - return .{ - .fail = .{ - .msg = "Out of memory", - .err = error.OutOfMemory, - .loc = .{}, - }, - }; - }; - } - - var generated = LineColumnOffset{ .lines = bun.Ordinal.start, .columns = bun.Ordinal.start }; - var original = LineColumnOffset{ .lines = bun.Ordinal.start, .columns = bun.Ordinal.start }; - var name_index: i32 = 0; - var source_index: i32 = 0; - var needs_sort = false; - var remain = bytes; - var has_names = false; - while (remain.len > 0) { - if (remain[0] == ';') { - generated.columns = bun.Ordinal.start; - - while (strings.hasPrefixComptime( - remain, - comptime [_]u8{';'} ** (@sizeOf(usize) / 2), - )) { - generated.lines = generated.lines.addScalar(@sizeOf(usize) / 2); - remain = remain[@sizeOf(usize) / 2 ..]; - } - - while (remain.len > 0 and remain[0] == ';') { - generated.lines = generated.lines.addScalar(1); - remain = remain[1..]; - } - - if (remain.len == 0) { - break; - } - } - - // Read the generated column - const generated_column_delta = decodeVLQ(remain, 0); - - if (generated_column_delta.start == 0) { - return .{ - .fail = .{ - .msg = "Missing generated column value", - .err = error.MissingGeneratedColumnValue, - .value = generated.columns.zeroBased(), - .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, - }, - }; - } - - needs_sort = needs_sort or generated_column_delta.value < 0; - - 
generated.columns = generated.columns.addScalar(generated_column_delta.value); - if (generated.columns.zeroBased() < 0) { - return .{ - .fail = .{ - .msg = "Invalid generated column value", - .err = error.InvalidGeneratedColumnValue, - .value = generated.columns.zeroBased(), - .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, - }, - }; - } - - remain = remain[generated_column_delta.start..]; - - // According to the specification, it's valid for a mapping to have 1, - // 4, or 5 variable-length fields. Having one field means there's no - // original location information, which is pretty useless. Just ignore - // those entries. - if (remain.len == 0) - break; - - switch (remain[0]) { - ',' => { - remain = remain[1..]; - continue; - }, - ';' => { - continue; - }, - else => {}, - } - - // Read the original source - const source_index_delta = decodeVLQ(remain, 0); - if (source_index_delta.start == 0) { - return .{ - .fail = .{ - .msg = "Invalid source index delta", - .err = error.InvalidSourceIndexDelta, - .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, - }, - }; - } - source_index += source_index_delta.value; - - if (source_index < 0 or source_index > sources_count) { - return .{ - .fail = .{ - .msg = "Invalid source index value", - .err = error.InvalidSourceIndexValue, - .value = source_index, - .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, - }, - }; - } - remain = remain[source_index_delta.start..]; - - // Read the original line - const original_line_delta = decodeVLQ(remain, 0); - if (original_line_delta.start == 0) { - return .{ - .fail = .{ - .msg = "Missing original line", - .err = error.MissingOriginalLine, - .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, - }, - }; - } - - original.lines = original.lines.addScalar(original_line_delta.value); - if (original.lines.zeroBased() < 0) { - return .{ - .fail = .{ - .msg = "Invalid original line value", - .err = error.InvalidOriginalLineValue, - .value = original.lines.zeroBased(), - .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, - }, - }; - } - remain = remain[original_line_delta.start..]; - - // Read the original column - const original_column_delta = decodeVLQ(remain, 0); - if (original_column_delta.start == 0) { - return .{ - .fail = .{ - .msg = "Missing original column value", - .err = error.MissingOriginalColumnValue, - .value = original.columns.zeroBased(), - .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, - }, - }; - } - - original.columns = original.columns.addScalar(original_column_delta.value); - if (original.columns.zeroBased() < 0) { - return .{ - .fail = .{ - .msg = "Invalid original column value", - .err = error.InvalidOriginalColumnValue, - .value = original.columns.zeroBased(), - .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, - }, - }; - } - remain = remain[original_column_delta.start..]; - - if (remain.len > 0) { - switch (remain[0]) { - ',' => { - // 4 column, but there's more on this line. - remain = remain[1..]; - }, - // 4 column, and there's no more on this line. 
- ';' => {}, - - // 5th column: the name - else => |c| { - // Read the name index - const name_index_delta = decodeVLQ(remain, 0); - if (name_index_delta.start == 0) { - return .{ - .fail = .{ - .msg = "Invalid name index delta", - .err = error.InvalidNameIndexDelta, - .value = @intCast(c), - .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, - }, - }; - } - remain = remain[name_index_delta.start..]; - - if (options.allow_names) { - name_index += name_index_delta.value; - if (!has_names) { - mapping.ensureWithNames(allocator) catch { - return .{ - .fail = .{ - .msg = "Out of memory", - .err = error.OutOfMemory, - .loc = .{ .start = @as(i32, @intCast(bytes.len - remain.len)) }, - }, - }; - }; - } - has_names = true; - } - - if (remain.len > 0) { - switch (remain[0]) { - // There's more on this line. - ',' => { - remain = remain[1..]; - }, - // That's the end of the line. - ';' => {}, - else => {}, - } - } - }, - } - } - mapping.append(allocator, &.{ - .generated = generated, - .original = original, - .source_index = source_index, - .name_index = name_index, - }) catch |err| bun.handleOom(err); - } - - if (needs_sort and options.sort) { - mapping.sort(); - } - - return .{ .success = .{ - .ref_count = .init(), - .mappings = mapping, - .input_line_count = input_line_count, - } }; - } -}; +pub const Mapping = @import("./Mapping.zig"); pub const ParseResult = union(enum) { fail: struct { @@ -859,158 +275,7 @@ pub const ParseResult = union(enum) { success: ParsedSourceMap, }; -pub const ParsedSourceMap = struct { - const RefCount = bun.ptr.ThreadSafeRefCount(@This(), "ref_count", deinit, .{}); - pub const ref = RefCount.ref; - pub const deref = RefCount.deref; - - /// ParsedSourceMap can be acquired by different threads via the thread-safe - /// source map store (SavedSourceMap), so the reference count must be thread-safe. - ref_count: RefCount, - - input_line_count: usize = 0, - mappings: Mapping.List = .{}, - - /// If this is empty, this implies that the source code is a single file - /// transpiled on-demand. If there are items, then it means this is a file - /// loaded without transpilation but with external sources. This array - /// maps `source_index` to the correct filename. - external_source_names: []const []const u8 = &.{}, - /// In order to load source contents from a source-map after the fact, - /// a handle to the underlying source provider is stored. Within this pointer, - /// a flag is stored if it is known to be an inline or external source map. - /// - /// Source contents are large, we don't preserve them in memory. 
This has - /// the downside of repeatedly re-decoding sourcemaps if multiple errors - /// are emitted (specifically with Bun.inspect / unhandled; the ones that - /// rely on source contents) - underlying_provider: SourceContentPtr = .none, - - is_standalone_module_graph: bool = false, - - const SourceProviderKind = enum(u2) { zig, bake, dev_server }; - const AnySourceProvider = union(enum) { - zig: *SourceProviderMap, - bake: *BakeSourceProvider, - dev_server: *DevServerSourceProvider, - - pub fn ptr(this: AnySourceProvider) *anyopaque { - return switch (this) { - .zig => @ptrCast(this.zig), - .bake => @ptrCast(this.bake), - .dev_server => @ptrCast(this.dev_server), - }; - } - - pub fn getSourceMap( - this: AnySourceProvider, - source_filename: []const u8, - load_hint: SourceMapLoadHint, - result: ParseUrlResultHint, - ) ?SourceMap.ParseUrl { - return switch (this) { - .zig => this.zig.getSourceMap(source_filename, load_hint, result), - .bake => this.bake.getSourceMap(source_filename, load_hint, result), - .dev_server => this.dev_server.getSourceMap(source_filename, load_hint, result), - }; - } - }; - - const SourceContentPtr = packed struct(u64) { - load_hint: SourceMapLoadHint, - kind: SourceProviderKind, - data: u60, - - pub const none: SourceContentPtr = .{ .load_hint = .none, .kind = .zig, .data = 0 }; - - fn fromProvider(p: *SourceProviderMap) SourceContentPtr { - return .{ .load_hint = .none, .data = @intCast(@intFromPtr(p)), .kind = .zig }; - } - - fn fromBakeProvider(p: *BakeSourceProvider) SourceContentPtr { - return .{ .load_hint = .none, .data = @intCast(@intFromPtr(p)), .kind = .bake }; - } - - fn fromDevServerProvider(p: *DevServerSourceProvider) SourceContentPtr { - return .{ .load_hint = .none, .data = @intCast(@intFromPtr(p)), .kind = .dev_server }; - } - - pub fn provider(sc: SourceContentPtr) ?AnySourceProvider { - switch (sc.kind) { - .zig => return .{ .zig = @ptrFromInt(sc.data) }, - .bake => return .{ .bake = @ptrFromInt(sc.data) }, - .dev_server => return .{ .dev_server = @ptrFromInt(sc.data) }, - } - } - }; - - pub fn isExternal(psm: *ParsedSourceMap) bool { - return psm.external_source_names.len != 0; - } - - fn deinit(this: *ParsedSourceMap) void { - const allocator = bun.default_allocator; - - this.mappings.deinit(allocator); - - if (this.external_source_names.len > 0) { - for (this.external_source_names) |name| - allocator.free(name); - allocator.free(this.external_source_names); - } - - bun.destroy(this); - } - - fn standaloneModuleGraphData(this: *ParsedSourceMap) *bun.StandaloneModuleGraph.SerializedSourceMap.Loaded { - bun.assert(this.is_standalone_module_graph); - return @ptrFromInt(this.underlying_provider.data); - } - - pub fn memoryCost(this: *const ParsedSourceMap) usize { - return @sizeOf(ParsedSourceMap) + this.mappings.memoryCost() + this.external_source_names.len * @sizeOf([]const u8); - } - - pub fn writeVLQs(map: *const ParsedSourceMap, writer: anytype) !void { - var last_col: i32 = 0; - var last_src: i32 = 0; - var last_ol: i32 = 0; - var last_oc: i32 = 0; - var current_line: i32 = 0; - for ( - map.mappings.generated(), - map.mappings.original(), - map.mappings.sourceIndex(), - 0.., - ) |gen, orig, source_index, i| { - if (current_line != gen.lines.zeroBased()) { - assert(gen.lines.zeroBased() > current_line); - const inc = gen.lines.zeroBased() - current_line; - try writer.writeByteNTimes(';', @intCast(inc)); - current_line = gen.lines.zeroBased(); - last_col = 0; - } else if (i != 0) { - try writer.writeByte(','); - } - try 
VLQ.encode(gen.columns.zeroBased() - last_col).writeTo(writer); - last_col = gen.columns.zeroBased(); - try VLQ.encode(source_index - last_src).writeTo(writer); - last_src = source_index; - try VLQ.encode(orig.lines.zeroBased() - last_ol).writeTo(writer); - last_ol = orig.lines.zeroBased(); - try VLQ.encode(orig.columns.zeroBased() - last_oc).writeTo(writer); - last_oc = orig.columns.zeroBased(); - } - } - - pub fn formatVLQs(map: *const ParsedSourceMap) std.fmt.Formatter(formatVLQsImpl) { - return .{ .data = map }; - } - - fn formatVLQsImpl(map: *const ParsedSourceMap, comptime _: []const u8, _: std.fmt.FormatOptions, w: anytype) !void { - try map.writeVLQs(w); - } -}; +pub const ParsedSourceMap = @import("./ParsedSourceMap.zig"); /// For some sourcemap loading code, this enum is used as a hint if it should /// bother loading source code into memory. Most uses of source maps only care @@ -1668,365 +933,7 @@ pub fn appendMappingToBuffer(buffer: *MutableString, last_byte: u8, prev_state: } } -pub const Chunk = struct { - buffer: MutableString, - - mappings_count: usize = 0, - - /// This end state will be used to rewrite the start of the following source - /// map chunk so that the delta-encoded VLQ numbers are preserved. - end_state: SourceMapState = .{}, - - /// There probably isn't a source mapping at the end of the file (nor should - /// there be) but if we're appending another source map chunk after this one, - /// we'll need to know how many characters were in the last line we generated. - final_generated_column: i32 = 0, - - /// ignore empty chunks - should_ignore: bool = true, - - pub fn initEmpty() Chunk { - return .{ - .buffer = MutableString.initEmpty(bun.default_allocator), - .mappings_count = 0, - .end_state = .{}, - .final_generated_column = 0, - .should_ignore = true, - }; - } - - pub fn deinit(this: *Chunk) void { - this.buffer.deinit(); - } - - pub fn printSourceMapContents( - chunk: Chunk, - source: *const Logger.Source, - mutable: *MutableString, - include_sources_contents: bool, - comptime ascii_only: bool, - ) !void { - try printSourceMapContentsAtOffset( - chunk, - source, - mutable, - include_sources_contents, - 0, - ascii_only, - ); - } - - pub fn printSourceMapContentsAtOffset( - chunk: Chunk, - source: *const Logger.Source, - mutable: *MutableString, - include_sources_contents: bool, - offset: usize, - comptime ascii_only: bool, - ) !void { - // attempt to pre-allocate - - var filename_buf: bun.PathBuffer = undefined; - var filename = source.path.text; - if (strings.hasPrefix(source.path.text, FileSystem.instance.top_level_dir)) { - filename = filename[FileSystem.instance.top_level_dir.len - 1 ..]; - } else if (filename.len > 0 and filename[0] != '/') { - filename_buf[0] = '/'; - @memcpy(filename_buf[1..][0..filename.len], filename); - filename = filename_buf[0 .. 
filename.len + 1]; - } - - mutable.growIfNeeded( - filename.len + 2 + (source.contents.len * @as(usize, @intFromBool(include_sources_contents))) + (chunk.buffer.list.items.len - offset) + 32 + 39 + 29 + 22 + 20, - ) catch unreachable; - try mutable.append("{\n \"version\":3,\n \"sources\": ["); - - try JSPrinter.quoteForJSON(filename, mutable, ascii_only); - - if (include_sources_contents) { - try mutable.append("],\n \"sourcesContent\": ["); - try JSPrinter.quoteForJSON(source.contents, mutable, ascii_only); - } - - try mutable.append("],\n \"mappings\": "); - try JSPrinter.quoteForJSON(chunk.buffer.list.items[offset..], mutable, ascii_only); - try mutable.append(", \"names\": []\n}"); - } - - // TODO: remove the indirection by having generic functions for SourceMapFormat and NewBuilder. Source maps are always VLQ - pub fn SourceMapFormat(comptime Type: type) type { - return struct { - ctx: Type, - const Format = @This(); - - pub fn init(allocator: std.mem.Allocator, prepend_count: bool) Format { - return .{ .ctx = Type.init(allocator, prepend_count) }; - } - - pub inline fn appendLineSeparator(this: *Format) anyerror!void { - try this.ctx.appendLineSeparator(); - } - - pub inline fn append(this: *Format, current_state: SourceMapState, prev_state: SourceMapState) anyerror!void { - try this.ctx.append(current_state, prev_state); - } - - pub inline fn shouldIgnore(this: Format) bool { - return this.ctx.shouldIgnore(); - } - - pub inline fn getBuffer(this: Format) MutableString { - return this.ctx.getBuffer(); - } - - pub inline fn takeBuffer(this: *Format) MutableString { - return this.ctx.takeBuffer(); - } - - pub inline fn getCount(this: Format) usize { - return this.ctx.getCount(); - } - }; - } - - pub const VLQSourceMap = struct { - data: MutableString, - count: usize = 0, - offset: usize = 0, - approximate_input_line_count: usize = 0, - - pub fn init(allocator: std.mem.Allocator, prepend_count: bool) VLQSourceMap { - var map = VLQSourceMap{ - .data = MutableString.initEmpty(allocator), - }; - - // For bun.js, we store the number of mappings and how many bytes the final list is at the beginning of the array - if (prepend_count) { - map.offset = 24; - map.data.append(&([_]u8{0} ** 24)) catch unreachable; - } - - return map; - } - - pub fn appendLineSeparator(this: *VLQSourceMap) anyerror!void { - try this.data.appendChar(';'); - } - - pub fn append(this: *VLQSourceMap, current_state: SourceMapState, prev_state: SourceMapState) anyerror!void { - const last_byte: u8 = if (this.data.list.items.len > this.offset) - this.data.list.items[this.data.list.items.len - 1] - else - 0; - - appendMappingToBuffer(&this.data, last_byte, prev_state, current_state); - this.count += 1; - } - - pub fn shouldIgnore(this: VLQSourceMap) bool { - return this.count == 0; - } - - pub fn getBuffer(this: VLQSourceMap) MutableString { - return this.data; - } - - pub fn takeBuffer(this: *VLQSourceMap) MutableString { - defer this.data = .initEmpty(this.data.allocator); - return this.data; - } - - pub fn getCount(this: VLQSourceMap) usize { - return this.count; - } - }; - - pub fn NewBuilder(comptime SourceMapFormatType: type) type { - return struct { - const ThisBuilder = @This(); - source_map: SourceMapper, - line_offset_tables: LineOffsetTable.List = .{}, - prev_state: SourceMapState = SourceMapState{}, - last_generated_update: u32 = 0, - generated_column: i32 = 0, - prev_loc: Logger.Loc = Logger.Loc.Empty, - has_prev_state: bool = false, - - line_offset_table_byte_offset_list: []const u32 = &.{}, - - // This is a 
workaround for a bug in the popular "source-map" library: - // https://github.com/mozilla/source-map/issues/261. The library will - // sometimes return null when querying a source map unless every line - // starts with a mapping at column zero. - // - // The workaround is to replicate the previous mapping if a line ends - // up not starting with a mapping. This is done lazily because we want - // to avoid replicating the previous mapping if we don't need to. - line_starts_with_mapping: bool = false, - cover_lines_without_mappings: bool = false, - - approximate_input_line_count: usize = 0, - - /// When generating sourcemappings for bun, we store a count of how many mappings there were - prepend_count: bool = false, - - pub const SourceMapper = SourceMapFormat(SourceMapFormatType); - - pub noinline fn generateChunk(b: *ThisBuilder, output: []const u8) Chunk { - b.updateGeneratedLineAndColumn(output); - var buffer = b.source_map.getBuffer(); - if (b.prepend_count) { - buffer.list.items[0..8].* = @as([8]u8, @bitCast(buffer.list.items.len)); - buffer.list.items[8..16].* = @as([8]u8, @bitCast(b.source_map.getCount())); - buffer.list.items[16..24].* = @as([8]u8, @bitCast(b.approximate_input_line_count)); - } - return Chunk{ - .buffer = b.source_map.takeBuffer(), - .mappings_count = b.source_map.getCount(), - .end_state = b.prev_state, - .final_generated_column = b.generated_column, - .should_ignore = b.source_map.shouldIgnore(), - }; - } - - // Scan over the printed text since the last source mapping and update the - // generated line and column numbers - pub fn updateGeneratedLineAndColumn(b: *ThisBuilder, output: []const u8) void { - const slice = output[b.last_generated_update..]; - var needs_mapping = b.cover_lines_without_mappings and !b.line_starts_with_mapping and b.has_prev_state; - - var i: usize = 0; - const n = @as(usize, @intCast(slice.len)); - var c: i32 = 0; - while (i < n) { - const len = strings.wtf8ByteSequenceLengthWithInvalid(slice[i]); - c = strings.decodeWTF8RuneT(slice[i..].ptr[0..4], len, i32, strings.unicode_replacement); - i += @as(usize, len); - - switch (c) { - 14...127 => { - if (strings.indexOfNewlineOrNonASCII(slice, @as(u32, @intCast(i)))) |j| { - b.generated_column += @as(i32, @intCast((@as(usize, j) - i) + 1)); - i = j; - continue; - } else { - b.generated_column += @as(i32, @intCast(slice[i..].len)) + 1; - i = n; - break; - } - }, - '\r', '\n', 0x2028, 0x2029 => { - // windows newline - if (c == '\r') { - const newline_check = b.last_generated_update + i + 1; - if (newline_check < output.len and output[newline_check] == '\n') { - continue; - } - } - - // If we're about to move to the next line and the previous line didn't have - // any mappings, add a mapping at the start of the previous line. 
- if (needs_mapping) { - b.appendMappingWithoutRemapping(.{ - .generated_line = b.prev_state.generated_line, - .generated_column = 0, - .source_index = b.prev_state.source_index, - .original_line = b.prev_state.original_line, - .original_column = b.prev_state.original_column, - }); - } - - b.prev_state.generated_line += 1; - b.prev_state.generated_column = 0; - b.generated_column = 0; - b.source_map.appendLineSeparator() catch unreachable; - - // This new line doesn't have a mapping yet - b.line_starts_with_mapping = false; - - needs_mapping = b.cover_lines_without_mappings and !b.line_starts_with_mapping and b.has_prev_state; - }, - - else => { - // Mozilla's "source-map" library counts columns using UTF-16 code units - b.generated_column += @as(i32, @intFromBool(c > 0xFFFF)) + 1; - }, - } - } - - b.last_generated_update = @as(u32, @truncate(output.len)); - } - - pub fn appendMapping(b: *ThisBuilder, current_state: SourceMapState) void { - b.appendMappingWithoutRemapping(current_state); - } - - pub fn appendMappingWithoutRemapping(b: *ThisBuilder, current_state: SourceMapState) void { - b.source_map.append(current_state, b.prev_state) catch unreachable; - b.prev_state = current_state; - b.has_prev_state = true; - } - - pub fn addSourceMapping(b: *ThisBuilder, loc: Logger.Loc, output: []const u8) void { - if ( - // don't insert mappings for same location twice - b.prev_loc.eql(loc) or - // exclude generated code from source - loc.start == Logger.Loc.Empty.start) - return; - - b.prev_loc = loc; - const list = b.line_offset_tables; - - // We have no sourcemappings. - // This happens for example when importing an asset which does not support sourcemaps - // like a png or a jpg - // - // import foo from "./foo.png"; - // - if (list.len == 0) { - return; - } - - const original_line = LineOffsetTable.findLine(b.line_offset_table_byte_offset_list, loc); - const line = list.get(@as(usize, @intCast(@max(original_line, 0)))); - - // Use the line to compute the column - var original_column = loc.start - @as(i32, @intCast(line.byte_offset_to_start_of_line)); - if (line.columns_for_non_ascii.len > 0 and original_column >= @as(i32, @intCast(line.byte_offset_to_first_non_ascii))) { - original_column = line.columns_for_non_ascii.slice()[@as(u32, @intCast(original_column)) - line.byte_offset_to_first_non_ascii]; - } - - b.updateGeneratedLineAndColumn(output); - - // If this line doesn't start with a mapping and we're about to add a mapping - // that's not at the start, insert a mapping first so the line starts with one. 
- if (b.cover_lines_without_mappings and !b.line_starts_with_mapping and b.generated_column > 0 and b.has_prev_state) { - b.appendMappingWithoutRemapping(.{ - .generated_line = b.prev_state.generated_line, - .generated_column = 0, - .source_index = b.prev_state.source_index, - .original_line = b.prev_state.original_line, - .original_column = b.prev_state.original_column, - }); - } - - b.appendMapping(.{ - .generated_line = b.prev_state.generated_line, - .generated_column = @max(b.generated_column, 0), - .source_index = b.prev_state.source_index, - .original_line = @max(original_line, 0), - .original_column = @max(original_column, 0), - }); - - // This line now has a mapping on it, so don't insert another one - b.line_starts_with_mapping = true; - } - }; - } - - pub const Builder = NewBuilder(VLQSourceMap); -}; +pub const Chunk = @import("./Chunk.zig"); /// https://sentry.engineering/blog/the-case-for-debug-ids /// https://github.com/mitsuhiko/source-map-rfc/blob/proposals/debug-id/proposals/debug-id.md @@ -2058,11 +965,9 @@ const string = []const u8; const std = @import("std"); const bun = @import("bun"); -const JSPrinter = bun.js_printer; const Logger = bun.logger; const MutableString = bun.MutableString; const StringJoiner = bun.StringJoiner; const URL = bun.URL; const assert = bun.assert; const strings = bun.strings; -const FileSystem = bun.fs.FileSystem; From 51431b6e653534345fdec39de05e8d101872d3fd Mon Sep 17 00:00:00 2001 From: robobun Date: Tue, 28 Oct 2025 12:31:42 -0700 Subject: [PATCH 020/102] Fix sourcemap comparator to use strict weak ordering (#24146) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Fixes the comparator function in `src/sourcemap/Mapping.zig` to use strict weak ordering as required by sort algorithms. ## Changes - Changed `<=` to `<` in the column comparison to ensure strict ordering - Refactored the comparator to use clearer if-statement structure - Added index comparison as a tiebreaker for stable sorting when both line and column positions are equal ## Problem The original comparator used `<=` which would return true for equal elements, violating the strict weak ordering requirement. This could lead to undefined behavior in sorting. 
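To make the violation concrete: strict weak ordering requires irreflexivity, i.e. `lessThan(a, a)` must be `false`, and sort algorithms are allowed to assume it. A minimal sketch of the difference, using hypothetical comparators over plain integers rather than the patched code itself:

```zig
const std = @import("std");

fn brokenLessThan(a: i32, b: i32) bool {
    return a <= b; // not irreflexive: brokenLessThan(x, x) is true
}

fn strictLessThan(a: i32, b: i32) bool {
    return a < b; // irreflexive, as sort algorithms require
}

pub fn main() void {
    const x: i32 = 5;
    std.debug.assert(!strictLessThan(x, x)); // holds: a < a is false
    std.debug.assert(brokenLessThan(x, x)); // the violation: a <= a is true
}
```

With `<=`, two mappings at the same generated line and column each compare "less than" the other, which is the inconsistency the index tiebreaker below resolves.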
**Before:** ```zig return a.lines.zeroBased() < b.lines.zeroBased() or (a.lines.zeroBased() == b.lines.zeroBased() and a.columns.zeroBased() <= b.columns.zeroBased()); ``` **After:** ```zig if (a.lines.zeroBased() != b.lines.zeroBased()) { return a.lines.zeroBased() < b.lines.zeroBased(); } if (a.columns.zeroBased() != b.columns.zeroBased()) { return a.columns.zeroBased() < b.columns.zeroBased(); } return a_index < b_index; ``` ## Test plan - [x] Verified compilation with `bun bd` - The sort now properly follows strict weak ordering semantics 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-authored-by: Claude Bot Co-authored-by: Claude --- src/sourcemap/Mapping.zig | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/sourcemap/Mapping.zig b/src/sourcemap/Mapping.zig index bbd8f0ede6..971a5d45c7 100644 --- a/src/sourcemap/Mapping.zig +++ b/src/sourcemap/Mapping.zig @@ -108,7 +108,13 @@ pub const List = struct { const a = ctx.generated[a_index]; const b = ctx.generated[b_index]; - return a.lines.zeroBased() < b.lines.zeroBased() or (a.lines.zeroBased() == b.lines.zeroBased() and a.columns.zeroBased() <= b.columns.zeroBased()); + if (a.lines.zeroBased() != b.lines.zeroBased()) { + return a.lines.zeroBased() < b.lines.zeroBased(); + } + if (a.columns.zeroBased() != b.columns.zeroBased()) { + return a.columns.zeroBased() < b.columns.zeroBased(); + } + return a_index < b_index; } }; From 4f1b90ad1d0da07ae990cc6885a54cc8a306d1b2 Mon Sep 17 00:00:00 2001 From: robobun Date: Tue, 28 Oct 2025 12:32:15 -0700 Subject: [PATCH 021/102] Fix EventEmitter crash in removeAllListeners with removeListener meta-listener (#24148) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Fixes #24147 - Fixed EventEmitter crash when `removeAllListeners()` is called from within an event handler while a `removeListener` meta-listener is registered - Added undefined check before iterating over listeners array to match Node.js behavior - Added comprehensive regression tests ## Bug Description When `removeAllListeners(type)` was called: 1. From within an event handler 2. While a `removeListener` meta-listener was registered 3. For an event type with no listeners It would crash with: `TypeError: undefined is not an object (evaluating 'this._events')` ## Root Cause The `removeAllListeners` function tried to access `listeners.length` without checking if `listeners` was defined first. When called with an event type that had no listeners, `events[type]` returned `undefined`, causing the crash. 
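A standalone reproduction, mirroring the regression test added in this patch:

```ts
import { EventEmitter } from "node:events";

const emitter = new EventEmitter();

// Registering any "removeListener" meta-listener switches
// removeAllListeners(type) onto the slow path that emits an event
// per removed listener — the path that iterated events[type] unchecked.
emitter.on("removeListener", () => {});

emitter.on("test", () => {
  // "foo" has no listeners, so events["foo"] is undefined here.
  emitter.removeAllListeners("foo");
});

emitter.emit("test"); // threw TypeError before this fix
```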
## Fix Added a check `if (listeners !== undefined)` before iterating, matching the behavior in Node.js core: https://github.com/nodejs/node/blob/main/lib/events.js#L768 ## Test plan - ✅ Created regression test in `test/regression/issue/24147.test.ts` - ✅ Verified test fails with `USE_SYSTEM_BUN=1 bun test` (reproduces bug) - ✅ Verified test passes with `bun bd test` (confirms fix) - ✅ Test covers the exact reproduction case from the issue - ✅ Additional tests for edge cases (actual listeners, nested calls) 🤖 Generated with [Claude Code](https://claude.com/claude-code) --------- Co-authored-by: Claude Bot Co-authored-by: Claude --- src/js/node/events.ts | 4 +- test/regression/issue/24147.test.ts | 81 +++++++++++++++++++++++++++++ 2 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 test/regression/issue/24147.test.ts diff --git a/src/js/node/events.ts b/src/js/node/events.ts index 069ee1c2e2..7ba8f6cd9d 100644 --- a/src/js/node/events.ts +++ b/src/js/node/events.ts @@ -392,7 +392,9 @@ EventEmitterPrototype.removeAllListeners = function removeAllListeners(type) { // emit in LIFO order const listeners = events[type]; - for (let i = listeners.length - 1; i >= 0; i--) this.removeListener(type, listeners[i]); + if (listeners !== undefined) { + for (let i = listeners.length - 1; i >= 0; i--) this.removeListener(type, listeners[i]); + } return this; }; diff --git a/test/regression/issue/24147.test.ts b/test/regression/issue/24147.test.ts new file mode 100644 index 0000000000..4fb7052185 --- /dev/null +++ b/test/regression/issue/24147.test.ts @@ -0,0 +1,81 @@ +// https://github.com/oven-sh/bun/issues/24147 +// EventEmitter: this._events becomes undefined when removeAllListeners() +// called from event handler with removeListener meta-listener + +import { EventEmitter } from "events"; +import assert from "node:assert"; +import { test } from "node:test"; + +test("removeAllListeners() from event handler with removeListener meta-listener", () => { + const emitter = new EventEmitter(); + + emitter.on("test", () => { + // This should not crash even though there are no 'foo' listeners + emitter.removeAllListeners("foo"); + }); + + // Register a removeListener meta-listener to trigger the bug + emitter.on("removeListener", () => {}); + + // This should not throw + assert.doesNotThrow(() => emitter.emit("test")); +}); + +test("removeAllListeners() with actual listeners to remove", () => { + const emitter = new EventEmitter(); + let fooCallCount = 0; + let removeListenerCallCount = 0; + + emitter.on("foo", () => fooCallCount++); + emitter.on("foo", () => fooCallCount++); + + emitter.on("test", () => { + // Remove all 'foo' listeners while inside an event handler + emitter.removeAllListeners("foo"); + }); + + // Track removeListener calls + emitter.on("removeListener", () => { + removeListenerCallCount++; + }); + + // Emit test event which triggers removeAllListeners + emitter.emit("test"); + + // Verify listeners were removed + assert.strictEqual(emitter.listenerCount("foo"), 0); + + // Verify removeListener was called twice (once for each foo listener) + assert.strictEqual(removeListenerCallCount, 2); + + // Verify foo listeners were never called + assert.strictEqual(fooCallCount, 0); +}); + +test("nested removeAllListeners() calls", () => { + const emitter = new EventEmitter(); + const events: string[] = []; + + emitter.on("outer", () => { + events.push("outer-start"); + emitter.removeAllListeners("inner"); + events.push("outer-end"); + }); + + emitter.on("inner", () => { + 
events.push("inner"); + }); + + emitter.on("removeListener", type => { + events.push(`removeListener:${String(type)}`); + }); + + // This should not crash + assert.doesNotThrow(() => emitter.emit("outer")); + + // Verify correct execution order + assert.deepStrictEqual(events, ["outer-start", "removeListener:inner", "outer-end"]); + + // Verify inner listeners were removed + assert.strictEqual(emitter.listenerCount("inner"), 0); +}); From 98c04e37ec7b95dd1453f4b9e804ee89877e291a Mon Sep 17 00:00:00 2001 From: robobun Date: Tue, 28 Oct 2025 12:32:53 -0700 Subject: [PATCH 022/102] Fix source index bounds check in sourcemap decoder (#24145) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Fix the source index bounds check in `src/sourcemap/Mapping.zig` to correctly validate indices against the range `[0, sources_count)`. ## Changes - Changed the bounds check condition from `source_index > sources_count` to `source_index >= sources_count` on line 452 - This prevents accepting `source_index == sources_count`, which would be out of bounds when indexing into the sources array ## Test plan - [x] Built successfully with `bun bd` - The existing test suite should continue to pass 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-authored-by: Claude Bot Co-authored-by: Claude --- src/sourcemap/Mapping.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sourcemap/Mapping.zig b/src/sourcemap/Mapping.zig index 971a5d45c7..96b3791dd6 100644 --- a/src/sourcemap/Mapping.zig +++ b/src/sourcemap/Mapping.zig @@ -455,7 +455,7 @@ pub fn parse( } source_index += source_index_delta.value; - if (source_index < 0 or source_index > sources_count) { + if (source_index < 0 or source_index >= sources_count) { return .{ .fail = .{ .msg = "Invalid source index value", From fe1bc5663704586a46f062d36005d7c3023555d3 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 29 Oct 2025 07:16:32 +0100 Subject: [PATCH 023/102] Add workerd benchmark --- bench/react-hello-world/bun.lock | 14 ++--- bench/react-hello-world/package.json | 7 +-- .../react-hello-world.workerd.config.capnp | 23 ++++++++ .../react-hello-world.workerd.js | 53 +++++++++++++++++++ .../react-hello-world.workerd.jsx | 24 +++++++++ 5 files changed, 109 insertions(+), 12 deletions(-) create mode 100644 bench/react-hello-world/react-hello-world.workerd.config.capnp create mode 100644 bench/react-hello-world/react-hello-world.workerd.js create mode 100644 bench/react-hello-world/react-hello-world.workerd.jsx diff --git a/bench/react-hello-world/bun.lock b/bench/react-hello-world/bun.lock index 56594f42eb..218c02e565 100644 --- a/bench/react-hello-world/bun.lock +++ b/bench/react-hello-world/bun.lock @@ -4,20 +4,16 @@ "": { "name": "react-hello-world", "dependencies": { - "react": "next", - "react-dom": "next", + "react": "^19.2.0", + "react-dom": "^19.2.0", }, }, }, "packages": { - "js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="], + "react": ["react@19.2.0", "", {}, "sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ=="], - "loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="], + "react-dom": ["react-dom@19.2.0", "", { "dependencies": { "scheduler": "^0.27.0" }, 
"peerDependencies": { "react": "^19.2.0" } }, "sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ=="], - "react": ["react@18.3.0-next-b72ed698f-20230303", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-l6RbwXa9Peerh9pQEq62DDypxSQfavbybY0wV1vwZ63X0P5VaaEesZAz1KPpnVvXjTtQaOMQsIPvnQwmaVqzTQ=="], - - "react-dom": ["react-dom@18.3.0-next-b72ed698f-20230303", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "0.24.0-next-b72ed698f-20230303" }, "peerDependencies": { "react": "18.3.0-next-b72ed698f-20230303" } }, "sha512-0Gh/gmTT6H8KxswIQB/8shdTTfs6QIu86nNqZf3Y0RBqIwgTVxRaQVz14/Fw4/Nt81nK/Jt6KT4bx3yvOxZDGQ=="], - - "scheduler": ["scheduler@0.24.0-next-b72ed698f-20230303", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-ct4DMMFbc2kFxCdvbG+i/Jn1S1oqrIFSn2VX/mam+Ya0iuNy+lb8rgT7A+YBUqrQNDaNEqABYI2sOQgqoRxp7w=="], + "scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="], } } diff --git a/bench/react-hello-world/package.json b/bench/react-hello-world/package.json index b114852054..ca4b400596 100644 --- a/bench/react-hello-world/package.json +++ b/bench/react-hello-world/package.json @@ -4,13 +4,14 @@ "description": "", "main": "react-hello-world.node.js", "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" + "test": "echo \"Error: no test specified\" && exit 1", + "build:workerd": "bun build react-hello-world.workerd.jsx --outfile=react-hello-world.workerd.js --format=esm --production" }, "keywords": [], "author": "Colin McDonnell", "license": "ISC", "dependencies": { - "react": "next", - "react-dom": "next" + "react": "^19.2.0", + "react-dom": "^19.2.0" } } diff --git a/bench/react-hello-world/react-hello-world.workerd.config.capnp b/bench/react-hello-world/react-hello-world.workerd.config.capnp new file mode 100644 index 0000000000..e624b143be --- /dev/null +++ b/bench/react-hello-world/react-hello-world.workerd.config.capnp @@ -0,0 +1,23 @@ +using Workerd = import "/workerd/workerd.capnp"; + +const config :Workerd.Config = ( + services = [ + (name = "main", worker = .mainWorker), + ], + + sockets = [ + ( name = "http", + address = "*:3001", + http = (), + service = "main" + ), + ] +); + +const mainWorker :Workerd.Worker = ( + modules = [ + (name = "worker", esModule = embed "react-hello-world.workerd.js"), + ], + compatibilityDate = "2025-01-01", + compatibilityFlags = ["nodejs_compat"], +); diff --git a/bench/react-hello-world/react-hello-world.workerd.js b/bench/react-hello-world/react-hello-world.workerd.js new file mode 100644 index 0000000000..ae8c4334ed --- /dev/null +++ b/bench/react-hello-world/react-hello-world.workerd.js @@ -0,0 +1,53 @@ +var VC=Object.create;var{getPrototypeOf:SC,defineProperty:XE,getOwnPropertyNames:FC}=Object;var hC=Object.prototype.hasOwnProperty;var Dc=(f,u,c)=>{c=f!=null?VC(SC(f)):{};let y=u||!f||!f.__esModule?XE(c,"default",{value:f,enumerable:!0}):c;for(let _ of FC(f))if(!hC.call(y,_))XE(y,_,{get:()=>f[_],enumerable:!0});return y};var mx=(f,u)=>()=>(u||f((u={exports:{}}).exports,u),u.exports);var BE=(f,u)=>{for(var c in u)XE(f,c,{get:u[c],enumerable:!0,configurable:!0,set:(y)=>u[c]=()=>y})};var iC=(f,u)=>()=>(f&&(u=f(f=0)),u);var Dy=mx((_g)=>{var 
PE=Symbol.for("react.transitional.element"),tC=Symbol.for("react.portal"),KC=Symbol.for("react.fragment"),kC=Symbol.for("react.strict_mode"),dC=Symbol.for("react.profiler"),bC=Symbol.for("react.consumer"),lC=Symbol.for("react.context"),pC=Symbol.for("react.forward_ref"),qC=Symbol.for("react.suspense"),oC=Symbol.for("react.memo"),Yx=Symbol.for("react.lazy"),eC=Symbol.for("react.activity"),Hx=Symbol.iterator;function aC(f){if(f===null||typeof f!=="object")return null;return f=Hx&&f[Hx]||f["@@iterator"],typeof f==="function"?f:null}var Mx={isMounted:function(){return!1},enqueueForceUpdate:function(){},enqueueReplaceState:function(){},enqueueSetState:function(){}},nx=Object.assign,Nx={};function zc(f,u,c){this.props=f,this.context=u,this.refs=Nx,this.updater=c||Mx}zc.prototype.isReactComponent={};zc.prototype.setState=function(f,u){if(typeof f!=="object"&&typeof f!=="function"&&f!=null)throw Error("takes an object of state variables to update or a function which returns an object of state variables.");this.updater.enqueueSetState(this,f,u,"setState")};zc.prototype.forceUpdate=function(f){this.updater.enqueueForceUpdate(this,f,"forceUpdate")};function rx(){}rx.prototype=zc.prototype;function JE(f,u,c){this.props=f,this.context=u,this.refs=Nx,this.updater=c||Mx}var VE=JE.prototype=new rx;VE.constructor=JE;nx(VE,zc.prototype);VE.isPureReactComponent=!0;var Ix=Array.isArray;function ZE(){}var K={H:null,A:null,T:null,S:null},Dx=Object.prototype.hasOwnProperty;function SE(f,u,c){var y=c.ref;return{$$typeof:PE,type:f,key:u,ref:y!==void 0?y:null,props:c}}function sC(f,u){return SE(f.type,u,f.props)}function FE(f){return typeof f==="object"&&f!==null&&f.$$typeof===PE}function fg(f){var u={"=":"=0",":":"=2"};return"$"+f.replace(/[=:]/g,function(c){return u[c]})}var Ux=/\/+/g;function QE(f,u){return typeof f==="object"&&f!==null&&f.key!=null?fg(""+f.key):u.toString(36)}function ug(f){switch(f.status){case"fulfilled":return f.value;case"rejected":throw f.reason;default:switch(typeof f.status==="string"?f.then(ZE,ZE):(f.status="pending",f.then(function(u){f.status==="pending"&&(f.status="fulfilled",f.value=u)},function(u){f.status==="pending"&&(f.status="rejected",f.reason=u)})),f.status){case"fulfilled":return f.value;case"rejected":throw f.reason}}throw f}function $c(f,u,c,y,_){var E=typeof f;if(E==="undefined"||E==="boolean")f=null;var v=!1;if(f===null)v=!0;else switch(E){case"bigint":case"string":case"number":v=!0;break;case"object":switch(f.$$typeof){case PE:case tC:v=!0;break;case Yx:return v=f._init,$c(v(f._payload),u,c,y,_)}}if(v)return _=_(f),v=y===""?"."+QE(f,0):y,Ix(_)?(c="",v!=null&&(c=v.replace(Ux,"$&/")+"/"),$c(_,u,c,"",function(R){return R})):_!=null&&(FE(_)&&(_=sC(_,c+(_.key==null||f&&f.key===_.key?"":(""+_.key).replace(Ux,"$&/")+"/")+v)),u.push(_)),1;v=0;var T=y===""?".":y+":";if(Ix(f))for(var x=0;xix,useFormStatus:()=>hx,useFormState:()=>Fx,unstable_batchedUpdates:()=>Sx,requestFormReset:()=>Vx,preloadModule:()=>Jx,preload:()=>Px,preinitModule:()=>Zx,preinit:()=>Qx,prefetchDNS:()=>Bx,preconnect:()=>Xx,flushSync:()=>Gx,createPortal:()=>jx,__DOM_INTERNALS_DO_NOT_USE_OR_WARN_USERS_THEY_CANNOT_UPGRADE:()=>Wx});function zx(f){var u="https://react.dev/errors/"+f;if(1{$x=Dc(Dy(),1);zf={d:{f:Ku,r:function(){throw Error(zx(522))},D:Ku,C:Ku,L:Ku,m:Ku,X:Ku,S:Ku,M:Ku},p:0,findDOMNode:null},lg=Symbol.for("react.portal");$y=$x.__CLIENT_INTERNALS_DO_NOT_USE_OR_WARN_USERS_THEY_CANNOT_UPGRADE;Wx=zf});var iE=mx((Zw,kx)=>{tx();function Kx(){if(typeof __REACT_DEVTOOLS_GLOBAL_HOOK__>"u"||typeof 
__REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE!=="function")return;try{__REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE(Kx)}catch(f){console.error(f)}}Kx(),kx.exports=hE});var bc=Dc(Dy(),1);var Mv={};BE(Mv,{version:()=>cR,renderToString:()=>uR,renderToStaticMarkup:()=>fR});var Q_=Dc(Dy(),1),YT=Dc(iE(),1);function n(f){var u="https://react.dev/errors/"+f;if(1>>16)&65535)<<16)&4294967295,E=E<<15|E>>>17,E=461845907*(E&65535)+((461845907*(E>>>16)&65535)<<16)&4294967295,_^=E,_=_<<13|_>>>19,_=5*(_&65535)+((5*(_>>>16)&65535)<<16)&4294967295,_=(_&65535)+27492+(((_>>>16)+58964&65535)<<16)}switch(E=0,c){case 3:E^=(f.charCodeAt(u+2)&255)<<16;case 2:E^=(f.charCodeAt(u+1)&255)<<8;case 1:E^=f.charCodeAt(u)&255,E=3432918353*(E&65535)+((3432918353*(E>>>16)&65535)<<16)&4294967295,E=E<<15|E>>>17,_^=461845907*(E&65535)+((461845907*(E>>>16)&65535)<<16)&4294967295}return _^=f.length,_^=_>>>16,_=2246822507*(_&65535)+((2246822507*(_>>>16)&65535)<<16)&4294967295,_^=_>>>13,_=3266489909*(_&65535)+((3266489909*(_>>>16)&65535)<<16)&4294967295,(_^_>>>16)>>>0}var Zf=Object.assign,k=Object.prototype.hasOwnProperty,sg=RegExp("^[:A-Z_a-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD][:A-Z_a-z\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD\\-.0-9\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"),lx={},px={};function Rv(f){if(k.call(px,f))return!0;if(k.call(lx,f))return!1;if(sg.test(f))return px[f]=!0;return lx[f]=!0,!1}var fO=new Set("animationIterationCount aspectRatio borderImageOutset borderImageSlice borderImageWidth boxFlex boxFlexGroup boxOrdinalGroup columnCount columns flex flexGrow flexPositive flexShrink flexNegative flexOrder gridArea gridRow gridRowEnd gridRowSpan gridRowStart gridColumn gridColumnEnd gridColumnSpan gridColumnStart fontWeight lineClamp lineHeight opacity order orphans scale tabSize widows zIndex zoom fillOpacity floodOpacity stopOpacity strokeDasharray strokeDashoffset strokeMiterlimit strokeOpacity strokeWidth MozAnimationIterationCount MozBoxFlex MozBoxFlexGroup MozLineClamp msAnimationIterationCount msFlex msZoom msFlexGrow msFlexNegative msFlexOrder msFlexPositive msFlexShrink msGridColumn msGridColumnSpan msGridRow msGridRowSpan WebkitAnimationIterationCount WebkitBoxFlex WebKitBoxFlexGroup WebkitBoxOrdinalGroup WebkitColumnCount WebkitColumns WebkitFlex WebkitFlexGrow WebkitFlexPositive WebkitFlexShrink WebkitLineClamp".split(" ")),uO=new 
Map([["acceptCharset","accept-charset"],["htmlFor","for"],["httpEquiv","http-equiv"],["crossOrigin","crossorigin"],["accentHeight","accent-height"],["alignmentBaseline","alignment-baseline"],["arabicForm","arabic-form"],["baselineShift","baseline-shift"],["capHeight","cap-height"],["clipPath","clip-path"],["clipRule","clip-rule"],["colorInterpolation","color-interpolation"],["colorInterpolationFilters","color-interpolation-filters"],["colorProfile","color-profile"],["colorRendering","color-rendering"],["dominantBaseline","dominant-baseline"],["enableBackground","enable-background"],["fillOpacity","fill-opacity"],["fillRule","fill-rule"],["floodColor","flood-color"],["floodOpacity","flood-opacity"],["fontFamily","font-family"],["fontSize","font-size"],["fontSizeAdjust","font-size-adjust"],["fontStretch","font-stretch"],["fontStyle","font-style"],["fontVariant","font-variant"],["fontWeight","font-weight"],["glyphName","glyph-name"],["glyphOrientationHorizontal","glyph-orientation-horizontal"],["glyphOrientationVertical","glyph-orientation-vertical"],["horizAdvX","horiz-adv-x"],["horizOriginX","horiz-origin-x"],["imageRendering","image-rendering"],["letterSpacing","letter-spacing"],["lightingColor","lighting-color"],["markerEnd","marker-end"],["markerMid","marker-mid"],["markerStart","marker-start"],["overlinePosition","overline-position"],["overlineThickness","overline-thickness"],["paintOrder","paint-order"],["panose-1","panose-1"],["pointerEvents","pointer-events"],["renderingIntent","rendering-intent"],["shapeRendering","shape-rendering"],["stopColor","stop-color"],["stopOpacity","stop-opacity"],["strikethroughPosition","strikethrough-position"],["strikethroughThickness","strikethrough-thickness"],["strokeDasharray","stroke-dasharray"],["strokeDashoffset","stroke-dashoffset"],["strokeLinecap","stroke-linecap"],["strokeLinejoin","stroke-linejoin"],["strokeMiterlimit","stroke-miterlimit"],["strokeOpacity","stroke-opacity"],["strokeWidth","stroke-width"],["textAnchor","text-anchor"],["textDecoration","text-decoration"],["textRendering","text-rendering"],["transformOrigin","transform-origin"],["underlinePosition","underline-position"],["underlineThickness","underline-thickness"],["unicodeBidi","unicode-bidi"],["unicodeRange","unicode-range"],["unitsPerEm","units-per-em"],["vAlphabetic","v-alphabetic"],["vHanging","v-hanging"],["vIdeographic","v-ideographic"],["vMathematical","v-mathematical"],["vectorEffect","vector-effect"],["vertAdvY","vert-adv-y"],["vertOriginX","vert-origin-x"],["vertOriginY","vert-origin-y"],["wordSpacing","word-spacing"],["writingMode","writing-mode"],["xmlnsXlink","xmlns:xlink"],["xHeight","x-height"]]),cO=/["'&<>]/;function X(f){if(typeof f==="boolean"||typeof f==="number"||typeof f==="bigint")return""+f;f=""+f;var u=cO.exec(f);if(u){var c="",y,_=0;for(y=u.index;yf.insertionMode)return Rf(3,null,y,null);break;case"html":if(f.insertionMode===0)return Rf(1,null,y,null)}return 6<=f.insertionMode||2>f.insertionMode?Rf(2,null,y,null):f.tagScope!==y?Rf(f.insertionMode,f.selectedValue,y,null):f}function BT(f){return f===null?null:{update:f.update,enter:"none",exit:"none",share:f.update,name:f.autoName,autoName:f.autoName,nameIdx:0}}function pE(f,u){return u.tagScope&32&&(f.instructions|=128),Rf(u.insertionMode,u.selectedValue,u.tagScope|12,BT(u.viewTransition))}function w_(f,u){f=BT(u.viewTransition);var c=u.tagScope|16;return f!==null&&f.share!=="none"&&(c|=64),Rf(u.insertionMode,u.selectedValue,c,f)}var ox=new Map;function QT(f,u){if(typeof u!=="object")throw 
Error(n(62));var c=!0,y;for(y in u)if(k.call(u,y)){var _=u[y];if(_!=null&&typeof _!=="boolean"&&_!==""){if(y.indexOf("--")===0){var E=X(y);_=X((""+_).trim())}else E=ox.get(y),E===void 0&&(E=X(y.replace(yO,"-$1").toLowerCase().replace(_O,"-ms-")),ox.set(y,E)),_=typeof _==="number"?_===0||fO.has(y)?""+_:_+"px":X((""+_).trim());c?(c=!1,f.push(' style="',E,":",_)):f.push(";",E,":",_)}}c||f.push('"')}function qE(f,u,c){c&&typeof c!=="function"&&typeof c!=="symbol"&&f.push(" ",u,'=""')}function Af(f,u,c){typeof c!=="function"&&typeof c!=="symbol"&&typeof c!=="boolean"&&f.push(" ",u,'="',X(c),'"')}var ZT=X("javascript:throw new Error('React form unexpectedly submitted.')");function tE(f,u){this.push('")}function PT(f){if(typeof f!=="string")throw Error(n(480))}function JT(f,u){if(typeof u.$$FORM_ACTION==="function"){var c=f.nextFormID++;f=f.idPrefix+c;try{var y=u.$$FORM_ACTION(f);if(y){var _=y.data;_!=null&&_.forEach(PT)}return y}catch(E){if(typeof E==="object"&&E!==null&&typeof E.then==="function")throw E}}return null}function ex(f,u,c,y,_,E,v,T){var x=null;if(typeof y==="function"){var R=JT(u,y);R!==null?(T=R.name,y=R.action||"",_=R.encType,E=R.method,v=R.target,x=R.data):(f.push(" ","formAction",'="',ZT,'"'),v=E=_=y=T=null,VT(u,c))}return T!=null&&V(f,"name",T),y!=null&&V(f,"formAction",y),_!=null&&V(f,"formEncType",_),E!=null&&V(f,"formMethod",E),v!=null&&V(f,"formTarget",v),x}function V(f,u,c){switch(u){case"className":Af(f,"class",c);break;case"tabIndex":Af(f,"tabindex",c);break;case"dir":case"role":case"viewBox":case"width":case"height":Af(f,u,c);break;case"style":QT(f,c);break;case"src":case"href":if(c==="")break;case"action":case"formAction":if(c==null||typeof c==="function"||typeof c==="symbol"||typeof c==="boolean")break;c=Xy(""+c),f.push(" ",u,'="',X(c),'"');break;case"defaultValue":case"defaultChecked":case"innerHTML":case"suppressContentEditableWarning":case"suppressHydrationWarning":case"ref":break;case"autoFocus":case"multiple":case"muted":qE(f,u.toLowerCase(),c);break;case"xlinkHref":if(typeof c==="function"||typeof c==="symbol"||typeof c==="boolean")break;c=Xy(""+c),f.push(" ","xlink:href",'="',X(c),'"');break;case"contentEditable":case"spellCheck":case"draggable":case"value":case"autoReverse":case"externalResourcesRequired":case"focusable":case"preserveAlpha":typeof c!=="function"&&typeof c!=="symbol"&&f.push(" ",u,'="',X(c),'"');break;case"inert":case"allowFullScreen":case"async":case"autoPlay":case"controls":case"default":case"defer":case"disabled":case"disablePictureInPicture":case"disableRemotePlayback":case"formNoValidate":case"hidden":case"loop":case"noModule":case"noValidate":case"open":case"playsInline":case"readOnly":case"required":case"reversed":case"scoped":case"seamless":case"itemScope":c&&typeof c!=="function"&&typeof c!=="symbol"&&f.push(" ",u,'=""');break;case"capture":case"download":c===!0?f.push(" ",u,'=""'):c!==!1&&typeof c!=="function"&&typeof c!=="symbol"&&f.push(" ",u,'="',X(c),'"');break;case"cols":case"rows":case"size":case"span":typeof c!=="function"&&typeof c!=="symbol"&&!isNaN(c)&&1<=c&&f.push(" ",u,'="',X(c),'"');break;case"rowSpan":case"start":typeof c==="function"||typeof c==="symbol"||isNaN(c)||f.push(" 
",u,'="',X(c),'"');break;case"xlinkActuate":Af(f,"xlink:actuate",c);break;case"xlinkArcrole":Af(f,"xlink:arcrole",c);break;case"xlinkRole":Af(f,"xlink:role",c);break;case"xlinkShow":Af(f,"xlink:show",c);break;case"xlinkTitle":Af(f,"xlink:title",c);break;case"xlinkType":Af(f,"xlink:type",c);break;case"xmlBase":Af(f,"xml:base",c);break;case"xmlLang":Af(f,"xml:lang",c);break;case"xmlSpace":Af(f,"xml:space",c);break;default:if(!(2",`addEventListener("submit",function(a){if(!a.defaultPrevented){var c=a.target,d=a.submitter,e=c.action,b=d;if(d){var f=d.getAttribute("formAction");null!=f&&(e=f,b=null)}"javascript:throw new Error('React form unexpectedly submitted.')"===e&&(a.preventDefault(),b?(a=document.createElement("input"),a.name=b.name,a.value=b.value,b.parentNode.insertBefore(a,b),b=new FormData(c),a.parentNode.removeChild(a)):b=new FormData(c),a=c.ownerDocument||c,(a.$$reactFormReplay=a.$$reactFormReplay||[]).push(c,d,b))}});`,"")):y.unshift(u.startInlineScript,">",`addEventListener("submit",function(a){if(!a.defaultPrevented){var c=a.target,d=a.submitter,e=c.action,b=d;if(d){var f=d.getAttribute("formAction");null!=f&&(e=f,b=null)}"javascript:throw new Error('React form unexpectedly submitted.')"===e&&(a.preventDefault(),b?(a=document.createElement("input"),a.name=b.name,a.value=b.value,b.parentNode.insertBefore(a,b),b=new FormData(c),a.parentNode.removeChild(a)):b=new FormData(c),a=c.ownerDocument||c,(a.$$reactFormReplay=a.$$reactFormReplay||[]).push(c,d,b))}});`,"")}}function wf(f,u){f.push(cf("link"));for(var c in u)if(k.call(u,c)){var y=u[c];if(y!=null)switch(c){case"children":case"dangerouslySetInnerHTML":throw Error(n(399,"link"));default:V(f,c,y)}}return f.push("/>"),null}var ax=/(<\/|<)(s)(tyle)/gi;function sx(f,u,c,y){return""+u+(c==="s"?"\\73 ":"\\53 ")+y}function jc(f,u,c){f.push(cf(c));for(var y in u)if(k.call(u,y)){var _=u[y];if(_!=null)switch(y){case"children":case"dangerouslySetInnerHTML":throw Error(n(399,c));default:V(f,y,_)}}return f.push("/>"),null}function fT(f,u){f.push(cf("title"));var c=null,y=null,_;for(_ in u)if(k.call(u,_)){var E=u[_];if(E!=null)switch(_){case"children":c=E;break;case"dangerouslySetInnerHTML":y=E;break;default:V(f,_,E)}}return f.push(">"),u=Array.isArray(c)?2>c.length?c[0]:null:c,typeof u!=="function"&&typeof u!=="symbol"&&u!==null&&u!==void 0&&f.push(X(""+u)),kf(f,y,c),f.push(Ic("title")),null}function n_(f,u){f.push(cf("script"));var c=null,y=null,_;for(_ in u)if(k.call(u,_)){var E=u[_];if(E!=null)switch(_){case"children":c=E;break;case"dangerouslySetInnerHTML":y=E;break;default:V(f,_,E)}}return f.push(">"),kf(f,y,c),typeof c==="string"&&f.push((""+c).replace(GT,XT)),f.push(Ic("script")),null}function KE(f,u,c){f.push(cf(c));var y=c=null,_;for(_ in u)if(k.call(u,_)){var E=u[_];if(E!=null)switch(_){case"children":c=E;break;case"dangerouslySetInnerHTML":y=E;break;default:V(f,_,E)}}return f.push(">"),kf(f,y,c),c}function g_(f,u,c){f.push(cf(c));var y=c=null,_;for(_ in u)if(k.call(u,_)){var E=u[_];if(E!=null)switch(_){case"children":c=E;break;case"dangerouslySetInnerHTML":y=E;break;default:V(f,_,E)}}return f.push(">"),kf(f,y,c),typeof c==="string"?(f.push(X(c)),null):c}var RO=/^[a-zA-Z][a-zA-Z:_\.\-\d]*$/,uT=new Map;function cf(f){var u=uT.get(f);if(u===void 0){if(!RO.test(f))throw Error(n(65,f));u="<"+f,uT.set(f,u)}return u}function CO(f,u,c,y,_,E,v,T,x){switch(u){case"div":case"span":case"svg":case"path":break;case"a":f.push(cf("a"));var R=null,C=null,g;for(g in c)if(k.call(c,g)){var 
O=c[g];if(O!=null)switch(g){case"children":R=O;break;case"dangerouslySetInnerHTML":C=O;break;case"href":O===""?Af(f,"href",""):V(f,g,O);break;default:V(f,g,O)}}if(f.push(">"),kf(f,C,R),typeof R==="string"){f.push(X(R));var m=null}else m=R;return m;case"g":case"p":case"li":break;case"select":f.push(cf("select"));var M=null,U=null,I;for(I in c)if(k.call(c,I)){var Y=c[I];if(Y!=null)switch(I){case"children":M=Y;break;case"dangerouslySetInnerHTML":U=Y;break;case"defaultValue":case"value":break;default:V(f,I,Y)}}return f.push(">"),kf(f,U,M),M;case"option":var r=T.selectedValue;f.push(cf("option"));var G=null,B=null,z=null,L=null,e;for(e in c)if(k.call(c,e)){var b=c[e];if(b!=null)switch(e){case"children":G=b;break;case"selected":z=b;break;case"dangerouslySetInnerHTML":L=b;break;case"value":B=b;default:V(f,e,b)}}if(r!=null){var $=B!==null?""+B:TO(G);if(M_(r)){for(var a=0;a"),kf(f,L,G),G;case"textarea":f.push(cf("textarea"));var D=null,F=null,J=null,j;for(j in c)if(k.call(c,j)){var l=c[j];if(l!=null)switch(j){case"children":J=l;break;case"value":D=l;break;case"defaultValue":F=l;break;case"dangerouslySetInnerHTML":throw Error(n(91));default:V(f,j,l)}}if(D===null&&F!==null&&(D=F),f.push(">"),J!=null){if(D!=null)throw Error(n(92));if(M_(J)){if(1"),Du!=null&&Du.forEach(tE,f),null;case"button":f.push(cf("button"));var Eu=null,lc=null,pc=null,qc=null,oc=null,ec=null,ac=null,vu;for(vu in c)if(k.call(c,vu)){var uf=c[vu];if(uf!=null)switch(vu){case"children":Eu=uf;break;case"dangerouslySetInnerHTML":lc=uf;break;case"name":pc=uf;break;case"formAction":qc=uf;break;case"formEncType":oc=uf;break;case"formMethod":ec=uf;break;case"formTarget":ac=uf;break;default:V(f,vu,uf)}}var sc=ex(f,y,_,qc,oc,ec,ac,pc);if(f.push(">"),sc!=null&&sc.forEach(tE,f),kf(f,lc,Eu),typeof Eu==="string"){f.push(X(Eu));var fy=null}else fy=Eu;return fy;case"form":f.push(cf("form"));var xu=null,uy=null,Mf=null,Tu=null,Ru=null,Cu=null,gu;for(gu in c)if(k.call(c,gu)){var xf=c[gu];if(xf!=null)switch(gu){case"children":xu=xf;break;case"dangerouslySetInnerHTML":uy=xf;break;case"action":Mf=xf;break;case"encType":Tu=xf;break;case"method":Ru=xf;break;case"target":Cu=xf;break;default:V(f,gu,xf)}}var uc=null,cc=null;if(typeof Mf==="function"){var nf=JT(y,Mf);nf!==null?(Mf=nf.action||"",Tu=nf.encType,Ru=nf.method,Cu=nf.target,uc=nf.data,cc=nf.name):(f.push(" ","action",'="',ZT,'"'),Cu=Ru=Tu=Mf=null,VT(y,_))}if(Mf!=null&&V(f,"action",Mf),Tu!=null&&V(f,"encType",Tu),Ru!=null&&V(f,"method",Ru),Cu!=null&&V(f,"target",Cu),f.push(">"),cc!==null&&(f.push('"),uc!=null&&uc.forEach(tE,f)),kf(f,uy,xu),typeof xu==="string"){f.push(X(xu));var cy=null}else cy=xu;return cy;case"menuitem":f.push(cf("menuitem"));for(var $u in c)if(k.call(c,$u)){var yy=c[$u];if(yy!=null)switch($u){case"children":case"dangerouslySetInnerHTML":throw Error(n(400));default:V(f,$u,yy)}}return f.push(">"),null;case"object":f.push(cf("object"));var Ou=null,_y=null,Au;for(Au in c)if(k.call(c,Au)){var wu=c[Au];if(wu!=null)switch(Au){case"children":Ou=wu;break;case"dangerouslySetInnerHTML":_y=wu;break;case"data":var Ey=Xy(""+wu);if(Ey==="")break;f.push(" ","data",'="',X(Ey),'"');break;default:V(f,Au,wu)}}if(f.push(">"),kf(f,_y,Ou),typeof Ou==="string"){f.push(X(Ou));var vy=null}else vy=Ou;return vy;case"title":var YE=T.tagScope&1,ME=T.tagScope&4;if(T.insertionMode===4||YE||c.itemProp!=null)var yc=fT(f,c);else ME?yc=null:(fT(_.hoistableChunks,c),yc=void 0);return yc;case"link":var 
nE=T.tagScope&1,NE=T.tagScope&4,rE=c.rel,Tf=c.href,zu=c.precedence;if(T.insertionMode===4||nE||c.itemProp!=null||typeof rE!=="string"||typeof Tf!=="string"||Tf===""){wf(f,c);var mu=null}else if(c.rel==="stylesheet")if(typeof zu!=="string"||c.disabled!=null||c.onLoad||c.onError)mu=wf(f,c);else{var Vf=_.styles.get(zu),Wu=y.styleResources.hasOwnProperty(Tf)?y.styleResources[Tf]:void 0;if(Wu!==null){y.styleResources[Tf]=null,Vf||(Vf={precedence:X(zu),rules:[],hrefs:[],sheets:new Map},_.styles.set(zu,Vf));var ju={state:0,props:Zf({},c,{"data-precedence":c.precedence,precedence:null})};if(Wu){Wu.length===2&&By(ju.props,Wu);var _c=_.preloads.stylesheets.get(Tf);_c&&0<_c.length?_c.length=0:ju.state=1}Vf.sheets.set(Tf,ju),v&&v.stylesheets.add(ju)}else if(Vf){var xy=Vf.sheets.get(Tf);xy&&v&&v.stylesheets.add(xy)}x&&f.push(""),mu=null}else c.onLoad||c.onError?mu=wf(f,c):(x&&f.push(""),mu=NE?null:wf(_.hoistableChunks,c));return mu;case"script":var DE=T.tagScope&1,Ec=c.async;if(typeof c.src!=="string"||!c.src||!Ec||typeof Ec==="function"||typeof Ec==="symbol"||c.onLoad||c.onError||T.insertionMode===4||DE||c.itemProp!=null)var Ty=n_(f,c);else{var Gu=c.src;if(c.type==="module")var Xu=y.moduleScriptResources,Ry=_.preloads.moduleScripts;else Xu=y.scriptResources,Ry=_.preloads.scripts;var Bu=Xu.hasOwnProperty(Gu)?Xu[Gu]:void 0;if(Bu!==null){Xu[Gu]=null;var vc=c;if(Bu){Bu.length===2&&(vc=Zf({},c),By(vc,Bu));var Cy=Ry.get(Gu);Cy&&(Cy.length=0)}var gy=[];_.scripts.add(gy),n_(gy,vc)}x&&f.push(""),Ty=null}return Ty;case"style":var $E=T.tagScope&1,Qu=c.precedence,Sf=c.href,zE=c.nonce;if(T.insertionMode===4||$E||c.itemProp!=null||typeof Qu!=="string"||typeof Sf!=="string"||Sf===""){f.push(cf("style"));var Ff=null,Oy=null,Hu;for(Hu in c)if(k.call(c,Hu)){var Zu=c[Hu];if(Zu!=null)switch(Hu){case"children":Ff=Zu;break;case"dangerouslySetInnerHTML":Oy=Zu;break;default:V(f,Hu,Zu)}}f.push(">");var Iu=Array.isArray(Ff)?2>Ff.length?Ff[0]:null:Ff;typeof Iu!=="function"&&typeof Iu!=="symbol"&&Iu!==null&&Iu!==void 0&&f.push((""+Iu).replace(ax,sx)),kf(f,Oy,Ff),f.push(Ic("style"));var Ay=null}else{var Nf=_.styles.get(Qu);if((y.styleResources.hasOwnProperty(Sf)?y.styleResources[Sf]:void 0)!==null){y.styleResources[Sf]=null,Nf||(Nf={precedence:X(Qu),rules:[],hrefs:[],sheets:new Map},_.styles.set(Qu,Nf));var wy=_.nonce.style;if(!wy||wy===zE){Nf.hrefs.push(X(Sf));var my=Nf.rules,hf=null,Hy=null,Pu;for(Pu in c)if(k.call(c,Pu)){var xc=c[Pu];if(xc!=null)switch(Pu){case"children":hf=xc;break;case"dangerouslySetInnerHTML":Hy=xc}}var Uu=Array.isArray(hf)?2>hf.length?hf[0]:null:hf;typeof Uu!=="function"&&typeof Uu!=="symbol"&&Uu!==null&&Uu!==void 0&&my.push((""+Uu).replace(ax,sx)),kf(my,Hy,hf)}}Nf&&v&&v.styles.add(Nf),x&&f.push(""),Ay=void 0}return Ay;case"meta":var WE=T.tagScope&1,jE=T.tagScope&4;if(T.insertionMode===4||WE||c.itemProp!=null)var Iy=jc(f,c,"meta");else x&&f.push(""),Iy=jE?null:typeof c.charSet==="string"?jc(_.charsetChunks,c,"meta"):c.name==="viewport"?jc(_.viewportChunks,c,"meta"):jc(_.hoistableChunks,c,"meta");return Iy;case"listing":case"pre":f.push(cf(u));var Lu=null,Yu=null,Mu;for(Mu in c)if(k.call(c,Mu)){var Ju=c[Mu];if(Ju!=null)switch(Mu){case"children":Lu=Ju;break;case"dangerouslySetInnerHTML":Yu=Ju;break;default:V(f,Mu,Ju)}}if(f.push(">"),Yu!=null){if(Lu!=null)throw Error(n(60));if(typeof Yu!=="object"||!("__html"in Yu))throw Error(n(61));var rf=Yu.__html;rf!==null&&rf!==void 0&&(typeof rf==="string"&&0_.highImagePreloads.size)Tc.delete(tf),_.highImagePreloads.add(Df)}else 
if(!y.imageResources.hasOwnProperty(tf)){y.imageResources[tf]=sf;var Rc=c.crossOrigin,Ly=typeof Rc==="string"?Rc==="use-credentials"?Rc:"":void 0,$f=_.headers,Cc;$f&&0<$f.remainingCapacity&&typeof c.srcSet!=="string"&&(c.fetchPriority==="high"||500>$f.highImagePreloads.length)&&(Cc=r_(Z,"image",{imageSrcSet:c.srcSet,imageSizes:c.sizes,crossOrigin:Ly,integrity:c.integrity,nonce:c.nonce,type:c.type,fetchPriority:c.fetchPriority,referrerPolicy:c.refererPolicy}),0<=($f.remainingCapacity-=Cc.length+2))?(_.resets.image[tf]=sf,$f.highImagePreloads&&($f.highImagePreloads+=", "),$f.highImagePreloads+=Cc):(Df=[],wf(Df,{rel:"preload",as:"image",href:Q?void 0:Z,imageSrcSet:Q,imageSizes:Uy,crossOrigin:Ly,integrity:c.integrity,type:c.type,fetchPriority:c.fetchPriority,referrerPolicy:c.referrerPolicy}),c.fetchPriority==="high"||10>_.highImagePreloads.size?_.highImagePreloads.add(Df):(_.bulkPreloads.add(Df),Tc.set(tf,Df)))}}return jc(f,c,"img");case"base":case"area":case"br":case"col":case"embed":case"hr":case"keygen":case"param":case"source":case"track":case"wbr":return jc(f,c,u);case"annotation-xml":case"color-profile":case"font-face":case"font-face-src":case"font-face-uri":case"font-face-format":case"font-face-name":case"missing-glyph":break;case"head":if(2>T.insertionMode){var gc=E||_.preamble;if(gc.headChunks)throw Error(n(545,"``"));E!==null&&f.push(""),gc.headChunks=[];var Yy=KE(gc.headChunks,c,"head")}else Yy=g_(f,c,"head");return Yy;case"body":if(2>T.insertionMode){var Oc=E||_.preamble;if(Oc.bodyChunks)throw Error(n(545,"``"));E!==null&&f.push(""),Oc.bodyChunks=[];var My=KE(Oc.bodyChunks,c,"body")}else My=g_(f,c,"body");return My;case"html":if(T.insertionMode===0){var Ac=E||_.preamble;if(Ac.htmlChunks)throw Error(n(545,"``"));E!==null&&f.push(""),Ac.htmlChunks=[""];var ny=KE(Ac.htmlChunks,c,"html")}else ny=g_(f,c,"html");return ny;default:if(u.indexOf("-")!==-1){f.push(cf(u));var wc=null,Ny=null,Kf;for(Kf in c)if(k.call(c,Kf)){var p=c[Kf];if(p!=null){var ry=Kf;switch(Kf){case"children":wc=p;break;case"dangerouslySetInnerHTML":Ny=p;break;case"style":QT(f,p);break;case"suppressContentEditableWarning":case"suppressHydrationWarning":case"ref":break;case"className":ry="class";default:if(Rv(Kf)&&typeof p!=="function"&&typeof p!=="symbol"&&p!==!1){if(p===!0)p="";else if(typeof p==="object")continue;f.push(" ",ry,'="',X(p),'"')}}}}return f.push(">"),kf(f,Ny,wc),wc}}return g_(f,c,u)}var cT=new Map;function Ic(f){var u=cT.get(f);return u===void 0&&(u="",cT.set(f,u)),u}function yT(f,u){f=f.preamble,f.htmlChunks===null&&u.htmlChunks&&(f.htmlChunks=u.htmlChunks),f.headChunks===null&&u.headChunks&&(f.headChunks=u.headChunks),f.bodyChunks===null&&u.bodyChunks&&(f.bodyChunks=u.bodyChunks)}function ST(f,u){u=u.bootstrapChunks;for(var c=0;c')}function gO(f,u,c,y){switch(c.insertionMode){case 0:case 1:case 3:case 2:return f.push('