From c2c204807242340b7dfe6537d84771bdff7bb85e Mon Sep 17 00:00:00 2001 From: dave caruso Date: Thu, 12 Sep 2024 16:44:03 -0700 Subject: [PATCH] framework api: init / work in progress (#13215) Co-authored-by: Jarred Sumner --- .gitignore | 1 + build.zig | 63 +- cmake/targets/BuildBun.cmake | 44 + misctools/bun-feature-data.ts | 3 +- scripts/build.mjs | 29 +- src/ast/base.zig | 28 +- src/bun.js/ConsoleObject.zig | 37 +- src/bun.js/api/bun/socket.zig | 65 +- src/bun.js/api/bun/udp_socket.zig | 22 +- src/bun.js/api/ffi.zig | 37 +- src/bun.js/api/html_rewriter.zig | 6 +- src/bun.js/api/js_brotli.zig | 16 +- src/bun.js/api/js_zlib.zig | 40 +- src/bun.js/api/server.zig | 119 +- src/bun.js/bindings/BunObject.cpp | 7 + src/bun.js/bindings/ZigGlobalObject.cpp | 2 +- src/bun.js/bindings/ZigGlobalObject.h | 8 +- src/bun.js/bindings/bindings.cpp | 53 +- src/bun.js/bindings/bindings.zig | 185 +- src/bun.js/bindings/headers.h | 2 +- src/bun.js/bindings/headers.zig | 1 - .../bindings/webcore/DOMClientIsoSubspaces.h | 2 + src/bun.js/bindings/webcore/DOMIsoSubspaces.h | 2 + src/bun.js/event_loop.zig | 32 +- src/bun.js/javascript.zig | 219 +- src/bun.js/javascript_core_c_api.zig | 1 - src/bun.js/module_loader.zig | 50 - src/bun.js/node/node_fs_stat_watcher.zig | 19 +- src/bun.js/node/node_fs_watcher.zig | 12 +- src/bun.js/test/expect.zig | 71 +- src/bun.js/test/jest.zig | 23 +- src/bun.js/test/pretty_format.zig | 3 +- src/bun.js/web_worker.zig | 2 +- src/bun.js/webcore/blob/ReadFile.zig | 2 +- src/bun.js/webcore/blob/WriteFile.zig | 22 +- src/bun.js/webcore/body.zig | 2 +- src/bun.js/webcore/request.zig | 7 +- src/bun.js/webcore/response.zig | 12 +- src/bun.js/webcore/streams.zig | 3 +- src/bun.zig | 62 +- src/bun_js.zig | 12 +- src/bundler.zig | 1 - src/bundler/bundle_v2.zig | 1086 +++++--- src/cli.zig | 38 +- src/cli/build_command.zig | 37 +- src/cli/test_command.zig | 2 +- src/codegen/buildTypeFlag.ts | 4 +- src/codegen/kit-codegen.ts | 152 ++ src/defines.zig | 109 +- src/deps/uws.zig | 190 +- src/env.zig | 34 +- src/feature_flags.zig | 52 +- src/fs.zig | 8 + src/js_ast.zig | 299 +- src/js_parser.zig | 2413 ++++++++--------- src/js_printer.zig | 80 +- src/kit/DevServer.zig | 925 +++++++ src/kit/KitDevGlobalObject.cpp | 82 + src/kit/KitDevGlobalObject.h | 42 + src/kit/KitSourceProvider.cpp | 55 + src/kit/KitSourceProvider.h | 47 + src/kit/client/overlay.css | 18 + src/kit/client/overlay.ts | 27 + src/kit/hmr-module.ts | 65 + src/kit/hmr-runtime-types.d.ts | 35 + src/kit/hmr-runtime.ts | 46 + src/kit/kit.zig | 109 + src/kit/macros.ts | 11 + src/kit/tsconfig.json | 18 + src/logger.zig | 10 +- src/main.zig | 2 - src/napi/napi.zig | 11 +- src/options.zig | 63 +- src/react-refresh.js | 348 --- src/resolver/resolver.zig | 3 +- src/runtime.js | 113 +- src/runtime.zig | 53 +- src/shell/interpreter.zig | 12 +- src/sql/postgres.zig | 2 +- src/watcher.zig | 10 +- .../__snapshots__/bun-build-api.test.ts.snap | 475 ++++ test/bundler/bundler_kit_dev.test.ts | 39 + test/bundler/expectBundled.ts | 13 +- .../__snapshots__/transpiler.test.js.snap | 552 ++++ test/bundler/transpiler/transpiler.test.js | 22 +- .../test/__snapshots__/test-interop.js.snap | 8 + test/js/bun/test/expect.test.js | 2 + test/js/bun/test/jest-extended.test.js | 4 +- test/js/node/http/node-http.test.ts | 3 +- test/js/node/missing-module.test.js | 2 +- test/regression/issue/03844/03844.test.ts | 6 +- 91 files changed, 6013 insertions(+), 3051 deletions(-) create mode 100644 src/codegen/kit-codegen.ts create mode 100644 src/kit/DevServer.zig create mode 
100644 src/kit/KitDevGlobalObject.cpp create mode 100644 src/kit/KitDevGlobalObject.h create mode 100644 src/kit/KitSourceProvider.cpp create mode 100644 src/kit/KitSourceProvider.h create mode 100644 src/kit/client/overlay.css create mode 100644 src/kit/client/overlay.ts create mode 100644 src/kit/hmr-module.ts create mode 100644 src/kit/hmr-runtime-types.d.ts create mode 100644 src/kit/hmr-runtime.ts create mode 100644 src/kit/kit.zig create mode 100644 src/kit/macros.ts create mode 100644 src/kit/tsconfig.json delete mode 100644 src/react-refresh.js create mode 100644 test/bundler/bundler_kit_dev.test.ts diff --git a/.gitignore b/.gitignore index d251b9e05e..126af7cebd 100644 --- a/.gitignore +++ b/.gitignore @@ -140,6 +140,7 @@ zig-out test/node.js/upstream .zig-cache scripts/env.local +*.generated.ts # Dependencies /vendor diff --git a/build.zig b/build.zig index 68496365d3..1f9654ea9c 100644 --- a/build.zig +++ b/build.zig @@ -44,10 +44,21 @@ const BunBuildOptions = struct { version: Version, canary_revision: ?u32, sha: []const u8, + /// enable debug logs in release builds enable_logs: bool = false, tracy_callstack_depth: u16, reported_nodejs_version: Version, + /// To make iterating on some '@embedFile's faster, we load them at runtime + /// instead of at compile time. This is disabled in release or if this flag + /// is set (to allow CI to build a portable executable). Affected files: + /// + /// - src/kit/runtime.ts (bundled) + /// - src/bun.js/api/FFI.h + /// + /// A similar technique is used in C++ code for JavaScript builtins + force_embed_code: bool = false, + /// `./build/codegen` or equivalent generated_code_dir: []const u8, no_llvm: bool, @@ -59,6 +70,10 @@ const BunBuildOptions = struct { !Target.x86.featureSetHas(this.target.result.cpu.features, .avx2); } + pub fn shouldEmbedCode(opts: *const BunBuildOptions) bool { + return opts.optimize != .Debug or opts.force_embed_code; + } + pub fn buildOptionsModule(this: *BunBuildOptions, b: *Build) *Module { if (this.cached_options_module) |mod| { return mod; @@ -66,6 +81,12 @@ const BunBuildOptions = struct { var opts = b.addOptions(); opts.addOption([]const u8, "base_path", b.pathFromRoot(".")); + opts.addOption([]const u8, "codegen_path", std.fs.path.resolve(b.graph.arena, &.{ + b.build_root.path.?, + this.generated_code_dir, + }) catch @panic("OOM")); + + opts.addOption(bool, "embed_code", this.shouldEmbedCode()); opts.addOption(u32, "canary_revision", this.canary_revision orelse 0); opts.addOption(bool, "is_canary", this.canary_revision != null); opts.addOption(Version, "version", this.version); @@ -134,6 +155,13 @@ pub fn build(b: *Build) !void { b.zig_lib_dir = b.zig_lib_dir orelse b.path("vendor/zig/lib"); + // TODO: Upgrade path for 0.14.0 + // b.graph.zig_lib_directory = brk: { + // const sub_path = "src/deps/zig/lib"; + // const dir = try b.build_root.handle.openDir(sub_path, .{}); + // break :brk .{ .handle = dir, .path = try b.build_root.join(b.graph.arena, &.{sub_path}) }; + // }; + var target_query = b.standardTargetOptionsQueryOnly(.{}); const optimize = b.standardOptimizeOption(.{}); @@ -172,6 +200,7 @@ pub fn build(b: *Build) !void { "build/codegen", ); const bun_version = b.option([]const u8, "version", "Value of `Bun.version`") orelse "0.0.0"; + const force_embed_js_code = b.option(bool, "force_embed_js_code", "Always embed JavaScript builtins") orelse false; b.reference_trace = ref_trace: { const trace = b.option(u32, "reference-trace", "Set the reference trace") orelse 16; @@ -190,6 +219,7 @@ pub fn build(b: 
*Build) !void { .arch = arch, .generated_code_dir = generated_code_dir, + .force_embed_code = force_embed_js_code, .no_llvm = no_llvm, .version = try Version.parse(bun_version), @@ -439,23 +469,22 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void { .root_source_file = b.path(async_path), }); - const zig_generated_classes_path = b.pathJoin(&.{ opts.generated_code_dir, "ZigGeneratedClasses.zig" }); - validateGeneratedPath(zig_generated_classes_path); - obj.root_module.addAnonymousImport("ZigGeneratedClasses", .{ - .root_source_file = .{ .cwd_relative = zig_generated_classes_path }, - }); - - const resolved_source_tag_path = b.pathJoin(&.{ opts.generated_code_dir, "ResolvedSourceTag.zig" }); - validateGeneratedPath(resolved_source_tag_path); - obj.root_module.addAnonymousImport("ResolvedSourceTag", .{ - .root_source_file = .{ .cwd_relative = resolved_source_tag_path }, - }); - - const error_code_path = b.pathJoin(&.{ opts.generated_code_dir, "ErrorCode.zig" }); - validateGeneratedPath(error_code_path); - obj.root_module.addAnonymousImport("ErrorCode", .{ - .root_source_file = .{ .cwd_relative = error_code_path }, - }); + // Generated code exposed as individual modules. + inline for (.{ + .{ .file = "ZigGeneratedClasses.zig", .import = "ZigGeneratedClasses" }, + .{ .file = "ResolvedSourceTag.zig", .import = "ResolvedSourceTag" }, + .{ .file = "ErrorCode.zig", .import = "ErrorCode" }, + .{ .file = "kit.client.js", .import = "kit-codegen/kit.client.js", .enable = opts.shouldEmbedCode() }, + .{ .file = "kit.server.js", .import = "kit-codegen/kit.server.js", .enable = opts.shouldEmbedCode() }, + }) |entry| { + if (!@hasField(@TypeOf(entry), "enable") or entry.enable) { + const path = b.pathJoin(&.{ opts.generated_code_dir, entry.file }); + validateGeneratedPath(path); + obj.root_module.addAnonymousImport(entry.import, .{ + .root_source_file = .{ .cwd_relative = path }, + }); + } + } if (os == .windows) { obj.root_module.addAnonymousImport("bun_shim_impl.exe", .{ diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 62f297aa6f..d012066e84 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -326,6 +326,42 @@ register_command( ${BUN_JAVASCRIPT_OUTPUTS} ) +set(BUN_KIT_RUNTIME_CODEGEN_SCRIPT ${CWD}/src/codegen/kit-codegen.ts) + +file(GLOB_RECURSE BUN_KIT_RUNTIME_SOURCES ${CONFIGURE_DEPENDS} + ${CWD}/src/kit/*.ts + ${CWD}/src/kit/*/*.ts +) + +list(APPEND BUN_KIT_RUNTIME_CODEGEN_SOURCES + ${CWD}/src/bun.js/bindings/InternalModuleRegistry.cpp +) + +set(BUN_KIT_RUNTIME_OUTPUTS + ${CODEGEN_PATH}/kit_empty_file + ${CODEGEN_PATH}/kit.client.js + ${CODEGEN_PATH}/kit.server.js +) + +register_command( + TARGET + bun-kit-codegen + COMMENT + "Bundling Kit Runtime" + COMMAND + ${BUN_EXECUTABLE} + run + ${BUN_KIT_RUNTIME_CODEGEN_SCRIPT} + --debug=${DEBUG} + --codegen_root=${CODEGEN_PATH} + SOURCES + ${BUN_KIT_RUNTIME_SOURCES} + ${BUN_KIT_RUNTIME_CODEGEN_SOURCES} + ${BUN_KIT_RUNTIME_CODEGEN_SCRIPT} + OUTPUTS + ${BUN_KIT_RUNTIME_OUTPUTS} +) + set(BUN_JS_SINK_SCRIPT ${CWD}/src/codegen/generate-jssink.ts) set(BUN_JS_SINK_SOURCES @@ -460,6 +496,13 @@ list(APPEND BUN_ZIG_SOURCES ${BUN_JAVASCRIPT_OUTPUTS} ) +if (CMAKE_BUILD_TYPE STREQUAL "Debug") + # in a debug build, these are not embedded, but rather referenced at runtime. 
+ list(APPEND BUN_ZIG_SOURCES ${CODEGEN_PATH}/kit_empty_file) +else() + list(APPEND BUN_ZIG_SOURCES ${BUN_KIT_RUNTIME_OUTPUTS}) +endif() + set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o) register_command( @@ -506,6 +549,7 @@ file(GLOB BUN_CXX_SOURCES ${CONFIGURE_DEPENDS} ${CWD}/src/bun.js/bindings/webcrypto/*.cpp ${CWD}/src/bun.js/bindings/webcrypto/*/*.cpp ${CWD}/src/bun.js/bindings/v8/*.cpp + ${CWD}/src/kit/*.cpp ${CWD}/src/deps/*.cpp ${BUN_USOCKETS_SOURCE}/src/crypto/*.cpp ) diff --git a/misctools/bun-feature-data.ts b/misctools/bun-feature-data.ts index ea2ee3b54f..5358a6584f 100644 --- a/misctools/bun-feature-data.ts +++ b/misctools/bun-feature-data.ts @@ -3,13 +3,14 @@ try { internal = require("bun:internal-for-testing"); } catch { const result = Bun.spawnSync({ - cmd: [process.execPath, import.meta.file], + cmd: [process.execPath, import.meta.path], env: { ...process.env, BUN_DEBUG_QUIET_LOGS: "1", BUN_GARBAGE_COLLECTOR_LEVEL: "0", BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "1", }, + stdio: ['inherit', 'inherit', 'inherit'], }); process.exit(result.exitCode); } diff --git a/scripts/build.mjs b/scripts/build.mjs index 0da0628291..0bac459510 100644 --- a/scripts/build.mjs +++ b/scripts/build.mjs @@ -2,7 +2,7 @@ import { spawn as nodeSpawn } from "node:child_process"; import { existsSync, readFileSync, readdirSync, mkdirSync, cpSync, chmodSync } from "node:fs"; -import { join, relative, resolve } from "node:path"; +import { basename, join, relative, resolve } from "node:path"; // https://cmake.org/cmake/help/latest/manual/cmake.1.html#generate-a-project-buildsystem const generateFlags = [ @@ -29,6 +29,8 @@ const buildFlags = [ ]; async function build(args) { + const startTime = Date.now(); + if (process.platform === "win32" && !process.env["VSINSTALLDIR"]) { const shellPath = join(import.meta.dirname, "vs-shell.ps1"); const scriptPath = import.meta.filename; @@ -100,7 +102,7 @@ async function build(args) { const generateArgs = Object.entries(generateOptions).flatMap(([flag, value]) => flag.startsWith("-D") ? [`${flag}=${value}`] : [flag, value], ); - await spawn("cmake", generateArgs, { env }); + await spawn("cmake", generateArgs, { env }, "configuration"); const envPath = resolve(buildPath, ".env"); if (existsSync(envPath)) { @@ -114,7 +116,7 @@ async function build(args) { const buildArgs = Object.entries(buildOptions) .sort(([a], [b]) => (a === "--build" ? -1 : a.localeCompare(b))) .flatMap(([flag, value]) => [flag, value]); - await spawn("cmake", buildArgs, { env }); + await spawn("cmake", buildArgs, { env }, "compilation"); const buildFiles = ["ccache.log", "compile_commands.json"]; const buildPaths = [buildPath, ...readdirSync(buildPath).map(path => join(buildPath, path))]; @@ -135,6 +137,8 @@ async function build(args) { ), ); } + + printDuration('total', Date.now() - startTime); } function cmakePath(path) { @@ -198,11 +202,13 @@ function parseOptions(args, flags = []) { return options; } -async function spawn(command, args, options) { +async function spawn(command, args, options, label) { const effectiveArgs = args.filter(Boolean); const description = [command, ...effectiveArgs].map(arg => (arg.includes(" ") ? 
JSON.stringify(arg) : arg)).join(" "); console.log("$", description); + label ??= basename(command); + const subprocess = nodeSpawn(command, effectiveArgs, { stdio: "pipe", ...options, @@ -232,12 +238,7 @@ async function spawn(command, args, options) { await done; - const duration = Date.now() - timestamp; - if (duration > 60000) { - console.log(`Took ${(duration / 60000).toFixed(2)} minutes`); - } else { - console.log(`Took ${(duration / 1000).toFixed(2)} seconds`); - } + printDuration(label, Date.now() - timestamp); if (exitCode === 0) { return; @@ -254,4 +255,12 @@ async function spawn(command, args, options) { process.exit(exitCode ?? 1); } +function printDuration(label, duration) { + if (duration > 60000) { + console.log(`${label} took ${(duration / 60000).toFixed(2)} minutes`); + } else { + console.log(`${label} took ${(duration / 1000).toFixed(2)} seconds`); + } +} + build(process.argv.slice(2)); diff --git a/src/ast/base.zig b/src/ast/base.zig index 20eb0183bb..26f3815e24 100644 --- a/src/ast/base.zig +++ b/src/ast/base.zig @@ -152,18 +152,11 @@ pub const Ref = packed struct(u64) { pub fn dump(ref: Ref, symbol_table: anytype) std.fmt.Formatter(dumpImpl) { return .{ .data = .{ .ref = ref, - .symbol_table = switch (@TypeOf(symbol_table)) { - *const std.ArrayList(js_ast.Symbol) => symbol_table.items, - *std.ArrayList(js_ast.Symbol) => symbol_table.items, - []const js_ast.Symbol => symbol_table, - []js_ast.Symbol => symbol_table, - else => |T| @compileError("Unsupported type to Ref.dump: " ++ @typeName(T)), - }, + .symbol = ref.getSymbol(symbol_table), } }; } - fn dumpImpl(data: struct { ref: Ref, symbol_table: []const js_ast.Symbol }, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - const symbol = data.symbol_table[data.ref.inner_index]; + fn dumpImpl(data: struct { ref: Ref, symbol: *js_ast.Symbol }, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { try std.fmt.format( writer, "Ref[inner={d}, src={d}, .{s}; original_name={s}, uses={d}]", @@ -171,8 +164,8 @@ pub const Ref = packed struct(u64) { data.ref.inner_index, data.ref.source_index, @tagName(data.ref.tag), - symbol.original_name, - symbol.use_count_estimate, + data.symbol.original_name, + data.symbol.use_count_estimate, }, ); } @@ -227,4 +220,17 @@ pub const Ref = packed struct(u64) { pub fn jsonStringify(self: *const Ref, writer: anytype) !void { return try writer.write([2]u32{ self.sourceIndex(), self.innerIndex() }); } + + pub fn getSymbol(ref: Ref, symbol_table: anytype) *js_ast.Symbol { + // Different parts of the bundler use different formats of the symbol table + // In the parser you only have one array, and .sourceIndex() is ignored. + // In the bundler, you have a 2D array where both parts of the ref are used. 
+ const resolved_symbol_table = switch (@TypeOf(symbol_table)) { + *const std.ArrayList(js_ast.Symbol) => symbol_table.items, + *std.ArrayList(js_ast.Symbol) => symbol_table.items, + []js_ast.Symbol => symbol_table, + else => |T| @compileError("Unsupported type to Ref.getSymbol: " ++ @typeName(T)), + }; + return &resolved_symbol_table[ref.innerIndex()]; + } }; diff --git a/src/bun.js/ConsoleObject.zig b/src/bun.js/ConsoleObject.zig index a2c7be5e8d..185924190a 100644 --- a/src/bun.js/ConsoleObject.zig +++ b/src/bun.js/ConsoleObject.zig @@ -2317,7 +2317,8 @@ pub const Formatter = struct { .Object, Writer, writer_, - toJSONFunction.call(this.globalThis, value, &.{}), + toJSONFunction.call(this.globalThis, value, &.{}) catch |err| + this.globalThis.takeException(err), .Object, enable_ansi_colors, ); @@ -2332,7 +2333,8 @@ pub const Formatter = struct { .Object, Writer, writer_, - toJSONFunction.call(this.globalThis, value, &.{}), + toJSONFunction.call(this.globalThis, value, &.{}) catch |err| + this.globalThis.takeException(err), .Object, enable_ansi_colors, ); @@ -2384,15 +2386,9 @@ pub const Formatter = struct { writer.writeAll("Promise { " ++ comptime Output.prettyFmt("", enable_ansi_colors)); switch (JSPromise.status(@as(*JSPromise, @ptrCast(value.asObjectRef().?)), this.globalThis.vm())) { - JSPromise.Status.Pending => { - writer.writeAll(""); - }, - JSPromise.Status.Fulfilled => { - writer.writeAll(""); - }, - JSPromise.Status.Rejected => { - writer.writeAll(""); - }, + .pending => writer.writeAll(""), + .fulfilled => writer.writeAll(""), + .rejected => writer.writeAll(""), } writer.writeAll(comptime Output.prettyFmt("", enable_ansi_colors) ++ " }"); @@ -2580,15 +2576,16 @@ pub const Formatter = struct { writer.writeAll("}"); }, .toJSON => { - if (value.get(this.globalThis, "toJSON")) |func| { - const result = func.call(this.globalThis, value, &.{}); - if (result.toError() == null) { - const prev_quote_keys = this.quote_keys; - this.quote_keys = true; - defer this.quote_keys = prev_quote_keys; - this.printAs(.Object, Writer, writer_, result, value.jsType(), enable_ansi_colors); - return; - } + if (value.get(this.globalThis, "toJSON")) |func| brk: { + const result = func.call(this.globalThis, value, &.{}) catch { + this.globalThis.clearException(); + break :brk; + }; + const prev_quote_keys = this.quote_keys; + this.quote_keys = true; + defer this.quote_keys = prev_quote_keys; + this.printAs(.Object, Writer, writer_, result, value.jsType(), enable_ansi_colors); + return; } writer.writeAll("{}"); diff --git a/src/bun.js/api/bun/socket.zig b/src/bun.js/api/bun/socket.zig index 49401fdba2..3c517b8e5b 100644 --- a/src/bun.js/api/bun/socket.zig +++ b/src/bun.js/api/bun/socket.zig @@ -210,10 +210,8 @@ const Handlers = struct { return false; } - const result = onError.call(globalObject, thisValue, err); - if (result.isAnyError()) { - _ = vm.uncaughtException(globalObject, result, false); - } + _ = onError.call(globalObject, thisValue, err) catch |e| + globalObject.reportActiveExceptionAsUnhandled(e); return true; } @@ -1438,13 +1436,9 @@ fn NewSocket(comptime ssl: bool) type { const globalObject = handlers.globalObject; const this_value = this.getThisValue(globalObject); - const result = callback.call(globalObject, this_value, &[_]JSValue{ - this_value, - }); - - if (result.toError()) |err_value| { - _ = handlers.callErrorHandler(this_value, &[_]JSC.JSValue{ this_value, err_value }); - } + _ = callback.call(globalObject, this_value, &.{this_value}) catch |err| { + _ = 
handlers.callErrorHandler(this_value, &.{ this_value, globalObject.takeException(err) }); + }; } pub fn onTimeout( this: *This, @@ -1468,14 +1462,11 @@ fn NewSocket(comptime ssl: bool) type { const globalObject = handlers.globalObject; const this_value = this.getThisValue(globalObject); - const result = callback.call(globalObject, this_value, &[_]JSValue{ - this_value, - }); - - if (result.toError()) |err_value| { - _ = handlers.callErrorHandler(this_value, &[_]JSC.JSValue{ this_value, err_value }); - } + _ = callback.call(globalObject, this_value, &.{this_value}) catch |err| { + _ = handlers.callErrorHandler(this_value, &.{ this_value, globalObject.takeException(err) }); + }; } + fn handleConnectError(this: *This, errno: c_int) void { log("onConnectError({d}, {})", .{ errno, this.ref_count }); const needs_deref = !this.socket.isDetached(); @@ -1522,7 +1513,7 @@ fn NewSocket(comptime ssl: bool) type { const result = callback.call(globalObject, this_value, &[_]JSValue{ this_value, err_value, - }); + }) catch |e| globalObject.takeException(e); if (result.toError()) |err_val| { if (handlers.rejectPromise(err_val)) return; @@ -1643,7 +1634,7 @@ fn NewSocket(comptime ssl: bool) type { defer vm.eventLoop().exit(); const result = callback.call(globalObject, this_value, &[_]JSValue{ this_value, - }); + }) catch |err| globalObject.takeException(err); if (result.toError()) |err| { defer this.markInactive(); @@ -1692,13 +1683,9 @@ fn NewSocket(comptime ssl: bool) type { const globalObject = handlers.globalObject; const this_value = this.getThisValue(globalObject); - const result = callback.call(globalObject, this_value, &[_]JSValue{ - this_value, - }); - - if (result.toError()) |err_value| { - _ = handlers.callErrorHandler(this_value, &[_]JSC.JSValue{ this_value, err_value }); - } + _ = callback.call(globalObject, this_value, &.{this_value}) catch |err| { + _ = handlers.callErrorHandler(this_value, &.{ this_value, globalObject.takeException(err) }); + }; } pub fn onHandshake(this: *This, _: Socket, success: i32, ssl_error: uws.us_bun_verify_error_t) void { @@ -1738,7 +1725,7 @@ fn NewSocket(comptime ssl: bool) type { // open callback only have 1 parameters and its the socket // you should use getAuthorizationError and authorized getter to get those values in this case if (is_open) { - result = callback.call(globalObject, this_value, &[_]JSValue{this_value}); + result = callback.call(globalObject, this_value, &[_]JSValue{this_value}) catch |err| globalObject.takeException(err); // only call onOpen once for clients if (!handlers.is_server) { @@ -1769,7 +1756,7 @@ fn NewSocket(comptime ssl: bool) type { this_value, JSValue.jsBoolean(authorized), authorization_error, - }); + }) catch |err| globalObject.takeException(err); } if (result.toError()) |err_value| { @@ -1808,14 +1795,12 @@ fn NewSocket(comptime ssl: bool) type { const globalObject = handlers.globalObject; const this_value = this.getThisValue(globalObject); - const result = callback.call(globalObject, this_value, &[_]JSValue{ + _ = callback.call(globalObject, this_value, &[_]JSValue{ this_value, JSValue.jsNumber(@as(i32, err)), - }); - - if (result.toError()) |err_value| { - _ = handlers.callErrorHandler(this_value, &[_]JSC.JSValue{ this_value, err_value }); - } + }) catch |e| { + _ = handlers.callErrorHandler(this_value, &.{ this_value, globalObject.takeException(e) }); + }; } pub fn onData(this: *This, _: Socket, data: []const u8) void { @@ -1840,14 +1825,12 @@ fn NewSocket(comptime ssl: bool) type { defer scope.exit(); // const encoding = 
handlers.encoding; - const result = callback.call(globalObject, this_value, &[_]JSValue{ + _ = callback.call(globalObject, this_value, &[_]JSValue{ this_value, output_value, - }); - - if (result.toError()) |err_value| { - _ = handlers.callErrorHandler(this_value, &[_]JSC.JSValue{ this_value, err_value }); - } + }) catch |err| { + _ = handlers.callErrorHandler(this_value, &.{ this_value, globalObject.takeException(err) }); + }; } pub fn getData( diff --git a/src/bun.js/api/bun/udp_socket.zig b/src/bun.js/api/bun/udp_socket.zig index a443346490..c2984b5629 100644 --- a/src/bun.js/api/bun/udp_socket.zig +++ b/src/bun.js/api/bun/udp_socket.zig @@ -42,10 +42,9 @@ fn onDrain(socket: *uws.udp.Socket) callconv(.C) void { const event_loop = vm.eventLoop(); event_loop.enter(); defer event_loop.exit(); - const result = callback.call(this.globalThis, this.thisValue, &[_]JSValue{this.thisValue}); - if (result.toError()) |err| { - _ = this.callErrorHandler(.zero, &[_]JSValue{err}); - } + _ = callback.call(this.globalThis, this.thisValue, &.{this.thisValue}) catch |err| { + _ = this.callErrorHandler(.zero, &.{this.globalThis.takeException(err)}); + }; } fn onData(socket: *uws.udp.Socket, buf: *uws.udp.PacketBuffer, packets: c_int) callconv(.C) void { @@ -91,16 +90,14 @@ fn onData(socket: *uws.udp.Socket, buf: *uws.udp.PacketBuffer, packets: c_int) c _ = udpSocket.js_refcount.fetchAdd(1, .monotonic); defer _ = udpSocket.js_refcount.fetchSub(1, .monotonic); - const result = callback.call(globalThis, udpSocket.thisValue, &[_]JSValue{ + _ = callback.call(globalThis, udpSocket.thisValue, &.{ udpSocket.thisValue, udpSocket.config.binary_type.toJS(slice, globalThis), JSC.jsNumber(port), JSC.ZigString.init(std.mem.span(hostname.?)).toJS(globalThis), - }); - - if (result.toError()) |err| { - _ = udpSocket.callErrorHandler(.zero, &[_]JSValue{err}); - } + }) catch |err| { + _ = udpSocket.callErrorHandler(.zero, &.{udpSocket.globalThis.takeException(err)}); + }; } } @@ -368,10 +365,7 @@ pub const UDPSocket = struct { return false; } - const result = callback.call(globalThis, thisValue, err); - if (result.isAnyError()) { - _ = vm.uncaughtException(globalThis, result, false); - } + _ = callback.call(globalThis, thisValue, err) catch |e| globalThis.reportActiveExceptionAsUnhandled(e); return true; } diff --git a/src/bun.js/api/ffi.zig b/src/bun.js/api/ffi.zig index 9839ef3ad7..a21f2a3482 100644 --- a/src/bun.js/api/ffi.zig +++ b/src/bun.js/api/ffi.zig @@ -727,26 +727,11 @@ pub const FFI = struct { }, }; - const FFI_HEADER: string = @embedFile("./FFI.h"); - pub inline fn ffiHeader() string { - if (comptime Environment.isDebug) { - const dirpath = comptime bun.Environment.base_path ++ (bun.Dirname.dirname(u8, @src().file) orelse ""); - var buf: bun.PathBuffer = undefined; - const user = bun.getUserName(&buf) orelse ""; - const dir = std.mem.replaceOwned( - u8, - default_allocator, - dirpath, - "jarred", - user, - ) catch unreachable; - const runtime_path = std.fs.path.join(default_allocator, &[_]string{ dir, "FFI.h" }) catch unreachable; - const file = std.fs.openFileAbsolute(runtime_path, .{}) catch @panic("Missing bun/src/bun.js/api/FFI.h."); - defer file.close(); - return file.readToEndAlloc(default_allocator, file.getEndPos() catch unreachable) catch unreachable; - } else { - return FFI_HEADER; - } + pub fn ffiHeader() string { + return if (Environment.embed_code) + @embedFile("./FFI.h") + else + bun.runtimeEmbedFile(.src, "bun.js/api/FFI.h"); } pub fn handleTCCError(ctx: ?*anyopaque, message: [*c]const u8) 
callconv(.C) void { @@ -1082,11 +1067,7 @@ pub const FFI = struct { } } - if (comptime Environment.isRelease) { - try writer.writeAll(bun.asByteSlice(FFI_HEADER)); - } else { - try writer.writeAll(ffiHeader()); - } + try writer.writeAll(ffiHeader()); // -- Generate the FFI function symbol try writer.writeAll("/* --- The Function To Call */\n"); @@ -1252,11 +1233,7 @@ pub const FFI = struct { } } - if (comptime Environment.isRelease) { - try writer.writeAll(bun.asByteSlice(FFI_HEADER)); - } else { - try writer.writeAll(ffiHeader()); - } + try writer.writeAll(ffiHeader()); // -- Generate the FFI function symbol try writer.writeAll("\n \n/* --- The Callback Function */\n"); diff --git a/src/bun.js/api/html_rewriter.zig b/src/bun.js/api/html_rewriter.zig index ea59070a26..ef86b5083f 100644 --- a/src/bun.js/api/html_rewriter.zig +++ b/src/bun.js/api/html_rewriter.zig @@ -880,14 +880,14 @@ fn HandlerCallback( @field(zig_element, field_name) = value; defer @field(zig_element, field_name) = null; - var result = @field(this, callback_name).?.call( + const result = @field(this, callback_name).?.call( this.global, if (comptime @hasField(HandlerType, "thisObject")) @field(this, "thisObject") else JSValue.zero, &.{zig_element.toJS(this.global)}, - ); + ) catch |err| this.global.takeException(err); if (!result.isUndefinedOrNull()) { if (result.isError() or result.isAggregateError(this.global)) { @@ -896,7 +896,7 @@ fn HandlerCallback( if (result.asAnyPromise()) |promise| { this.global.bunVM().waitForPromise(promise); - const fail = promise.status(this.global.vm()) == .Rejected; + const fail = promise.status(this.global.vm()) == .rejected; if (fail) { _ = this.global.bunVM().unhandledRejection(this.global, promise.result(this.global.vm()), promise.asValue(this.global)); } diff --git a/src/bun.js/api/js_brotli.zig b/src/bun.js/api/js_brotli.zig index 3e9728bd17..1141ecc88b 100644 --- a/src/bun.js/api/js_brotli.zig +++ b/src/bun.js/api/js_brotli.zig @@ -162,18 +162,14 @@ pub const BrotliEncoder = struct { defer _ = this.has_pending_activity.fetchSub(1, .monotonic); this.drainFreelist(); - const result = this.callback_value.get().?.call( + _ = this.callback_value.get().?.call( this.globalThis, .undefined, if (this.write_failure != null) &.{this.write_failure.?.toError(this.globalThis)} else &.{ .null, this.collectOutputValue() }, - ); - - if (result.toError()) |err| { - _ = this.globalThis.bunVM().uncaughtException(this.globalThis, err, false); - } + ) catch |err| this.globalThis.reportActiveExceptionAsUnhandled(err); } // We can only run one encode job at a time @@ -525,18 +521,14 @@ pub const BrotliDecoder = struct { defer _ = this.has_pending_activity.fetchSub(1, .monotonic); this.drainFreelist(); - const result = this.callback_value.get().?.call( + _ = this.callback_value.get().?.call( this.globalThis, .undefined, if (this.write_failure != null) &.{this.write_failure.?.toError(this.globalThis)} else &.{ .null, this.collectOutputValue() }, - ); - - if (result.toError()) |err| { - _ = this.globalThis.bunVM().uncaughtException(this.globalThis, err, false); - } + ) catch |err| this.globalThis.reportActiveExceptionAsUnhandled(err); } fn drainFreelist(this: *BrotliDecoder) void { diff --git a/src/bun.js/api/js_zlib.zig b/src/bun.js/api/js_zlib.zig index 7866f6a7da..e4dfd6e0c9 100644 --- a/src/bun.js/api/js_zlib.zig +++ b/src/bun.js/api/js_zlib.zig @@ -179,7 +179,7 @@ pub const ZlibEncoder = struct { const thisctx = arguments.ptr[2]; const is_last = callframe.argument(3).toBoolean(); - const push_fn = 
thisctx.get(globalThis, "push") orelse { + const push_fn: JSC.JSValue = thisctx.get(globalThis, "push") orelse { globalThis.throw("are you sure this is a stream.Transform?", .{}); return .zero; }; @@ -207,7 +207,8 @@ pub const ZlibEncoder = struct { err_buffer_too_large.throw(); return .zero; } - if (this.output.items.len > 0) runCallback(push_fn, globalThis, thisctx, &.{this.collectOutputValue()}) orelse return .zero; + if (this.output.items.len > 0) _ = push_fn.call(globalThis, thisctx, &.{this.collectOutputValue()}) catch + return .zero; if (done) break; } } @@ -217,7 +218,8 @@ pub const ZlibEncoder = struct { globalThis.ERR_BUFFER_TOO_LARGE("Cannot create a Buffer larger than {d} bytes", .{this.maxOutputLength}).throw(); return .zero; } - if (this.output.items.len > 0) runCallback(push_fn, globalThis, thisctx, &.{this.collectOutputValue()}) orelse return .zero; + if (this.output.items.len > 0) _ = push_fn.call(globalThis, thisctx, &.{this.collectOutputValue()}) catch + return .zero; } return .undefined; } @@ -270,18 +272,14 @@ pub const ZlibEncoder = struct { defer _ = this.has_pending_activity.fetchSub(1, .monotonic); this.drainFreelist(); - const result = this.callback_value.get().?.call( + _ = this.callback_value.get().?.call( this.globalThis, .undefined, if (this.write_failure != null) &.{this.write_failure.?.toError(this.globalThis)} else &.{ .null, this.collectOutputValue() }, - ); - - if (result.toError()) |err| { - _ = this.globalThis.bunVM().uncaughtException(this.globalThis, err, false); - } + ) catch |err| this.globalThis.reportActiveExceptionAsUnhandled(err); } pub fn hasPendingActivity(this: *@This()) callconv(.C) bool { @@ -346,7 +344,7 @@ pub const ZlibEncoder = struct { this.encoder.stream.write(input.slice(), output, true) catch |e| { any = true; _ = this.encoder.pending_encode_job_count.fetchSub(1, .monotonic); - this.encoder.write_failure = JSC.DeferredError.from(.plainerror, .ERR_OPERATION_FAILED, "ZlibError: {s}", .{@errorName(e)}); // TODO propogate better error + this.encoder.write_failure = JSC.DeferredError.from(.plainerror, .ERR_OPERATION_FAILED, "ZlibError: {s}", .{@errorName(e)}); // TODO propagate better error break :outer; }; if (this.encoder.output.items.len > this.encoder.maxOutputLength) { @@ -607,7 +605,7 @@ pub const ZlibDecoder = struct { const thisctx = arguments.ptr[2]; const is_last = callframe.argument(3).toBoolean(); - const push_fn = thisctx.get(globalThis, "push") orelse { + const push_fn: JSC.JSValue = thisctx.get(globalThis, "push") orelse { globalThis.throw("are you sure this is a stream.Transform?", .{}); return .zero; }; @@ -635,7 +633,8 @@ pub const ZlibDecoder = struct { err_buffer_too_large.throw(); return .zero; } - if (this.output.items.len > 0) runCallback(push_fn, globalThis, thisctx, &.{this.collectOutputValue()}) orelse return .zero; + if (this.output.items.len > 0) _ = push_fn.call(globalThis, thisctx, &.{this.collectOutputValue()}) catch + return .zero; if (done) break; } } @@ -645,7 +644,8 @@ pub const ZlibDecoder = struct { globalThis.ERR_BUFFER_TOO_LARGE("Cannot create a Buffer larger than {d} bytes", .{this.maxOutputLength}).throw(); return .zero; } - if (this.output.items.len > 0) runCallback(push_fn, globalThis, thisctx, &.{this.collectOutputValue()}) orelse return .zero; + if (this.output.items.len > 0) _ = push_fn.call(globalThis, thisctx, &.{this.collectOutputValue()}) catch + return .zero; } return .undefined; } @@ -698,18 +698,14 @@ pub const ZlibDecoder = struct { defer _ = this.has_pending_activity.fetchSub(1, 
.monotonic); this.drainFreelist(); - const result = this.callback_value.get().?.call( + _ = this.callback_value.get().?.call( this.globalThis, .undefined, if (this.write_failure != null) &.{this.write_failure.?.toError(this.globalThis)} else &.{ .null, this.collectOutputValue() }, - ); - - if (result.toError()) |err| { - _ = this.globalThis.bunVM().uncaughtException(this.globalThis, err, false); - } + ) catch |err| this.globalThis.reportActiveExceptionAsUnhandled(err); } pub fn hasPendingActivity(this: *@This()) callconv(.C) bool { @@ -967,9 +963,3 @@ fn handleTransformSyncStreamError(err: anyerror, globalThis: *JSC.JSGlobalObject closed.* = true; return .zero; } - -fn runCallback(callback: JSC.JSValue, globalObject: *JSC.JSGlobalObject, thisValue: JSC.JSValue, arguments: []const JSC.JSValue) ?void { - _ = callback.call(globalObject, thisValue, arguments); - if (globalObject.hasException()) return null; - return; -} diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index cdbdf2c0d5..e3f3f43ae9 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -2990,7 +2990,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp this.drainMicrotasks(); switch (promise.status(globalThis.vm())) { - .Pending => { + .pending => { streamLog("promise still Pending", .{}); if (!this.flags.has_written_status) { response_stream.sink.onFirstWrite = null; @@ -3015,7 +3015,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp // the response_stream should be GC'd }, - .Fulfilled => { + .fulfilled => { streamLog("promise Fulfilled", .{}); var readable_stream_ref = this.readable_stream_ref; this.readable_stream_ref = .{}; @@ -3026,7 +3026,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp this.handleResolveStream(); }, - .Rejected => { + .rejected => { streamLog("promise Rejected", .{}); var readable_stream_ref = this.readable_stream_ref; this.readable_stream_ref = .{}; @@ -3236,16 +3236,17 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp return; } - var wait_for_promise = false; var vm = this.vm; if (response_value.asAnyPromise()) |promise| { // If we immediately have the value available, we can skip the extra event loop tick - switch (promise.status(vm.global.vm())) { - .Pending => {}, - .Fulfilled => { - const fulfilled_value = promise.result(vm.global.vm()); - + switch (promise.unwrap(vm.global.vm(), .mark_handled)) { + .pending => { + ctx.ref(); + response_value.then(this.globalThis, ctx, RequestContext.onResolve, RequestContext.onReject); + return; + }, + .fulfilled => |fulfilled_value| { // if you return a Response object or a Promise // but you upgraded the connection to a WebSocket // just ignore the Response object. It doesn't do anything. 
@@ -3285,19 +3286,11 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp ctx.render(response); return; }, - .Rejected => { - promise.setHandled(vm.global.vm()); - ctx.handleReject(promise.result(vm.global.vm())); + .rejected => |err| { + ctx.handleReject(err); return; }, } - wait_for_promise = true; - } - - if (wait_for_promise) { - ctx.ref(); - response_value.then(this.globalThis, ctx, RequestContext.onResolve, RequestContext.onReject); - return; } } @@ -3703,7 +3696,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp server.globalThis, server.thisObject, &.{value}, - ); + ) catch |err| server.globalThis.takeException(err); defer result.ensureStillAlive(); if (!result.isEmptyOrUndefinedOrNull()) { if (result.toError()) |err| { @@ -3733,11 +3726,18 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp assert(ctx.server != null); var vm = ctx.server.?.vm; - switch (promise.status(vm.global.vm())) { - .Pending => {}, - .Fulfilled => { - const fulfilled_value = promise.result(vm.global.vm()); - + switch (promise.unwrap(vm.global.vm(), .mark_handled)) { + .pending => { + ctx.flags.is_error_promise_pending = true; + ctx.ref(); + promise_js.then( + ctx.server.?.globalThis, + ctx, + RequestContext.onResolve, + RequestContext.onReject, + ); + }, + .fulfilled => |fulfilled_value| { // if you return a Response object or a Promise // but you upgraded the connection to a WebSocket // just ignore the Response object. It doesn't do anything. @@ -3773,24 +3773,11 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp ctx.render(response); return; }, - .Rejected => { - promise.setHandled(vm.global.vm()); - ctx.finishRunningErrorHandler(promise.result(vm.global.vm()), status); + .rejected => |err| { + ctx.finishRunningErrorHandler(err, status); return; }, } - - // Promise is not fulfilled yet - { - ctx.flags.is_error_promise_pending = true; - ctx.ref(); - promise_js.then( - ctx.server.?.globalThis, - ctx, - RequestContext.onResolve, - RequestContext.onReject, - ); - } } pub fn runErrorHandlerWithStatusCode( @@ -4191,10 +4178,8 @@ pub const WebSocketServer = struct { pub fn runErrorCallback(this: *const Handler, vm: *JSC.VirtualMachine, globalObject: *JSC.JSGlobalObject, error_value: JSC.JSValue) void { const onError = this.onError; if (!onError.isEmptyOrUndefinedOrNull()) { - const err_ret = onError.call(globalObject, .undefined, &.{error_value}); - if (err_ret.toError()) |actual_err| { - _ = vm.uncaughtException(globalObject, actual_err, false); - } + _ = onError.call(globalObject, .undefined, &.{error_value}) catch |err| + this.globalObject.reportActiveExceptionAsUnhandled(err); return; } @@ -4515,10 +4500,11 @@ const Corker = struct { pub fn run(this: *Corker) void { const this_value = this.this_value; - this.result = if (this_value == .zero) - this.callback.call(this.globalObject, .undefined, this.args) - else - this.callback.call(this.globalObject, this_value, this.args); + this.result = this.callback.call( + this.globalObject, + if (this_value == .zero) .undefined else this_value, + this.args, + ) catch |err| this.globalObject.takeException(err); } }; @@ -4684,7 +4670,7 @@ pub const ServerWebSocket = struct { if (result.asAnyPromise()) |promise| { switch (promise.status(globalObject.vm())) { - .Rejected => { + .rejected => { _ = promise.result(globalObject.vm()); return; }, @@ -4759,16 +4745,15 @@ pub const ServerWebSocket = struct { loop.enter(); defer loop.exit(); - const 
result = cb.call( + _ = cb.call( globalThis, .undefined, &[_]JSC.JSValue{ this.getThisValue(), this.binaryToJS(globalThis, data) }, - ); - - if (result.toError()) |err| { + ) catch |e| { + const err = globalThis.takeException(e); log("onPing error", .{}); handler.runErrorCallback(vm, globalThis, err); - } + }; } pub fn onPong(this: *ServerWebSocket, _: uws.AnyWebSocket, data: []const u8) void { @@ -4788,16 +4773,15 @@ pub const ServerWebSocket = struct { loop.enter(); defer loop.exit(); - const result = cb.call( + _ = cb.call( globalThis, .undefined, &[_]JSC.JSValue{ this.getThisValue(), this.binaryToJS(globalThis, data) }, - ); - - if (result.toError()) |err| { + ) catch |e| { + const err = globalThis.takeException(e); log("onPong error", .{}); handler.runErrorCallback(vm, globalThis, err); - } + }; } pub fn onClose(this: *ServerWebSocket, _: uws.AnyWebSocket, code: i32, message: []const u8) void { @@ -4821,16 +4805,15 @@ pub const ServerWebSocket = struct { loop.enter(); defer loop.exit(); str.markUTF8(); - const result = handler.onClose.call( + _ = handler.onClose.call( globalObject, .undefined, &[_]JSC.JSValue{ this.getThisValue(), JSValue.jsNumber(code), str.toJS(globalObject) }, - ); - - if (result.toError()) |err| { + ) catch |e| { + const err = globalObject.takeException(e); log("onClose error", .{}); handler.runErrorCallback(vm, globalObject, err); - } + }; } this.this_value.unprotect(); @@ -6297,7 +6280,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp this.globalThis, this.thisObject, &[_]JSC.JSValue{request.toJS(this.globalThis)}, - ); + ) catch |err| this.globalThis.takeException(err); if (response_value.isAnyError()) { return JSC.JSPromise.rejectedPromiseValue(ctx, response_value); @@ -6955,7 +6938,8 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp const request_value = args[0]; request_value.ensureStillAlive(); - const response_value = this.config.onRequest.call(this.globalThis, this.thisObject, &args); + const response_value = this.config.onRequest.call(this.globalThis, this.thisObject, &args) catch |err| + this.globalThis.takeException(err); defer { // uWS request will not live longer than this function request_object.request_context.detachRequest(); @@ -7019,7 +7003,8 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp }; const request_value = args[0]; request_value.ensureStillAlive(); - const response_value = this.config.onRequest.call(this.globalThis, this.thisObject, &args); + const response_value = this.config.onRequest.call(this.globalThis, this.thisObject, &args) catch |err| + this.globalThis.takeException(err); defer { // uWS request will not live longer than this function request_object.request_context.detachRequest(); diff --git a/src/bun.js/bindings/BunObject.cpp b/src/bun.js/bindings/BunObject.cpp index 0b489400d4..46976c1f9b 100644 --- a/src/bun.js/bindings/BunObject.cpp +++ b/src/bun.js/bindings/BunObject.cpp @@ -318,6 +318,12 @@ static JSValue constructBunShell(VM& vm, JSObject* bunObject) return bunShell; } +// This value currently depends on a zig feature flag +extern "C" JSC::EncodedJSValue Bun__getTemporaryDevServer(JSC::JSGlobalObject* bunObject); +static JSValue constructBunKit(VM& vm, JSObject* bunObject) { + return JSC::JSValue::decode(Bun__getTemporaryDevServer(bunObject->globalObject())); +} + static JSValue constructDNSObject(VM& vm, JSObject* bunObject) { JSGlobalObject* globalObject = bunObject->globalObject(); @@ -631,6 +637,7 @@ 
JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj version constructBunVersion ReadOnly|DontDelete|PropertyCallback which BunObject_callback_which DontDelete|Function 1 write BunObject_callback_write DontDelete|Function 1 + wipDevServerDoNotUseYet constructBunKit DontEnum|ReadOnly|DontDelete|PropertyCallback @end */ diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp index 38ac73c926..5bc3358a5e 100644 --- a/src/bun.js/bindings/ZigGlobalObject.cpp +++ b/src/bun.js/bindings/ZigGlobalObject.cpp @@ -692,7 +692,7 @@ static String computeErrorInfo(JSC::VM& vm, Vector& stackTrace, Ordi return computeErrorInfoWithoutPrepareStackTrace(vm, globalObject, lexicalGlobalObject, stackTrace, line, column, sourceURL, errorInstance); } -// TODO: @paperdave: remove this wrapper and make the WTF::Function from JavaScriptCore expeect OrdinalNumber instead of unsigned. +// TODO: @paperdave: remove this wrapper and make the WTF::Function from JavaScriptCore expect OrdinalNumber instead of unsigned. static String computeErrorInfoWrapper(JSC::VM& vm, Vector& stackTrace, unsigned int& line_in, unsigned int& column_in, String& sourceURL, JSObject* errorInstance) { OrdinalNumber line = OrdinalNumber::fromOneBasedInt(line_in); diff --git a/src/bun.js/bindings/ZigGlobalObject.h b/src/bun.js/bindings/ZigGlobalObject.h index d4ef700fb7..aa94f5abde 100644 --- a/src/bun.js/bindings/ZigGlobalObject.h +++ b/src/bun.js/bindings/ZigGlobalObject.h @@ -76,10 +76,11 @@ using DOMGuardedObjectSet = HashSet; class GlobalObject : public Bun::GlobalScope { using Base = Bun::GlobalScope; + +public: // Move this to the front for better cache locality. void* m_bunVM; -public: static const JSC::ClassInfo s_info; static const JSC::GlobalObjectMethodTable s_globalObjectMethodTable; @@ -414,7 +415,7 @@ public: // The original, unmodified Error.prepareStackTrace. // - // We set a default value for this to mimick Node.js behavior It is a + // We set a default value for this to mimic Node.js behavior It is a // separate from the user-facing value so that we can tell if the user // really set it or if it's just the default value. // @@ -484,10 +485,11 @@ public: #include "ZigGeneratedClasses+lazyStructureHeader.h" + void finishCreation(JSC::VM&); + private: void addBuiltinGlobals(JSC::VM&); - void finishCreation(JSC::VM&); friend void WebCore::JSBuiltinInternalFunctions::initialize(Zig::GlobalObject&); WebCore::JSBuiltinInternalFunctions m_builtinInternalFunctions; std::unique_ptr m_constructors; diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp index 8ee1a49517..e8addc06b1 100644 --- a/src/bun.js/bindings/bindings.cpp +++ b/src/bun.js/bindings/bindings.cpp @@ -1,5 +1,3 @@ - - #include "root.h" #include "JavaScriptCore/Exception.h" @@ -2289,19 +2287,15 @@ bool JSC__JSValue__jestDeepMatch(JSC__JSValue JSValue0, JSC__JSValue JSValue1, J return Bun__deepMatch(obj, subset, globalObject, &scope, replacePropsWithAsymmetricMatchers, false); } -// This is the same as the C API version, except it returns a JSValue which may be a *Exception -// We want that so we can return stack traces. 
-extern "C" JSC__JSValue JSObjectCallAsFunctionReturnValue(JSContextRef ctx, JSC__JSValue object, +extern "C" JSC__JSValue Bun__JSValue__call(JSContextRef ctx, JSC__JSValue object, JSC__JSValue thisObject, size_t argumentCount, const JSValueRef* arguments) { JSC::JSGlobalObject* globalObject = toJS(ctx); JSC::VM& vm = globalObject->vm(); -#if BUN_DEBUG // This is a redundant check, but we add it to make the error message clearer. ASSERT_WITH_MESSAGE(!vm.isCollectorBusyOnCurrentThread(), "Cannot call function inside a finalizer or while GC is running on same thread."); -#endif if (UNLIKELY(!object)) return JSC::JSValue::encode(JSC::JSValue()); @@ -2336,8 +2330,10 @@ extern "C" JSC__JSValue JSObjectCallAsFunctionReturnValue(JSContextRef ctx, JSC_ asyncContextData->putInternalField(vm, 0, restoreAsyncContext); } + auto scope = DECLARE_THROW_SCOPE(globalObject->vm()); if (returnedException.get()) { - return JSC::JSValue::encode(JSC::JSValue(returnedException.get())); + scope.throwException(globalObject, returnedException.get()); + return JSC::JSValue::encode({}); } return JSC::JSValue::encode(result); @@ -2762,7 +2758,7 @@ JSC__JSValue JSC__JSValue__createStringArray(JSC__JSGlobalObject* globalObject, } JSC__JSValue JSC__JSGlobalObject__createAggregateError(JSC__JSGlobalObject* globalObject, - void** errors, uint16_t errors_count, + const JSValue* errors, size_t errors_count, const ZigString* arg3) { JSC::VM& vm = globalObject->vm(); @@ -2778,9 +2774,8 @@ JSC__JSValue JSC__JSGlobalObject__createAggregateError(JSC__JSGlobalObject* glob globalObject->arrayStructureForIndexingTypeDuringAllocation(JSC::ArrayWithContiguous), errors_count))) { - for (uint16_t i = 0; i < errors_count; ++i) { - array->initializeIndexWithoutBarrier( - initializationScope, i, JSC::JSValue(reinterpret_cast(errors[i]))); + for (size_t i = 0; i < errors_count; ++i) { + array->initializeIndexWithoutBarrier(initializationScope, i, errors[i]); } } } @@ -4878,6 +4873,23 @@ void JSC__VM__holdAPILock(JSC__VM* arg0, void* ctx, void (*callback)(void* arg0) callback(ctx); } +// The following two functions are copied 1:1 from JSLockHolder to provide a +// new, more ergonomic binding for interacting with the lock from Zig +// https://github.com/WebKit/WebKit/blob/main/Source/JavaScriptCore/runtime/JSLock.cpp + +extern "C" void JSC__VM__getAPILock(JSC::VM* vm) +{ + // https://github.com/WebKit/WebKit/blob/6cb5017d237ef7cb898582a22f05acca22322845/Source/JavaScriptCore/runtime/JSLock.cpp#L67 + vm->apiLock().lock(); +} + +extern "C" void JSC__VM__releaseAPILock(JSC::VM* vm) +{ + // https://github.com/WebKit/WebKit/blob/6cb5017d237ef7cb898582a22f05acca22322845/Source/JavaScriptCore/runtime/JSLock.cpp#L72 + RefPtr apiLock(&vm->apiLock()); + apiLock->unlock(); +} + void JSC__JSString__iterator(JSC__JSString* arg0, JSC__JSGlobalObject* arg1, void* arg2) { jsstring_iterator* iter = (jsstring_iterator*)arg2; @@ -5819,6 +5831,23 @@ extern "C" bool JSGlobalObject__hasException(JSC::JSGlobalObject* globalObject) return DECLARE_CATCH_SCOPE(globalObject->vm()).exception() != 0; } +extern "C" void JSGlobalObject__clearException(JSC::JSGlobalObject* globalObject) +{ + DECLARE_CATCH_SCOPE(globalObject->vm()).clearException(); +} + +extern "C" JSC::EncodedJSValue JSGlobalObject__tryTakeException(JSC::JSGlobalObject* globalObject) +{ + auto scope = DECLARE_CATCH_SCOPE(globalObject->vm()); + + if (auto exception = scope.exception()) { + scope.clearException(); + return JSC::JSValue::encode(exception); + } + + return {}; +} + CPP_DECL bool 
JSC__GetterSetter__isGetterNull(JSC__GetterSetter* gettersetter) { return gettersetter->isGetterNull(); diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index ded3c9e85b..97c876a47d 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -2168,9 +2168,9 @@ pub const JSPromise = extern struct { pub const namespace = "JSC"; pub const Status = enum(u32) { - Pending = 0, // Making this as 0, so that, we can change the status from Pending to others without masking. - Fulfilled = 1, - Rejected = 2, + pending = 0, // Making this as 0, so that, we can change the status from Pending to others without masking. + fulfilled = 1, + rejected = 2, }; pub fn Weak(comptime T: type) type { @@ -2345,10 +2345,7 @@ pub const JSPromise = extern struct { return JSC__JSPromise__wrap(globalObject, &ctx, @ptrCast(&Wrapper.call)); } - pub fn wrapValue( - globalObject: *JSGlobalObject, - value: JSValue, - ) JSValue { + pub fn wrapValue(globalObject: *JSGlobalObject, value: JSValue) JSValue { if (value.isEmpty()) { return resolvedPromiseValue(globalObject, JSValue.jsUndefined()); } else if (value.isEmptyOrUndefinedOrNull() or !value.isCell()) { @@ -2474,6 +2471,25 @@ pub const JSPromise = extern struct { "status", // "rejectException", }; + + pub const Unwrapped = union(enum) { + pending, + fulfilled: JSValue, + rejected: JSValue, + }; + + pub const UnwrapMode = enum { mark_handled, leave_unhandled }; + + pub fn unwrap(promise: *JSPromise, vm: *VM, mode: UnwrapMode) Unwrapped { + return switch (promise.status(vm)) { + .pending => .pending, + .fulfilled => .{ .fulfilled = promise.result(vm) }, + .rejected => { + if (mode == .mark_handled) promise.setHandled(vm); + return .{ .rejected = promise.result(vm) }; + }, + }; + } }; pub const JSInternalPromise = extern struct { @@ -2497,6 +2513,17 @@ pub const JSInternalPromise = extern struct { cppFn("setHandled", .{ this, vm }); } + pub fn unwrap(promise: *JSInternalPromise, vm: *VM, mode: JSPromise.UnwrapMode) JSPromise.Unwrapped { + return switch (promise.status(vm)) { + .pending => .pending, + .fulfilled => .{ .fulfilled = promise.result(vm) }, + .rejected => { + if (mode == .mark_handled) promise.setHandled(vm); + return .{ .rejected = promise.result(vm) }; + }, + }; + } + pub fn resolvedPromise(globalThis: *JSGlobalObject, value: JSValue) *JSInternalPromise { return cppFn("resolvedPromise", .{ globalThis, value }); } @@ -2692,9 +2719,14 @@ pub const JSInternalPromise = extern struct { }; pub const AnyPromise = union(enum) { - Normal: *JSPromise, - Internal: *JSInternalPromise, + normal: *JSPromise, + internal: *JSInternalPromise, + pub fn unwrap(this: AnyPromise, vm: *VM, mode: JSPromise.UnwrapMode) JSPromise.Unwrapped { + return switch (this) { + inline else => |promise| promise.unwrap(vm, mode), + }; + } pub fn status(this: AnyPromise, vm: *VM) JSPromise.Status { return switch (this) { inline else => |promise| promise.status(vm), @@ -2721,27 +2753,32 @@ pub const AnyPromise = union(enum) { inline else => |promise| promise.resolve(globalThis, value), } } + pub fn reject(this: AnyPromise, globalThis: *JSGlobalObject, value: JSValue) void { switch (this) { inline else => |promise| promise.reject(globalThis, value), } } + pub fn rejectAsHandled(this: AnyPromise, globalThis: *JSGlobalObject, value: JSValue) void { switch (this) { inline else => |promise| promise.rejectAsHandled(globalThis, value), } } + pub fn rejectAsHandledException(this: AnyPromise, globalThis: *JSGlobalObject, value: *Exception) void { 
switch (this) { inline else => |promise| promise.rejectAsHandledException(globalThis, value), } } + pub fn asValue(this: AnyPromise, globalThis: *JSGlobalObject) JSValue { return switch (this) { - .Normal => |promise| promise.asValue(globalThis), - .Internal => |promise| promise.asValue(), + .normal => |promise| promise.asValue(globalThis), + .internal => |promise| promise.asValue(), }; } + extern fn JSC__AnyPromise__wrap(*JSC.JSGlobalObject, JSValue, *anyopaque, *const fn (*anyopaque, *JSC.JSGlobalObject) callconv(.C) JSC.JSValue) void; pub fn wrap( @@ -2867,11 +2904,8 @@ pub const JSGlobalObject = opaque { this.throwValue(err); } - extern fn JSGlobalObject__clearTerminationException(this: *JSGlobalObject) void; - extern fn JSGlobalObject__throwTerminationException(this: *JSGlobalObject) void; pub const throwTerminationException = JSGlobalObject__throwTerminationException; pub const clearTerminationException = JSGlobalObject__clearTerminationException; - extern fn JSGlobalObject__setTimeZone(this: *JSGlobalObject, timeZone: *const ZigString) bool; pub fn setTimeZone(this: *JSGlobalObject, timeZone: *const ZigString) bool { return JSGlobalObject__setTimeZone(this, timeZone); @@ -3196,9 +3230,9 @@ pub const JSGlobalObject = opaque { return this; } - extern fn JSC__JSGlobalObject__createAggregateError(*JSGlobalObject, [*]*anyopaque, u16, *const ZigString) JSValue; - pub fn createAggregateError(globalObject: *JSGlobalObject, errors: [*]*anyopaque, errors_len: u16, message: *const ZigString) JSValue { - return JSC__JSGlobalObject__createAggregateError(globalObject, errors, errors_len, message); + extern fn JSC__JSGlobalObject__createAggregateError(*JSGlobalObject, [*]const JSValue, usize, *const ZigString) JSValue; + pub fn createAggregateError(globalObject: *JSGlobalObject, errors: []const JSValue, message: *const ZigString) JSValue { + return JSC__JSGlobalObject__createAggregateError(globalObject, errors.ptr, errors.len, message); } extern fn JSC__JSGlobalObject__generateHeapSnapshot(*JSGlobalObject) JSValue; @@ -3206,22 +3240,56 @@ pub const JSGlobalObject = opaque { return JSC__JSGlobalObject__generateHeapSnapshot(this); } - extern fn JSGlobalObject__hasException(*JSGlobalObject) bool; pub fn hasException(this: *JSGlobalObject) bool { return JSGlobalObject__hasException(this); } - extern fn JSC__JSGlobalObject__vm(*JSGlobalObject) *VM; + pub fn clearException(this: *JSGlobalObject) void { + return JSGlobalObject__clearException(this); + } + + /// Clears the current exception and returns that value. Requires compile-time + /// proof of an exception via `error.JSError` + pub fn takeException(this: *JSGlobalObject, proof: bun.JSError) JSValue { + switch (proof) { + error.JSError => {}, + } + + if (bun.Environment.allow_assert) + bun.assert(this.hasException()); + + return this.tryTakeException() orelse { + bun.assert(false); + return .zero; + }; + } + + pub fn tryTakeException(this: *JSGlobalObject) ?JSValue { + const value = JSGlobalObject__tryTakeException(this); + if (value == .zero) return null; + return value; + } + + /// This is for the common scenario you are calling into JavaScript, but there is + /// no logical way to handle a thrown exception other than to treat it as unhandled. + /// + /// The pattern: + /// + /// const result = value.call(...) 
catch |err| + /// return global.reportActiveExceptionAsUnhandled(err); + /// + pub fn reportActiveExceptionAsUnhandled(this: *JSGlobalObject, err: bun.JSError) void { + _ = this.bunVM().uncaughtException(this, this.takeException(err), false); + } + pub fn vm(this: *JSGlobalObject) *VM { return JSC__JSGlobalObject__vm(this); } - extern fn JSC__JSGlobalObject__deleteModuleRegistryEntry(*JSGlobalObject, *const ZigString) void; pub fn deleteModuleRegistryEntry(this: *JSGlobalObject, name_: *ZigString) void { return JSC__JSGlobalObject__deleteModuleRegistryEntry(this, name_); } - extern fn JSC__JSGlobalObject__bunVM(*JSGlobalObject) *anyopaque; fn bunVMUnsafe(this: *JSGlobalObject) *anyopaque { return JSC__JSGlobalObject__bunVM(this); } @@ -3401,6 +3469,16 @@ pub const JSGlobalObject = opaque { } pub usingnamespace @import("ErrorCode").JSGlobalObjectExtensions; + + extern fn JSC__JSGlobalObject__bunVM(*JSGlobalObject) *VM; + extern fn JSC__JSGlobalObject__vm(*JSGlobalObject) *VM; + extern fn JSC__JSGlobalObject__deleteModuleRegistryEntry(*JSGlobalObject, *const ZigString) void; + extern fn JSGlobalObject__clearException(*JSGlobalObject) void; + extern fn JSGlobalObject__clearTerminationException(this: *JSGlobalObject) void; + extern fn JSGlobalObject__hasException(*JSGlobalObject) bool; + extern fn JSGlobalObject__setTimeZone(this: *JSGlobalObject, timeZone: *const ZigString) bool; + extern fn JSGlobalObject__tryTakeException(*JSGlobalObject) JSValue; + extern fn JSGlobalObject__throwTerminationException(this: *JSGlobalObject) void; }; pub const JSNativeFn = JSHostFunctionPtr; @@ -3951,42 +4029,38 @@ pub const JSValue = enum(JSValueReprInt) { return cppFn("isInstanceOf", .{ this, global, constructor }); } - pub fn callWithGlobalThis(this: JSValue, globalThis: *JSGlobalObject, args: []const JSC.JSValue) JSC.JSValue { - JSC.markBinding(@src()); - if (comptime bun.Environment.isDebug) { - const loop = JSC.VirtualMachine.get().eventLoop(); - loop.debug.js_call_count_outside_tick_queue += @as(usize, @intFromBool(!loop.debug.is_inside_tick_queue)); - if (loop.debug.track_last_fn_name and !loop.debug.is_inside_tick_queue) { - loop.debug.last_fn_name.deref(); - loop.debug.last_fn_name = this.getName(globalThis); - } - } - return JSC.C.JSObjectCallAsFunctionReturnValue( - globalThis, - this, - globalThis.toJSValue(), - args.len, - @as(?[*]const JSC.C.JSValueRef, @ptrCast(args.ptr)), - ); + pub fn callWithGlobalThis(this: JSValue, globalThis: *JSGlobalObject, args: []const JSC.JSValue) !JSC.JSValue { + return this.call(globalThis, globalThis.toJSValue(), args); } - pub fn call(this: JSValue, globalThis: *JSGlobalObject, thisValue: JSC.JSValue, args: []const JSC.JSValue) JSC.JSValue { + pub extern "c" fn Bun__JSValue__call( + ctx: *JSGlobalObject, + object: JSValue, + thisObject: JSValue, + argumentCount: usize, + arguments: [*]const JSValue, + ) JSValue; + + pub fn call(function: JSValue, global: *JSGlobalObject, thisValue: JSC.JSValue, args: []const JSC.JSValue) bun.JSError!JSC.JSValue { JSC.markBinding(@src()); if (comptime bun.Environment.isDebug) { const loop = JSC.VirtualMachine.get().eventLoop(); loop.debug.js_call_count_outside_tick_queue += @as(usize, @intFromBool(!loop.debug.is_inside_tick_queue)); if (loop.debug.track_last_fn_name and !loop.debug.is_inside_tick_queue) { loop.debug.last_fn_name.deref(); - loop.debug.last_fn_name = this.getName(globalThis); + loop.debug.last_fn_name = function.getName(global); } } - return JSC.C.JSObjectCallAsFunctionReturnValue( - globalThis, - this, + + const 
value = Bun__JSValue__call( + global, + function, thisValue, args.len, - @as(?[*]const JSC.C.JSValueRef, @ptrCast(args.ptr)), + args.ptr, ); + if (value == .zero) return error.JSError; + return value; } /// The value cannot be empty. Check `!this.isEmpty()` before calling this function @@ -4372,12 +4446,12 @@ pub const JSValue = enum(JSValueReprInt) { if (value.isEmptyOrUndefinedOrNull()) return null; if (value.asInternalPromise()) |promise| { return AnyPromise{ - .Internal = promise, + .internal = promise, }; } if (value.asPromise()) |promise| { return AnyPromise{ - .Normal = promise, + .normal = promise, }; } return null; @@ -5769,7 +5843,7 @@ pub const JSValue = enum(JSValueReprInt) { "jestDeepMatch", }; - // For any callback JSValue created in JS that you will not call *immediatly*, you must wrap it + // For any callback JSValue created in JS that you will not call *immediately*, you must wrap it // in an AsyncContextFrame with this function. This allows AsyncLocalStorage to work by // snapshotting it's state and restoring it when called. // - If there is no current context, this returns the callback as-is. @@ -5911,10 +5985,27 @@ pub const VM = extern struct { return cppFn("isJITEnabled", .{}); } + /// deprecated in favor of getAPILock to avoid an annoying callback wrapper pub fn holdAPILock(this: *VM, ctx: ?*anyopaque, callback: *const fn (ctx: ?*anyopaque) callconv(.C) void) void { cppFn("holdAPILock", .{ this, ctx, callback }); } + extern fn JSC__VM__getAPILock(vm: *VM) void; + extern fn JSC__VM__releaseAPILock(vm: *VM) void; + + /// See `JSLock.h` in WebKit for more detail on how the API lock prevents races. + pub fn getAPILock(vm: *VM) Lock { + JSC__VM__getAPILock(vm); + return .{ .vm = vm }; + } + + pub const Lock = struct { + vm: *VM, + pub fn release(lock: Lock) void { + JSC__VM__releaseAPILock(lock.vm); + } + }; + pub fn deferGC(this: *VM, ctx: ?*anyopaque, callback: *const fn (ctx: ?*anyopaque) callconv(.C) void) void { cppFn("deferGC", .{ this, ctx, callback }); } diff --git a/src/bun.js/bindings/headers.h b/src/bun.js/bindings/headers.h index 21c9e059c9..6d5efdb701 100644 --- a/src/bun.js/bindings/headers.h +++ b/src/bun.js/bindings/headers.h @@ -276,7 +276,7 @@ CPP_DECL void JSC__JSFunction__optimizeSoon(JSC__JSValue JSValue0); #pragma mark - JSC::JSGlobalObject CPP_DECL VirtualMachine* JSC__JSGlobalObject__bunVM(JSC__JSGlobalObject* arg0); -CPP_DECL JSC__JSValue JSC__JSGlobalObject__createAggregateError(JSC__JSGlobalObject* arg0, void** arg1, uint16_t arg2, const ZigString* arg3); +CPP_DECL JSC__JSValue JSC__JSGlobalObject__createAggregateError(JSC__JSGlobalObject* arg0, const JSC::JSValue* arg1, size_t arg2, const ZigString* arg3); CPP_DECL void JSC__JSGlobalObject__createSyntheticModule_(JSC__JSGlobalObject* arg0, ZigString* arg1, size_t arg2, JSC__JSValue* arg3, size_t arg4); CPP_DECL void JSC__JSGlobalObject__deleteModuleRegistryEntry(JSC__JSGlobalObject* arg0, ZigString* arg1); CPP_DECL JSC__JSValue JSC__JSGlobalObject__generateHeapSnapshot(JSC__JSGlobalObject* arg0); diff --git a/src/bun.js/bindings/headers.zig b/src/bun.js/bindings/headers.zig index 0803468db9..3b64ff6905 100644 --- a/src/bun.js/bindings/headers.zig +++ b/src/bun.js/bindings/headers.zig @@ -176,7 +176,6 @@ pub extern fn JSC__JSInternalPromise__setHandled(arg0: [*c]bindings.JSInternalPr pub extern fn JSC__JSInternalPromise__status(arg0: [*c]const JSC__JSInternalPromise, arg1: *bindings.VM) u32; pub extern fn JSC__JSFunction__optimizeSoon(JSValue0: JSC__JSValue) void; pub extern fn 
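// Editor's sketch, not part of the patch: `VM.getAPILock()` above returns a `Lock` value so
// callers can hold the JSC API lock with a plain `defer`, instead of the callback shape that
// the deprecated `holdAPILock` required. Assuming the usual `JSC` import and a `vm: *JSC.VM`
// in scope:
fn withAPILock(vm: *JSC.VM) void {
    const lock = vm.getAPILock();
    // Released when this scope exits, even on early return.
    defer lock.release();
    // ... code that touches the JS heap goes here ...
}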
JSC__JSGlobalObject__bunVM(arg0: *bindings.JSGlobalObject) ?*bindings.VirtualMachine; -pub extern fn JSC__JSGlobalObject__createAggregateError(arg0: *bindings.JSGlobalObject, arg1: [*c]*anyopaque, arg2: u16, arg3: [*c]const ZigString) JSC__JSValue; pub extern fn JSC__JSGlobalObject__createSyntheticModule_(arg0: *bindings.JSGlobalObject, arg1: [*c]ZigString, arg2: usize, arg3: [*c]bindings.JSValue, arg4: usize) void; pub extern fn JSC__JSGlobalObject__deleteModuleRegistryEntry(arg0: *bindings.JSGlobalObject, arg1: [*c]ZigString) void; pub extern fn JSC__JSGlobalObject__generateHeapSnapshot(arg0: *bindings.JSGlobalObject) JSC__JSValue; diff --git a/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h b/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h index 0059dd1401..79da6f30f0 100644 --- a/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h +++ b/src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h @@ -900,6 +900,8 @@ public: // std::unique_ptr m_clientSubspaceForXPathResult; // std::unique_ptr m_clientSubspaceForXSLTProcessor; + std::unique_ptr m_clientSubspaceForKitGlobalScope; + std::unique_ptr m_clientSubspaceForAbortController; std::unique_ptr m_clientSubspaceForAbortSignal; std::unique_ptr m_clientSubspaceForErrorEvent; diff --git a/src/bun.js/bindings/webcore/DOMIsoSubspaces.h b/src/bun.js/bindings/webcore/DOMIsoSubspaces.h index 03cd9e07f9..27bd00eab7 100644 --- a/src/bun.js/bindings/webcore/DOMIsoSubspaces.h +++ b/src/bun.js/bindings/webcore/DOMIsoSubspaces.h @@ -892,6 +892,8 @@ public: // std::unique_ptr m_subspaceForXPathNSResolver; // std::unique_ptr m_subspaceForXPathResult; // std::unique_ptr m_subspaceForXSLTProcessor; + + std::unique_ptr m_subspaceForKitGlobalScope; std::unique_ptr m_subspaceForAbortController; std::unique_ptr m_subspaceForAbortSignal; diff --git a/src/bun.js/event_loop.zig b/src/bun.js/event_loop.zig index 14206bf82c..ca50477234 100644 --- a/src/bun.js/event_loop.zig +++ b/src/bun.js/event_loop.zig @@ -485,6 +485,9 @@ pub const Task = TaggedPointerUnion(.{ TimerObject, bun.shell.Interpreter.Builtin.Yes.YesTask, + bun.kit.DevServer.BundleTask, + bun.kit.DevServer.HotReloadTask, + ProcessWaiterThreadTask, RuntimeTranspilerStore, ServerAllConnectionsClosedTask, @@ -878,11 +881,8 @@ pub const EventLoop = struct { this.enter(); defer this.exit(); - const result = callback.call(globalObject, thisValue, arguments); - - if (result.toError()) |err| { - _ = this.virtual_machine.uncaughtException(globalObject, err, false); - } + _ = callback.call(globalObject, thisValue, arguments) catch |err| + globalObject.reportActiveExceptionAsUnhandled(err); } fn tickQueueWithCount(this: *EventLoop, virtual_machine: *VirtualMachine, comptime queue_name: []const u8) u32 { @@ -1030,6 +1030,13 @@ pub const EventLoop = struct { // special case: we return return 0; }, + @field(Task.Tag, @typeName(bun.kit.DevServer.HotReloadTask)) => { + const transform_task = task.get(bun.kit.DevServer.HotReloadTask).?; + transform_task.*.run(); + transform_task.deinit(); + // special case: we return + return 0; + }, @field(Task.Tag, typeBaseName(@typeName(FSWatchTask))) => { var transform_task: *FSWatchTask = task.get(FSWatchTask).?; transform_task.*.run(); @@ -1250,6 +1257,9 @@ pub const EventLoop = struct { var any: *ServerAllConnectionsClosedTask = task.get(ServerAllConnectionsClosedTask).?; any.runFromJSThread(virtual_machine); }, + @field(Task.Tag, typeBaseName(@typeName(bun.kit.DevServer.BundleTask))) => { + task.get(bun.kit.DevServer.BundleTask).?.completeOnMainThread(); + }, else => if 
(Environment.allow_assert) { bun.Output.prettyln("\nUnexpected tag: {s}\n", .{@tagName(task.tag())}); @@ -1514,11 +1524,11 @@ pub const EventLoop = struct { pub fn waitForPromise(this: *EventLoop, promise: JSC.AnyPromise) void { switch (promise.status(this.virtual_machine.jsc)) { - JSC.JSPromise.Status.Pending => { - while (promise.status(this.virtual_machine.jsc) == .Pending) { + .pending => { + while (promise.status(this.virtual_machine.jsc) == .pending) { this.tick(); - if (promise.status(this.virtual_machine.jsc) == .Pending) { + if (promise.status(this.virtual_machine.jsc) == .pending) { this.autoTick(); } } @@ -1530,11 +1540,11 @@ pub const EventLoop = struct { pub fn waitForPromiseWithTermination(this: *EventLoop, promise: JSC.AnyPromise) void { const worker = this.virtual_machine.worker orelse @panic("EventLoop.waitForPromiseWithTermination: worker is not initialized"); switch (promise.status(this.virtual_machine.jsc)) { - JSC.JSPromise.Status.Pending => { - while (!worker.hasRequestedTerminate() and promise.status(this.virtual_machine.jsc) == .Pending) { + .pending => { + while (!worker.hasRequestedTerminate() and promise.status(this.virtual_machine.jsc) == .pending) { this.tick(); - if (!worker.hasRequestedTerminate() and promise.status(this.virtual_machine.jsc) == .Pending) { + if (!worker.hasRequestedTerminate() and promise.status(this.virtual_machine.jsc) == .pending) { this.autoTick(); } } diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index 64a476e67c..94c07d4d24 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -791,7 +791,7 @@ pub const VirtualMachine = struct { /// ["baz", "--bar"] /// "bun foo /// [] - argv: []const []const u8 = &[_][]const u8{"bun"}, + argv: []const []const u8 = &[_][]const u8{}, origin_timer: std.time.Timer = undefined, origin_timestamp: u64 = 0, @@ -895,7 +895,7 @@ pub const VirtualMachine = struct { }; } - const VMHolder = struct { + pub const VMHolder = struct { pub threadlocal var vm: ?*VirtualMachine = null; pub threadlocal var cached_global_object: ?*JSGlobalObject = null; pub export fn Bun__setDefaultGlobalObject(global: *JSGlobalObject) void { @@ -1893,6 +1893,93 @@ pub const VirtualMachine = struct { return vm; } + pub fn initKit(opts: Options) anyerror!*VirtualMachine { + JSC.markBinding(@src()); + const allocator = opts.allocator; + var log: *logger.Log = undefined; + if (opts.log) |__log| { + log = __log; + } else { + log = try allocator.create(logger.Log); + log.* = logger.Log.init(allocator); + } + + VMHolder.vm = try allocator.create(VirtualMachine); + const console = try allocator.create(ConsoleObject); + console.* = ConsoleObject.init(Output.errorWriter(), Output.writer()); + const bundler = try Bundler.init( + allocator, + log, + try Config.configureTransformOptionsForBunVM(allocator, opts.args), + opts.env_loader, + ); + var vm = VMHolder.vm.?; + + vm.* = VirtualMachine{ + .global = undefined, + .transpiler_store = RuntimeTranspilerStore.init(), + .allocator = allocator, + .entry_point = ServerEntryPoint{}, + .bundler = bundler, + .console = console, + .log = log, + .flush_list = std.ArrayList(string).init(allocator), + .origin = bundler.options.origin, + .saved_source_map_table = SavedSourceMap.HashTable.init(bun.default_allocator), + .source_mappings = undefined, + .macros = MacroMap.init(allocator), + .macro_entry_points = @TypeOf(vm.macro_entry_points).init(allocator), + .origin_timer = std.time.Timer.start() catch @panic("Please don't mess with timers."), + .origin_timestamp = 
getOriginTimestamp(), + .ref_strings = JSC.RefString.Map.init(allocator), + .ref_strings_mutex = .{}, + .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId() else {}, + }; + vm.source_mappings.init(&vm.saved_source_map_table); + vm.regular_event_loop.tasks = EventLoop.Queue.init( + default_allocator, + ); + vm.regular_event_loop.immediate_tasks = EventLoop.Queue.init( + default_allocator, + ); + vm.regular_event_loop.next_immediate_tasks = EventLoop.Queue.init( + default_allocator, + ); + vm.regular_event_loop.tasks.ensureUnusedCapacity(64) catch unreachable; + vm.regular_event_loop.concurrent_tasks = .{}; + vm.event_loop = &vm.regular_event_loop; + + vm.bundler.macro_context = null; + vm.bundler.resolver.store_fd = opts.store_fd; + vm.bundler.resolver.prefer_module_field = false; + + vm.bundler.resolver.onWakePackageManager = .{ + .context = &vm.modules, + .handler = ModuleLoader.AsyncModule.Queue.onWakeHandler, + .onDependencyError = JSC.ModuleLoader.AsyncModule.Queue.onDependencyError, + }; + + vm.bundler.configureLinker(); + try vm.bundler.configureFramework(false); + + vm.bundler.macro_context = js_ast.Macro.MacroContext.init(&vm.bundler); + + if (opts.args.serve orelse false) { + vm.bundler.linker.onImportCSS = Bun.onImportCSS; + } + + vm.regular_event_loop.virtual_machine = vm; + vm.smol = opts.smol; + + if (opts.smol) + is_smol_mode = opts.smol; + + vm.configureDebugger(opts.debugger); + vm.body_value_hive_allocator = BodyValueHiveAllocator.init(bun.typedAllocator(JSC.WebCore.Body.Value)); + + return vm; + } + pub threadlocal var source_code_printer: ?*js_printer.BufferPrinter = null; pub fn clearRefString(_: *anyopaque, ref_string: *JSC.RefString) void { @@ -2450,7 +2537,7 @@ pub const VirtualMachine = struct { return; }, else => { - var errors_stack: [256]*anyopaque = undefined; + var errors_stack: [256]JSValue = undefined; const len = @min(log.msgs.items.len, errors_stack.len); const errors = errors_stack[0..len]; @@ -2458,21 +2545,20 @@ pub const VirtualMachine = struct { for (logs, errors) |msg, *current| { current.* = switch (msg.metadata) { - .build => BuildMessage.create(globalThis, globalThis.allocator(), msg).asVoid(), + .build => BuildMessage.create(globalThis, globalThis.allocator(), msg), .resolve => ResolveMessage.create( globalThis, globalThis.allocator(), msg, referrer.toUTF8(bun.default_allocator).slice(), - ).asVoid(), + ), }; } ret.* = ErrorableResolvedSource.err( err, globalThis.createAggregateError( - errors.ptr, - @as(u16, @intCast(errors.len)), + errors, &ZigString.init( std.fmt.allocPrint(globalThis.allocator(), "{d} errors building \"{}\"", .{ errors.len, @@ -2619,11 +2705,11 @@ pub const VirtualMachine = struct { if (this.isWatcherEnabled()) { this.eventLoop().performGC(); switch (this.pending_internal_promise.status(this.global.vm())) { - JSC.JSPromise.Status.Pending => { - while (this.pending_internal_promise.status(this.global.vm()) == .Pending) { + .pending => { + while (this.pending_internal_promise.status(this.global.vm()) == .pending) { this.eventLoop().tick(); - if (this.pending_internal_promise.status(this.global.vm()) == .Pending) { + if (this.pending_internal_promise.status(this.global.vm()) == .pending) { this.eventLoop().autoTick(); } } @@ -2633,11 +2719,11 @@ pub const VirtualMachine = struct { } else { this.eventLoop().performGC(); this.waitForPromise(JSC.AnyPromise{ - .Internal = promise, + .internal = promise, }); } - if (promise.status(this.global.vm()) == .Rejected) + if (promise.status(this.global.vm()) == 
.rejected) return promise; } @@ -2718,7 +2804,7 @@ pub const VirtualMachine = struct { const promise = try this.reloadEntryPoint(entry_path); this.eventLoop().performGC(); this.eventLoop().waitForPromiseWithTermination(JSC.AnyPromise{ - .Internal = promise, + .internal = promise, }); if (this.worker) |worker| { if (worker.hasRequestedTerminate()) { @@ -2735,11 +2821,11 @@ pub const VirtualMachine = struct { if (this.isWatcherEnabled()) { this.eventLoop().performGC(); switch (this.pending_internal_promise.status(this.global.vm())) { - JSC.JSPromise.Status.Pending => { - while (this.pending_internal_promise.status(this.global.vm()) == .Pending) { + .pending => { + while (this.pending_internal_promise.status(this.global.vm()) == .pending) { this.eventLoop().tick(); - if (this.pending_internal_promise.status(this.global.vm()) == .Pending) { + if (this.pending_internal_promise.status(this.global.vm()) == .pending) { this.eventLoop().autoTick(); } } @@ -2747,14 +2833,12 @@ pub const VirtualMachine = struct { else => {}, } } else { - if (promise.status(this.global.vm()) == .Rejected) { + if (promise.status(this.global.vm()) == .rejected) { return promise; } this.eventLoop().performGC(); - this.waitForPromise(JSC.AnyPromise{ - .Internal = promise, - }); + this.waitForPromise(.{ .internal = promise }); } this.eventLoop().autoTick(); @@ -2769,11 +2853,11 @@ pub const VirtualMachine = struct { if (this.isWatcherEnabled()) { this.eventLoop().performGC(); switch (this.pending_internal_promise.status(this.global.vm())) { - JSC.JSPromise.Status.Pending => { - while (this.pending_internal_promise.status(this.global.vm()) == .Pending) { + .pending => { + while (this.pending_internal_promise.status(this.global.vm()) == .pending) { this.eventLoop().tick(); - if (this.pending_internal_promise.status(this.global.vm()) == .Pending) { + if (this.pending_internal_promise.status(this.global.vm()) == .pending) { this.eventLoop().autoTick(); } } @@ -2781,14 +2865,12 @@ pub const VirtualMachine = struct { else => {}, } } else { - if (promise.status(this.global.vm()) == .Rejected) { + if (promise.status(this.global.vm()) == .rejected) { return promise; } this.eventLoop().performGC(); - this.waitForPromise(JSC.AnyPromise{ - .Internal = promise, - }); + this.waitForPromise(.{ .internal = promise }); } return this.pending_internal_promise; @@ -2849,12 +2931,22 @@ pub const VirtualMachine = struct { promise = JSModuleLoader.loadAndEvaluateModule(this.global, &String.init(entry_path)) orelse return null; this.waitForPromise(JSC.AnyPromise{ - .Internal = promise, + .internal = promise, }); return promise; } + pub fn printErrorLikeObjectSimple(this: *VirtualMachine, value: JSValue, writer: anytype, comptime escape_codes: bool) void { + this.printErrorlikeObject(value, null, null, @TypeOf(writer), writer, escape_codes, false); + } + + pub fn printErrorLikeObjectToConsole(this: *VirtualMachine, value: JSValue) void { + switch (Output.enable_ansi_colors_stderr) { + inline else => |colors| this.printErrorLikeObjectSimple(value, Output.errorWriter(), colors), + } + } + // When the Error-like object is one of our own, it's best to rely on the object directly instead of serializing it to a ZigException. 
// This is for: // - BuildMessage @@ -3945,13 +4037,39 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime pub const Watcher = GenericWatcher.NewWatcher(*@This()); const Reloader = @This(); - onAccept: std.ArrayHashMapUnmanaged(GenericWatcher.HashType, bun.BabyList(OnAcceptCallback), bun.ArrayIdentityContext, false) = .{}, ctx: *Ctx, verbose: bool = false, pending_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), tombstones: bun.StringHashMapUnmanaged(*bun.fs.FileSystem.RealFS.EntriesOption) = .{}, + pub fn init(ctx: *Ctx, fs: *bun.fs.FileSystem, verbose: bool, clear_screen_flag: bool) *@This().Watcher { + const reloader = bun.default_allocator.create(Reloader) catch bun.outOfMemory(); + reloader.* = .{ + .ctx = ctx, + .verbose = Environment.enable_logs or verbose, + }; + + clear_screen = clear_screen_flag; + const watcher = @This().Watcher.init(reloader, fs, bun.default_allocator) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.panic("Failed to enable File Watcher: {s}", .{@errorName(err)}); + }; + watcher.start() catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.panic("Failed to start File Watcher: {s}", .{@errorName(err)}); + }; + return watcher; + } + + fn debug(comptime fmt: string, args: anytype) void { + if (Environment.enable_logs) { + Output.scoped(.hot_reloader, false)(fmt, args); + } else { + Output.prettyErrorln("watcher: " ++ fmt, args); + } + } + pub fn eventLoop(this: @This()) *EventLoopType { return this.ctx.eventLoop(); } @@ -4008,7 +4126,8 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime if (comptime reload_immediately) { Output.flush(); if (comptime Ctx == ImportWatcher) { - this.reloader.ctx.rareData().closeAllListenSocketsForWatchMode(); + if (this.reloader.ctx.rare_data) |rare| + rare.closeAllListenSocketsForWatchMode(); } bun.reloadProcess(bun.default_allocator, clear_screen, false); unreachable; @@ -4030,21 +4149,6 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime } }; - fn NewCallback(comptime FunctionSignature: type) type { - return union(enum) { - javascript_callback: JSC.Strong, - zig_callback: struct { - ptr: *anyopaque, - function: *const FunctionSignature, - }, - }; - } - - pub const OnAcceptCallback = NewCallback(fn ( - vm: *JSC.VirtualMachine, - specifier: []const u8, - ) void); - pub fn enableHotModuleReloading(this: *Ctx) void { if (comptime @TypeOf(this.bun_watcher) == ImportWatcher) { if (this.bun_watcher != .none) @@ -4057,7 +4161,7 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime var reloader = bun.default_allocator.create(Reloader) catch bun.outOfMemory(); reloader.* = .{ .ctx = this, - .verbose = if (@hasField(Ctx, "log")) this.log.level.atLeast(.info) else false, + .verbose = Environment.enable_logs or if (@hasField(Ctx, "log")) this.log.level.atLeast(.info) else false, }; if (comptime @TypeOf(this.bun_watcher) == ImportWatcher) { @@ -4136,8 +4240,10 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime } else { return this.ctx.bun_watcher.hot; } - } else { + } else if (@typeInfo(@TypeOf(this.ctx.bun_watcher)) == .Optional) { return this.ctx.bun_watcher.?; + } else { + return this.ctx.bun_watcher; } } @@ -4147,18 +4253,18 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime changed_files: []?[:0]u8, watchlist: GenericWatcher.WatchList, ) void { - var slice = watchlist.slice(); + const 
slice = watchlist.slice(); const file_paths = slice.items(.file_path); - var counts = slice.items(.count); + const counts = slice.items(.count); const kinds = slice.items(.kind); const hashes = slice.items(.hash); const parents = slice.items(.parent_hash); const file_descriptors = slice.items(.fd); - var ctx = this.getContext(); + const ctx = this.getContext(); defer ctx.flushEvictions(); defer Output.flush(); - var bundler = if (@TypeOf(this.ctx.bundler) == *bun.Bundler) + const bundler = if (@TypeOf(this.ctx.bundler) == *bun.Bundler) this.ctx.bundler else &this.ctx.bundler; @@ -4184,9 +4290,8 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime // const path = Fs.PathName.init(file_path); const id = hashes[event.index]; - if (comptime Environment.isDebug) { - Output.prettyErrorln("[watch] {s} ({s}, {})", .{ file_path, @tagName(kind), event.op }); - } + if (this.verbose) + debug("onFileUpdate {s} ({s}, {})", .{ file_path, @tagName(kind), event.op }); switch (kind) { .file => { @@ -4200,7 +4305,7 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime } if (this.verbose) - Output.prettyErrorln("File changed: {s}", .{fs.relativeTo(file_path)}); + debug("File changed: {s}", .{fs.relativeTo(file_path)}); if (event.op.write or event.op.delete or event.op.rename) { current_task.append(id); @@ -4322,13 +4427,13 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime last_file_hash = file_hash; if (this.verbose) - Output.prettyErrorln(" File change: {s}", .{fs.relativeTo(abs_path)}); + debug("File change: {s}", .{fs.relativeTo(abs_path)}); } } } if (this.verbose) { - Output.prettyErrorln(" Dir change: {s}", .{fs.relativeTo(file_path)}); + debug("Dir change: {s}", .{fs.relativeTo(file_path)}); } }, } diff --git a/src/bun.js/javascript_core_c_api.zig b/src/bun.js/javascript_core_c_api.zig index e900e54af8..3bba44b331 100644 --- a/src/bun.js/javascript_core_c_api.zig +++ b/src/bun.js/javascript_core_c_api.zig @@ -163,7 +163,6 @@ pub const OpaqueJSPropertyNameAccumulator = struct_OpaqueJSPropertyNameAccumulat // This is a workaround for not receiving a JSException* object // This function lets us use the C API but returns a plain old JSValue // allowing us to have exceptions that include stack traces -pub extern "c" fn JSObjectCallAsFunctionReturnValue(ctx: JSContextRef, object: cpp.JSValue, thisObject: cpp.JSValue, argumentCount: usize, arguments: [*c]const JSValueRef) cpp.JSValue; pub extern "c" fn JSObjectCallAsFunctionReturnValueHoldingAPILock(ctx: JSContextRef, object: JSObjectRef, thisObject: JSObjectRef, argumentCount: usize, arguments: [*c]const JSValueRef) cpp.JSValue; pub extern fn JSRemoteInspectorDisableAutoStart() void; diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index f091e91d48..d6a5e0daea 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -88,56 +88,6 @@ const String = bun.String; const debug = Output.scoped(.ModuleLoader, true); -// Setting BUN_OVERRIDE_MODULE_PATH to the path to the bun repo will make it so modules are loaded -// from there instead of the ones embedded into the binary. -// In debug mode, this is set automatically for you, using the path relative to this file. -fn jsModuleFromFile(from_path: string, comptime input: string) string { - // `modules_dev` is not minified or committed. Later we could also try loading source maps for it too. 
- const moduleFolder = if (comptime Environment.isDebug) "modules_dev" else "modules"; - - const Holder = struct { - pub const file = @embedFile("../js/out/" ++ moduleFolder ++ "/" ++ input); - }; - - if ((comptime !Environment.allow_assert) and from_path.len == 0) { - return Holder.file; - } - - var file: std.fs.File = undefined; - if ((comptime Environment.allow_assert) and from_path.len == 0) { - const absolute_path = comptime (Environment.base_path ++ (std.fs.path.dirname(std.fs.path.dirname(@src().file).?).?) ++ "/js/out/" ++ moduleFolder ++ "/" ++ input); - file = std.fs.openFileAbsoluteZ(absolute_path, .{ .mode = .read_only }) catch { - const WarnOnce = struct { - pub var warned = false; - }; - if (!WarnOnce.warned) { - WarnOnce.warned = true; - Output.prettyErrorln("Could not find file: " ++ absolute_path ++ " - using embedded version", .{}); - } - return Holder.file; - }; - } else { - var parts = [_]string{ from_path, "src/js/out/" ++ moduleFolder ++ "/" ++ input }; - var buf: bun.PathBuffer = undefined; - var absolute_path_to_use = Fs.FileSystem.instance.absBuf(&parts, &buf); - buf[absolute_path_to_use.len] = 0; - file = std.fs.openFileAbsoluteZ(absolute_path_to_use[0..absolute_path_to_use.len :0], .{ .mode = .read_only }) catch { - const WarnOnce = struct { - pub var warned = false; - }; - if (!WarnOnce.warned) { - WarnOnce.warned = true; - Output.prettyErrorln("Could not find file: {s}, so using embedded version", .{absolute_path_to_use}); - } - return Holder.file; - }; - } - - const contents = file.readToEndAlloc(bun.default_allocator, std.math.maxInt(usize)) catch @panic("Cannot read file " ++ input); - file.close(); - return contents; -} - inline fn jsSyntheticModule(comptime name: ResolvedSource.Tag, specifier: String) ResolvedSource { return ResolvedSource{ .allocator = null, diff --git a/src/bun.js/node/node_fs_stat_watcher.zig b/src/bun.js/node/node_fs_stat_watcher.zig index a4f26346a5..5f197a0c1b 100644 --- a/src/bun.js/node/node_fs_stat_watcher.zig +++ b/src/bun.js/node/node_fs_stat_watcher.zig @@ -415,19 +415,16 @@ pub const StatWatcher = struct { const jsvalue = statToJSStats(this.globalThis, this.last_stat, this.bigint); this.last_jsvalue = JSC.Strong.create(jsvalue, this.globalThis); - const result = StatWatcher.listenerGetCached(this.js_this).?.call( + const vm = this.globalThis.bunVM(); + + _ = StatWatcher.listenerGetCached(this.js_this).?.call( this.globalThis, .undefined, &[2]JSC.JSValue{ jsvalue, jsvalue, }, - ); - - const vm = this.globalThis.bunVM(); - if (result.isAnyError()) { - _ = vm.uncaughtException(this.globalThis, result, false); - } + ) catch |err| this.globalThis.reportActiveExceptionAsUnhandled(err); vm.rareData().nodeFSStatWatcherScheduler(vm).append(this); } @@ -453,18 +450,14 @@ pub const StatWatcher = struct { const current_jsvalue = statToJSStats(this.globalThis, this.last_stat, this.bigint); this.last_jsvalue.set(this.globalThis, current_jsvalue); - const result = StatWatcher.listenerGetCached(this.js_this).?.call( + _ = StatWatcher.listenerGetCached(this.js_this).?.call( this.globalThis, .undefined, &[2]JSC.JSValue{ current_jsvalue, prev_jsvalue, }, - ); - if (result.isAnyError()) { - const vm = this.globalThis.bunVM(); - _ = vm.uncaughtException(this.globalThis, result, false); - } + ) catch |err| this.globalThis.reportActiveExceptionAsUnhandled(err); } pub fn onTimerInterval(timer: *uws.Timer) callconv(.C) void { diff --git a/src/bun.js/node/node_fs_watcher.zig b/src/bun.js/node/node_fs_watcher.zig index f207ab2f1d..209d4c85f9 100644 --- 
a/src/bun.js/node/node_fs_watcher.zig +++ b/src/bun.js/node/node_fs_watcher.zig @@ -540,7 +540,7 @@ pub const FSWatcher = struct { _ = listener.callWithGlobalThis( this.globalThis, &args, - ); + ) catch this.globalThis.clearException(); } } } @@ -561,7 +561,7 @@ pub const FSWatcher = struct { _ = listener.callWithGlobalThis( globalObject, &args, - ); + ) catch |e| this.globalThis.reportActiveExceptionAsUnhandled(e); } } } @@ -600,14 +600,10 @@ pub const FSWatcher = struct { filename, }; - const err = listener.callWithGlobalThis( + _ = listener.callWithGlobalThis( globalObject, &args, - ); - - if (err.toError()) |value| { - _ = JSC.VirtualMachine.get().uncaughtException(globalObject, value, false); - } + ) catch |err| globalObject.reportActiveExceptionAsUnhandled(err); } pub fn doRef(this: *FSWatcher, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue { diff --git a/src/bun.js/test/expect.zig b/src/bun.js/test/expect.zig index 1f14cceb9c..b32b9342f1 100644 --- a/src/bun.js/test/expect.zig +++ b/src/bun.js/test/expect.zig @@ -221,7 +221,7 @@ pub const Expect = struct { const newValue = promise.result(vm); switch (promise.status(vm)) { - .Fulfilled => switch (resolution) { + .fulfilled => switch (resolution) { .resolves => {}, .rejects => { if (!silent) { @@ -233,7 +233,7 @@ pub const Expect = struct { }, .none => unreachable, }, - .Rejected => switch (resolution) { + .rejected => switch (resolution) { .rejects => {}, .resolves => { if (!silent) { @@ -245,7 +245,7 @@ pub const Expect = struct { }, .none => unreachable, }, - .Pending => unreachable, + .pending => unreachable, } newValue.ensureStillAlive(); @@ -2336,43 +2336,40 @@ pub const Expect = struct { const prev_unhandled_pending_rejection_to_capture = vm.unhandled_pending_rejection_to_capture; vm.unhandled_pending_rejection_to_capture = &return_value; vm.onUnhandledRejection = &VirtualMachine.onQuietUnhandledRejectionHandlerCaptureValue; - const return_value_from_fucntion: JSValue = value.call(globalThis, .undefined, &.{}); + const return_value_from_function: JSValue = value.call(globalThis, .undefined, &.{}) catch |err| + globalThis.takeException(err); vm.unhandled_pending_rejection_to_capture = prev_unhandled_pending_rejection_to_capture; vm.global.handleRejectedPromises(); if (return_value == .zero) { - return_value = return_value_from_fucntion; + return_value = return_value_from_function; } if (return_value.asAnyPromise()) |promise| { vm.waitForPromise(promise); scope.apply(vm); - const promise_result = promise.result(globalThis.vm()); - - switch (promise.status(globalThis.vm())) { - .Fulfilled => { + switch (promise.unwrap(globalThis.vm(), .mark_handled)) { + .fulfilled => { break :brk null; }, - .Rejected => { - promise.setHandled(globalThis.vm()); - + .rejected => |rejected| { // since we know for sure it rejected, we should always return the error - break :brk promise_result.toError() orelse promise_result; + break :brk rejected.toError() orelse rejected; }, - .Pending => unreachable, + .pending => unreachable, } } - if (return_value != return_value_from_fucntion) { - if (return_value_from_fucntion.asAnyPromise()) |existing| { + if (return_value != return_value_from_function) { + if (return_value_from_function.asAnyPromise()) |existing| { existing.setHandled(globalThis.vm()); } } scope.apply(vm); - break :brk return_value.toError() orelse return_value_from_fucntion.toError(); + break :brk return_value.toError() orelse return_value_from_function.toError(); }; const did_throw = result_ != null; @@ -2441,7 +2438,7 @@ pub const 
Expect = struct { if (globalThis.hasException()) return .zero; // TODO: REMOVE THIS GETTER! Expose a binding to call .test on the RegExp object directly. if (expected_value.get(globalThis, "test")) |test_fn| { - const matches = test_fn.call(globalThis, expected_value, &.{received_message}); + const matches = test_fn.call(globalThis, expected_value, &.{received_message}) catch |err| globalThis.takeException(err); if (!matches.toBooleanSlow(globalThis)) return .undefined; } @@ -2526,7 +2523,7 @@ pub const Expect = struct { if (_received_message) |received_message| { // TODO: REMOVE THIS GETTER! Expose a binding to call .test on the RegExp object directly. if (expected_value.get(globalThis, "test")) |test_fn| { - const matches = test_fn.call(globalThis, expected_value, &.{received_message}); + const matches = test_fn.call(globalThis, expected_value, &.{received_message}) catch |err| globalThis.takeException(err); if (matches.toBooleanSlow(globalThis)) return .undefined; } } @@ -2666,7 +2663,6 @@ pub const Expect = struct { this.throw(globalThis, signature, expected_fmt, .{expected_class}); return .zero; } - pub fn toMatchSnapshot(this: *Expect, globalThis: *JSGlobalObject, callFrame: *CallFrame) JSValue { defer this.postMatch(globalThis); const thisValue = callFrame.this(); @@ -3801,18 +3797,12 @@ pub const Expect = struct { }; value.ensureStillAlive(); - const result = predicate.call(globalThis, .undefined, &.{value}); - - if (result.toError()) |err| { - var errors: [1]*anyopaque = undefined; - var _err = errors[0..errors.len]; - - _err[0] = err.asVoid(); - + const result = predicate.call(globalThis, .undefined, &.{value}) catch |e| { + const err = globalThis.takeException(e); const fmt = ZigString.init("toSatisfy() predicate threw an exception"); - globalThis.vm().throwError(globalThis, globalThis.createAggregateError(_err.ptr, _err.len, &fmt)); + globalThis.vm().throwError(globalThis, globalThis.createAggregateError(&.{err}, &fmt)); return .zero; - } + }; const not = this.flags.not; const pass = (result.isBoolean() and result.toBoolean()) != not; @@ -4673,7 +4663,7 @@ pub const Expect = struct { matcher_context_jsvalue.ensureStillAlive(); // call the custom matcher implementation - var result = matcher_fn.call(globalThis, matcher_context_jsvalue, args); + var result = matcher_fn.call(globalThis, matcher_context_jsvalue, args) catch |err| globalThis.takeException(err); assert(!result.isEmpty()); if (result.toError()) |err| { globalThis.throwValue(err); @@ -4690,9 +4680,9 @@ pub const Expect = struct { result.ensureStillAlive(); assert(!result.isEmpty()); switch (promise.status(vm)) { - .Pending => unreachable, - .Fulfilled => {}, - .Rejected => { + .pending => unreachable, + .fulfilled => {}, + .rejected => { // TODO: rewrite this code to use .then() instead of blocking the event loop JSC.VirtualMachine.get().runErrorHandler(result, null); globalThis.throw("Matcher `{s}` returned a promise that rejected", .{matcher_name}); @@ -4744,7 +4734,8 @@ pub const Expect = struct { if (comptime Environment.allow_assert) assert(message.isCallable(globalThis.vm())); // checked above - var message_result = message.callWithGlobalThis(globalThis, &[_]JSValue{}); + const message_result = message.callWithGlobalThis(globalThis, &.{}) catch |err| + globalThis.takeException(err); assert(!message_result.isEmpty()); if (message_result.toError()) |err| { globalThis.throwValue(err); @@ -5426,15 +5417,13 @@ pub const ExpectCustomAsymmetricMatcher = struct { args.appendAssumeCapacity(arg); } - var result = 
matcher_fn.call(globalThis, thisValue, args.items); - if (result.toError()) |err| { + const result = matcher_fn.call(globalThis, thisValue, args.items) catch |err| { if (dontThrow) { + globalThis.clearException(); return false; - } else { - globalThis.throwValue(globalThis, err); - return error.JSError; } - } + return err; + }; try writer.print("{}", .{result.toBunString(globalThis)}); } } diff --git a/src/bun.js/test/jest.zig b/src/bun.js/test/jest.zig index 694ab86e48..c057b69b57 100644 --- a/src/bun.js/test/jest.zig +++ b/src/bun.js/test/jest.zig @@ -756,11 +756,11 @@ pub const TestScope = struct { // TODO: not easy to coerce JSInternalPromise as JSValue, // so simply wait for completion for now. switch (promise) { - .Internal => vm.waitForPromise(promise), + .internal => vm.waitForPromise(promise), else => {}, } switch (promise.status(vm.global.vm())) { - .Rejected => { + .rejected => { if (!promise.isHandled(vm.global.vm())) { _ = vm.unhandledRejection(vm.global, promise.result(vm.global.vm()), promise.asValue(vm.global)); } @@ -771,10 +771,10 @@ pub const TestScope = struct { return .{ .fail = expect.active_test_expectation_counter.actual }; }, - .Pending => { + .pending => { task.promise_state = .pending; switch (promise) { - .Normal => |p| { + .normal => |p| { _ = p.asValue(vm.global).then(vm.global, task, jsOnResolve, jsOnReject); return .{ .pending = {} }; }, @@ -983,7 +983,7 @@ pub const DescribeScope = struct { }, }; if (result.asAnyPromise()) |promise| { - if (promise.status(globalObject.vm()) == .Pending) { + if (promise.status(globalObject.vm()) == .pending) { result.protect(); vm.waitForPromise(promise); result.unprotect(); @@ -1023,7 +1023,7 @@ pub const DescribeScope = struct { var result: JSValue = callJSFunctionForTestRunner(vm, globalThis, cb, &.{}); if (result.asAnyPromise()) |promise| { - if (promise.status(globalThis.vm()) == .Pending) { + if (promise.status(globalThis.vm()) == .pending) { result.protect(); vm.waitForPromise(promise); result.unprotect(); @@ -1123,7 +1123,7 @@ pub const DescribeScope = struct { if (result.asAnyPromise()) |prom| { globalObject.bunVM().waitForPromise(prom); switch (prom.status(globalObject.ptr().vm())) { - JSPromise.Status.Fulfilled => {}, + .fulfilled => {}, else => { _ = globalObject.bunVM().unhandledRejection(globalObject, prom.result(globalObject.ptr().vm()), prom.asValue(globalObject)); return .undefined; @@ -2146,15 +2146,10 @@ inline fn createEach( fn callJSFunctionForTestRunner(vm: *JSC.VirtualMachine, globalObject: *JSGlobalObject, function: JSValue, args: []const JSValue) JSValue { vm.eventLoop().enter(); - defer { - vm.eventLoop().exit(); - } + defer vm.eventLoop().exit(); globalObject.clearTerminationException(); - const result = function.call(globalObject, .undefined, args); - result.ensureStillAlive(); - - return result; + return function.call(globalObject, .undefined, args) catch |err| globalObject.takeException(err); } const assert = bun.assert; diff --git a/src/bun.js/test/pretty_format.zig b/src/bun.js/test/pretty_format.zig index bb690e6c14..9f25a93d7c 100644 --- a/src/bun.js/test/pretty_format.zig +++ b/src/bun.js/test/pretty_format.zig @@ -1244,7 +1244,8 @@ pub const JestPrettyFormat = struct { .Object, Writer, writer_, - toJSONFunction.call(this.globalThis, value, &.{}), + toJSONFunction.call(this.globalThis, value, &.{}) catch |err| + this.globalThis.takeException(err), .Object, enable_ansi_colors, ); diff --git a/src/bun.js/web_worker.zig b/src/bun.js/web_worker.zig index 8733d9c829..127c0751bc 100644 --- 
a/src/bun.js/web_worker.zig +++ b/src/bun.js/web_worker.zig @@ -367,7 +367,7 @@ pub const WebWorker = struct { return; }; - if (promise.status(vm.global.vm()) == .Rejected) { + if (promise.status(vm.global.vm()) == .rejected) { const handled = vm.uncaughtException(vm.global, promise.result(vm.global.vm()), true); if (!handled) { diff --git a/src/bun.js/webcore/blob/ReadFile.zig b/src/bun.js/webcore/blob/ReadFile.zig index ec02085dc9..02ab3921b1 100644 --- a/src/bun.js/webcore/blob/ReadFile.zig +++ b/src/bun.js/webcore/blob/ReadFile.zig @@ -41,7 +41,7 @@ pub fn NewReadFileHandler(comptime Function: anytype) type { } }; - JSC.AnyPromise.wrap(.{ .Normal = promise }, globalThis, WrappedFn.wrapped, .{ &blob, globalThis, bytes }); + JSC.AnyPromise.wrap(.{ .normal = promise }, globalThis, WrappedFn.wrapped, .{ &blob, globalThis, bytes }); }, .err => |err| { promise.reject(globalThis, err.toErrorInstance(globalThis)); diff --git a/src/bun.js/webcore/blob/WriteFile.zig b/src/bun.js/webcore/blob/WriteFile.zig index 1f7f8ebe40..13804680ae 100644 --- a/src/bun.js/webcore/blob/WriteFile.zig +++ b/src/bun.js/webcore/blob/WriteFile.zig @@ -710,21 +710,13 @@ pub const WriteFileWaitFromLockedValueTask = struct { var blob = value.use(); // TODO: this should be one promise not two! const new_promise = Blob.writeFileWithSourceDestination(globalThis, &blob, &file_blob, this.mkdirp_if_not_exists); - if (new_promise.asAnyPromise()) |_promise| { - switch (_promise.status(globalThis.vm())) { - .Pending => { - // Fulfill the new promise using the old promise - promise.resolve( - globalThis, - new_promise, - ); - }, - .Rejected => { - promise.reject(globalThis, _promise.result(globalThis.vm())); - }, - else => { - promise.resolve(globalThis, _promise.result(globalThis.vm())); - }, + if (new_promise.asAnyPromise()) |p| { + switch (p.unwrap(globalThis.vm(), .mark_handled)) { + // Fulfill the new promise using the pending promise + .pending => promise.resolve(globalThis, new_promise), + + .rejected => |err| promise.reject(globalThis, err), + .fulfilled => |result| promise.resolve(globalThis, result), } } diff --git a/src/bun.js/webcore/body.zig b/src/bun.js/webcore/body.zig index e5bc299c23..1bae953617 100644 --- a/src/bun.js/webcore/body.zig +++ b/src/bun.js/webcore/body.zig @@ -168,7 +168,7 @@ pub const Body = struct { const promise = this.promise orelse return false; if (promise.asAnyPromise()) |internal| { - if (internal.status(this.global.vm()) != .Pending) { + if (internal.status(this.global.vm()) != .pending) { promise.unprotect(); this.promise = null; return false; diff --git a/src/bun.js/webcore/request.zig b/src/bun.js/webcore/request.zig index 62f1ae82fd..61c9c735e2 100644 --- a/src/bun.js/webcore/request.zig +++ b/src/bun.js/webcore/request.zig @@ -135,10 +135,9 @@ pub const Request = struct { pub fn trigger(this: *InternalJSEventCallback, eventType: EventType, globalThis: *JSC.JSGlobalObject) bool { if (this.function.get()) |callback| { - const result = callback.call(globalThis, JSC.JSValue.jsUndefined(), &.{JSC.JSValue.jsNumber(@intFromEnum(eventType))}); - if (result.toError()) |js_error| { - globalThis.throwValue(js_error); - } + _ = callback.call(globalThis, JSC.JSValue.jsUndefined(), &.{JSC.JSValue.jsNumber( + @intFromEnum(eventType), + )}) catch |err| globalThis.reportActiveExceptionAsUnhandled(err); return true; } return false; diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig index 13bdd04d0a..bcbf7b0054 100644 --- a/src/bun.js/webcore/response.zig +++ 
b/src/bun.js/webcore/response.zig @@ -1301,8 +1301,13 @@ pub const Fetch = struct { const js_hostname = hostname.toJS(globalObject); js_hostname.ensureStillAlive(); js_cert.ensureStillAlive(); - const check_result = check_server_identity.call(globalObject, .undefined, &[_]JSC.JSValue{ js_hostname, js_cert }); - // if check failed abort the request + const check_result = check_server_identity.call( + globalObject, + .undefined, + &.{ js_hostname, js_cert }, + ) catch |err| globalObject.takeException(err); + + // > Returns object [...] on failure if (check_result.isAnyError()) { // mark to wait until deinit this.is_waiting_abort = this.result.has_more; @@ -1317,6 +1322,9 @@ pub const Fetch = struct { this.result.fail = error.ERR_TLS_CERT_ALTNAME_INVALID; return false; } + + // > On success, returns + // We treat any non-error value as a success. return true; } } diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 3397ef0657..9b978eea4a 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -4415,8 +4415,9 @@ pub const ByteStream = struct { json: JSC.JSPromise.Strong, pub fn fulfill(this: *BufferAction, blob: *AnyBlob) void { - blob.wrap(.{ .Normal = this.swap() }, this.globalThis().?, this.*); + blob.wrap(.{ .normal = this.swap() }, this.globalThis().?, this.*); } + pub fn reject(this: *BufferAction, err: StreamResult.StreamError) void { this.swap().reject(this.globalThis().?, err.toJSWeak(this.globalThis().?)[0]); } diff --git a/src/bun.zig b/src/bun.zig index 65ae9b83a6..0da72ab1e1 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -57,8 +57,15 @@ pub inline fn namedAllocator(comptime name: [:0]const u8) std.mem.Allocator { return default_allocator; } -pub const OOM = error{ - OutOfMemory, +pub const OOM = error{OutOfMemory}; + +pub const JSError = error{ + /// There is an active exception on the global object. Options: + /// + /// - Bubble it up to the caller + /// - Call `global.takeException(err)` to get the JSValue of the exception, + /// - Call `global.reportActiveExceptionAsUnhandled(err)` to make it unhandled. 
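// Editor's sketch, not part of the patch: the doc comment above lists three ways to react to
// `error.JSError` once a throwing binding such as the new `JSValue.call` signals an active
// exception. Each numbered block shows one alternative; a real call site would pick exactly
// one. `global` and `callback` are assumed parameters; `bun`/`JSC` are the usual imports.
fn invokeCallback(global: *JSC.JSGlobalObject, callback: JSC.JSValue) bun.JSError!void {
    // 1) Bubble the error up to the caller unchanged.
    _ = try callback.call(global, .undefined, &.{});

    // 2) Take the exception value off the global object and handle it yourself.
    if (callback.call(global, .undefined, &.{})) |ok| {
        ok.ensureStillAlive();
    } else |err| {
        const exception = global.takeException(err);
        exception.ensureStillAlive(); // e.g. wrap it in a build error or log it
    }

    // 3) Report it as unhandled when there is no better place to surface it.
    _ = callback.call(global, .undefined, &.{}) catch |err|
        return global.reportActiveExceptionAsUnhandled(err);
}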
+ JSError, }; pub const C = @import("root").C; @@ -1079,6 +1086,10 @@ pub fn CaseInsensitiveASCIIStringArrayHashMap(comptime Type: type) type { return std.ArrayHashMap([]const u8, Type, CaseInsensitiveASCIIStringContext, true); } +pub fn CaseInsensitiveASCIIStringArrayHashMapUnmanaged(comptime Type: type) type { + return std.ArrayHashMapUnmanaged([]const u8, Type, CaseInsensitiveASCIIStringContext, true); +} + pub fn StringArrayHashMapUnmanaged(comptime Type: type) type { return std.ArrayHashMapUnmanaged([]const u8, Type, StringArrayHashMapContext, true); } @@ -3239,6 +3250,49 @@ pub fn getUserName(output_buffer: []u8) ?[]const u8 { return output_buffer[0..size]; } +pub fn runtimeEmbedFile( + comptime root: enum { codegen, src }, + comptime sub_path: []const u8, +) []const u8 { + comptime assert(Environment.isDebug); + comptime assert(!Environment.embed_code); + + const abs_path = comptime path: { + var buf: bun.PathBuffer = undefined; + var fba = std.heap.FixedBufferAllocator.init(&buf); + const resolved = (std.fs.path.resolve(fba.allocator(), &.{ + switch (root) { + .codegen => Environment.codegen_path, + .src => Environment.base_path ++ "/src", + }, + sub_path, + }) catch + @compileError(unreachable))[0..].*; + break :path &resolved; + }; + + const static = struct { + var storage: []const u8 = undefined; + var once = std.once(load); + + fn load() void { + storage = std.fs.cwd().readFileAlloc(default_allocator, abs_path, std.math.maxInt(usize)) catch |e| { + Output.panic( + \\Failed to load '{s}': {} + \\ + \\To improve iteration speed, some files are not embedded but + \\loaded at runtime, at the cost of making the binary non-portable. + \\To fix this, pass -DFORCE_EMBED_CODE=1 to CMake + , .{ abs_path, e }); + }; + } + }; + + static.once.call(); + + return static.storage; +} + pub inline fn markWindowsOnly() if (Environment.isWindows) void else noreturn { if (Environment.isWindows) { return; @@ -3396,7 +3450,7 @@ pub fn assert_neql(a: anytype, b: anytype) callconv(callconv_inline) void { return assert(a != b); } -pub inline fn unsafeAssert(condition: bool) void { +pub fn unsafeAssert(condition: bool) callconv(callconv_inline) void { if (!condition) { unreachable; } @@ -3661,6 +3715,8 @@ pub fn memmove(output: []u8, input: []const u8) void { pub const hmac = @import("./hmac.zig"); pub const libdeflate = @import("./deps/libdeflate.zig"); +pub const kit = @import("kit/kit.zig"); + /// like std.enums.tagName, except it doesn't lose the sentinel value. 
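// Editor's sketch, not part of the patch: `runtimeEmbedFile` above is a debug-only stand-in
// for `@embedFile` that lazily reads the file from disk, so editing it does not require a
// rebuild. The sub-path below is illustrative; real call sites pass whatever file they need
// relative to `src/` (or the codegen directory when `.codegen` is used).
fn loadHmrRuntimeSource() []const u8 {
    // Cached after the first read (std.once); panics with a hint about
    // -DFORCE_EMBED_CODE=1 if the file cannot be found on disk.
    return bun.runtimeEmbedFile(.src, "kit/hmr-runtime.ts");
}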
pub fn tagName(comptime Enum: type, value: Enum) ?[:0]const u8 { return inline for (@typeInfo(Enum).Enum.fields) |f| { diff --git a/src/bun_js.zig b/src/bun_js.zig index afad753c9f..7144c3ae49 100644 --- a/src/bun_js.zig +++ b/src/bun_js.zig @@ -307,7 +307,7 @@ pub const Run = struct { } if (vm.loadEntryPoint(this.entry_path)) |promise| { - if (promise.status(vm.global.vm()) == .Rejected) { + if (promise.status(vm.global.vm()) == .rejected) { const handled = vm.uncaughtException(vm.global, promise.result(vm.global.vm()), true); if (vm.hot_reload != .none or handled) { @@ -375,7 +375,7 @@ pub const Run = struct { { if (this.vm.isWatcherEnabled()) { var prev_promise = this.vm.pending_internal_promise; - if (prev_promise.status(vm.global.vm()) == .Rejected) { + if (prev_promise.status(vm.global.vm()) == .rejected) { _ = vm.unhandledRejection(this.vm.global, this.vm.pending_internal_promise.result(vm.global.vm()), this.vm.pending_internal_promise.asValue()); } @@ -384,7 +384,7 @@ pub const Run = struct { vm.tick(); // Report exceptions in hot-reloaded modules - if (this.vm.pending_internal_promise.status(vm.global.vm()) == .Rejected and prev_promise != this.vm.pending_internal_promise) { + if (this.vm.pending_internal_promise.status(vm.global.vm()) == .rejected and prev_promise != this.vm.pending_internal_promise) { prev_promise = this.vm.pending_internal_promise; _ = vm.unhandledRejection(this.vm.global, this.vm.pending_internal_promise.result(vm.global.vm()), this.vm.pending_internal_promise.asValue()); continue; @@ -395,7 +395,7 @@ pub const Run = struct { vm.onBeforeExit(); - if (this.vm.pending_internal_promise.status(vm.global.vm()) == .Rejected and prev_promise != this.vm.pending_internal_promise) { + if (this.vm.pending_internal_promise.status(vm.global.vm()) == .rejected and prev_promise != this.vm.pending_internal_promise) { prev_promise = this.vm.pending_internal_promise; _ = vm.unhandledRejection(this.vm.global, this.vm.pending_internal_promise.result(vm.global.vm()), this.vm.pending_internal_promise.asValue()); } @@ -403,7 +403,7 @@ pub const Run = struct { vm.eventLoop().tickPossiblyForever(); } - if (this.vm.pending_internal_promise.status(vm.global.vm()) == .Rejected and prev_promise != this.vm.pending_internal_promise) { + if (this.vm.pending_internal_promise.status(vm.global.vm()) == .rejected and prev_promise != this.vm.pending_internal_promise) { prev_promise = this.vm.pending_internal_promise; _ = vm.unhandledRejection(this.vm.global, this.vm.pending_internal_promise.result(vm.global.vm()), this.vm.pending_internal_promise.asValue()); } @@ -418,7 +418,7 @@ pub const Run = struct { const result = vm.entry_point_result.value.get() orelse .undefined; if (result.asAnyPromise()) |promise| { switch (promise.status(vm.jsc)) { - .Pending => { + .pending => { result._then(vm.global, .undefined, Bun__onResolveEntryPointResult, Bun__onRejectEntryPointResult); vm.tick(); diff --git a/src/bundler.zig b/src/bundler.zig index ea0d8ef729..4c09202e2b 100644 --- a/src/bundler.zig +++ b/src/bundler.zig @@ -1387,7 +1387,6 @@ pub const Bundler = struct { var opts = js_parser.Parser.Options.init(jsx, loader); - opts.legacy_transform_require_to_import = bundler.options.allow_runtime and !bundler.options.target.isBun(); opts.features.emit_decorator_metadata = this_parse.emit_decorator_metadata; opts.features.allow_runtime = bundler.options.allow_runtime; opts.features.set_breakpoint_on_first_line = this_parse.set_breakpoint_on_first_line; diff --git a/src/bundler/bundle_v2.zig 
b/src/bundler/bundle_v2.zig index 8236337f24..a3e635caae 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -330,8 +330,9 @@ pub const BundleV2 = struct { graph: Graph = Graph{}, linker: LinkerContext = LinkerContext{ .loop = undefined }, bun_watcher: ?*Watcher.Watcher = null, + // kit_watcher: ?*bun.kit.DevServer.HotReloader.Watcher = null, plugins: ?*JSC.API.JSBundler.Plugin = null, - completion: ?*JSBundleCompletionTask = null, + completion: ?CompletionPtr = null, source_code_length: usize = 0, // There is a race condition where an onResolve plugin may schedule a task on the bundle thread before it's parsing task completes @@ -343,6 +344,17 @@ pub const BundleV2 = struct { unique_key: u64 = 0, dynamic_import_entry_points: std.AutoArrayHashMap(Index.Int, void) = undefined, + pub const CompletionPtr = union(enum) { + js: *JSBundleCompletionTask, + kit: *bun.kit.DevServer.BundleTask, + + pub fn log(ptr: CompletionPtr) *bun.logger.Log { + return switch (ptr) { + inline else => |inner| &inner.log, + }; + } + }; + const debug = Output.scoped(.Bundle, false); pub inline fn loop(this: *BundleV2) *EventLoop { @@ -483,7 +495,7 @@ pub const BundleV2 = struct { ) catch |err| { var handles_import_errors = false; var source: ?*const Logger.Source = null; - const log = &this.completion.?.log; + const log = this.completion.?.log(); if (import_record.importer_source_index) |importer| { var record: *ImportRecord = &this.graph.ast.items(.import_records)[importer].slice()[import_record.import_record_index]; @@ -720,12 +732,11 @@ pub const BundleV2 = struct { ) !*BundleV2 { bundler.env.loadTracy(); - var generator = try allocator.create(BundleV2); + const this = try allocator.create(BundleV2); bundler.options.mark_builtins_as_external = bundler.options.target.isBun() or bundler.options.target == .node; bundler.resolver.opts.mark_builtins_as_external = bundler.options.target.isBun() or bundler.options.target == .node; - const this = generator; - generator.* = BundleV2{ + this.* = .{ .bundler = bundler, .client_bundler = bundler, .server_bundler = bundler, @@ -742,49 +753,64 @@ pub const BundleV2 = struct { }, }, }; - generator.linker.graph.allocator = generator.graph.heap.allocator(); - generator.graph.allocator = generator.linker.graph.allocator; - generator.bundler.allocator = generator.graph.allocator; - generator.bundler.resolver.allocator = generator.graph.allocator; - generator.bundler.linker.allocator = generator.graph.allocator; - generator.bundler.log.msgs.allocator = generator.graph.allocator; - generator.bundler.log.clone_line_text = true; + this.linker.graph.allocator = this.graph.heap.allocator(); + this.graph.allocator = this.linker.graph.allocator; + this.bundler.allocator = this.graph.allocator; + this.bundler.resolver.allocator = this.graph.allocator; + this.bundler.linker.allocator = this.graph.allocator; + this.bundler.log.msgs.allocator = this.graph.allocator; + this.bundler.log.clone_line_text = true; - // We don't expose a way to disable this right now. - generator.bundler.options.tree_shaking = true; - generator.bundler.resolver.opts.tree_shaking = true; + // We don't expose an option to disable this. Kit requires tree-shaking + // disabled since every export is always referenced in case a future + // module depends on a previously unused export. 
+ if (this.bundler.options.output_format == .internal_kit_dev) { + this.bundler.options.tree_shaking = false; + this.bundler.resolver.opts.tree_shaking = false; + } else { + this.bundler.options.tree_shaking = true; + this.bundler.resolver.opts.tree_shaking = true; + } - generator.linker.graph.bundler_graph = &generator.graph; - generator.linker.resolver = &generator.bundler.resolver; - generator.linker.graph.code_splitting = bundler.options.code_splitting; - generator.graph.code_splitting = bundler.options.code_splitting; + this.linker.graph.bundler_graph = &this.graph; + this.linker.resolver = &this.bundler.resolver; + this.linker.graph.code_splitting = bundler.options.code_splitting; + this.graph.code_splitting = bundler.options.code_splitting; - generator.linker.options.minify_syntax = bundler.options.minify_syntax; - generator.linker.options.minify_identifiers = bundler.options.minify_identifiers; - generator.linker.options.minify_whitespace = bundler.options.minify_whitespace; - generator.linker.options.emit_dce_annotations = bundler.options.emit_dce_annotations; - generator.linker.options.ignore_dce_annotations = bundler.options.ignore_dce_annotations; + this.linker.options.minify_syntax = bundler.options.minify_syntax; + this.linker.options.minify_identifiers = bundler.options.minify_identifiers; + this.linker.options.minify_whitespace = bundler.options.minify_whitespace; + this.linker.options.emit_dce_annotations = bundler.options.emit_dce_annotations; + this.linker.options.ignore_dce_annotations = bundler.options.ignore_dce_annotations; - generator.linker.options.source_maps = bundler.options.source_map; - generator.linker.options.tree_shaking = bundler.options.tree_shaking; - generator.linker.options.public_path = bundler.options.public_path; - generator.linker.options.target = bundler.options.target; + this.linker.options.source_maps = bundler.options.source_map; + this.linker.options.tree_shaking = bundler.options.tree_shaking; + this.linker.options.public_path = bundler.options.public_path; + this.linker.options.target = bundler.options.target; + this.linker.options.output_format = bundler.options.output_format; - var pool = try generator.graph.allocator.create(ThreadPool); + var pool = try this.graph.allocator.create(ThreadPool); if (enable_reloading) { - Watcher.enableHotModuleReloading(generator); + Watcher.enableHotModuleReloading(this); } // errdefer pool.destroy(); - errdefer generator.graph.heap.deinit(); + errdefer this.graph.heap.deinit(); pool.* = ThreadPool{}; - generator.graph.pool = pool; + this.graph.pool = pool; try pool.start( this, thread_pool, ); - return generator; + // sanity checks for kit + if (this.bundler.options.output_format == .internal_kit_dev) { + if (this.bundler.options.compile) @panic("TODO: internal_kit_dev does not support compile"); + if (this.bundler.options.code_splitting) @panic("TODO: internal_kit_dev does not support code splitting"); + if (this.bundler.options.transform_only) @panic("TODO: internal_kit_dev does not support transform_only"); + } + + return this; } pub fn enqueueEntryPoints(this: *BundleV2, user_entry_points: []const string) !ThreadPoolLib.Batch { @@ -1142,6 +1168,8 @@ pub const BundleV2 = struct { } } + pub const JSBundleThread = BundleThread(JSBundleCompletionTask); + pub fn generateFromJavaScript( config: bun.JSC.API.JSBundler.Config, plugins: ?*bun.JSC.API.JSBundler.Plugin, @@ -1169,7 +1197,7 @@ pub const BundleV2 = struct { // conditions from creating two _ = JSC.WorkPool.get(); - BundleThread.enqueue(completion); + 
JSBundleThread.singleton.enqueue(completion); completion.poll_ref.ref(globalThis.bunVM()); @@ -1180,6 +1208,12 @@ pub const BundleV2 = struct { output_files: std.ArrayList(options.OutputFile), }; + pub const Result = union(enum) { + pending: void, + err: anyerror, + value: BuildResult, + }; + pub const JSBundleCompletionTask = struct { config: bun.JSC.API.JSBundler.Config, jsc_event_loop: *bun.JSC.EventLoop, @@ -1197,11 +1231,67 @@ pub const BundleV2 = struct { plugins: ?*bun.JSC.API.JSBundler.Plugin = null, ref_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(1), - pub const Result = union(enum) { - pending: void, - err: anyerror, - value: BuildResult, - }; + pub fn configureBundler( + completion: *JSBundleCompletionTask, + bundler: *Bundler, + allocator: std.mem.Allocator, + ) !void { + const config = &completion.config; + + bundler.* = try bun.Bundler.init( + allocator, + &completion.log, + Api.TransformOptions{ + .define = if (config.define.count() > 0) config.define.toAPI() else null, + .entry_points = config.entry_points.keys(), + .target = config.target.toAPI(), + .absolute_working_dir = if (config.dir.list.items.len > 0) + config.dir.toOwnedSliceLeaky() + else + null, + .inject = &.{}, + .external = config.external.keys(), + .main_fields = &.{}, + .extension_order = &.{}, + .env_files = &.{}, + .conditions = config.conditions.map.keys(), + .ignore_dce_annotations = bundler.options.ignore_dce_annotations, + }, + completion.env, + ); + + bundler.options.entry_points = config.entry_points.keys(); + bundler.options.jsx = config.jsx; + bundler.options.no_macros = config.no_macros; + bundler.options.react_server_components = config.server_components.client.items.len > 0 or config.server_components.server.items.len > 0; + bundler.options.loaders = try options.loadersFromTransformOptions(allocator, config.loaders, config.target); + bundler.options.entry_naming = config.names.entry_point.data; + bundler.options.chunk_naming = config.names.chunk.data; + bundler.options.asset_naming = config.names.asset.data; + + bundler.options.public_path = config.public_path.list.items; + + bundler.options.output_dir = config.outdir.toOwnedSliceLeaky(); + bundler.options.root_dir = config.rootdir.toOwnedSliceLeaky(); + bundler.options.minify_syntax = config.minify.syntax; + bundler.options.minify_whitespace = config.minify.whitespace; + bundler.options.minify_identifiers = config.minify.identifiers; + bundler.options.inlining = config.minify.syntax; + bundler.options.source_map = config.source_map; + bundler.options.packages = config.packages; + bundler.options.code_splitting = config.code_splitting; + bundler.options.emit_dce_annotations = config.emit_dce_annotations orelse !config.minify.whitespace; + bundler.options.ignore_dce_annotations = config.ignore_dce_annotations; + + bundler.configureLinker(); + try bundler.configureDefines(); + + bundler.resolver.opts = bundler.options; + } + + pub fn completeOnBundleThread(completion: *JSBundleCompletionTask) void { + completion.jsc_event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.create(completion.task.task())); + } pub const TaskCompletion = bun.JSC.AnyTask.New(JSBundleCompletionTask, onComplete); @@ -1546,188 +1636,6 @@ pub const BundleV2 = struct { } } - pub fn timerCallback(_: *bun.windows.libuv.Timer) callconv(.C) void {} - - pub fn generateInNewThreadWrap(instance: *BundleThread) void { - Output.Source.configureNamedThread("Bundler"); - - instance.waker = bun.Async.Waker.init() catch @panic("Failed to create waker"); - - var timer: 
bun.windows.libuv.Timer = undefined; - if (bun.Environment.isWindows) { - timer.init(instance.waker.loop.uv_loop); - timer.start(std.math.maxInt(u64), std.math.maxInt(u64), &timerCallback); - } - - // 3. Unblock the calling thread so it can continue. - instance.wait_for_thread_to_have_created_the_waker.store(0, .monotonic); - std.Thread.Futex.wake(&instance.wait_for_thread_to_have_created_the_waker, 10); - - var has_bundled = false; - while (true) { - while (instance.queue.pop()) |completion| { - generateInNewThread(completion, instance.generation) catch |err| { - completion.result = .{ .err = err }; - completion.jsc_event_loop.enqueueTaskConcurrent( - JSC.ConcurrentTask.create(completion.task.task()), - ); - }; - has_bundled = true; - } - instance.generation +|= 1; - - if (has_bundled) { - bun.Mimalloc.mi_collect(false); - has_bundled = false; - } - - _ = instance.waker.wait(); - } - } - - pub const BundleThread = struct { - /// Must be created on the BundleThread. - /// Uninitialized until `wait_for_thread_to_have_created_the_waker` is set to 0. - waker: bun.Async.Waker, - queue: bun.UnboundedQueue(JSBundleCompletionTask, .next), - generation: bun.Generation, - wait_for_thread_to_have_created_the_waker: std.atomic.Value(u32), - - pub var instance: ?*BundleThread = undefined; - pub var load_once = std.once(loadOnce); - - // Blocks the calling thread until the bun build thread is created. - // std.once also blocks other callers of this function until the first caller is done. - fn loadOnce() void { - const this = bun.default_allocator.create(BundleThread) catch bun.outOfMemory(); - this.* = .{ - .waker = undefined, - .queue = .{}, - .generation = 0, - .wait_for_thread_to_have_created_the_waker = .{ - .raw = 1, - }, - }; - BundleThread.instance = this; - - // 2. Spawn the bun build thread. - var thread = std.Thread.spawn(.{}, generateInNewThreadWrap, .{instance.?}) catch Output.panic("Failed to spawn bun build thread", .{}); - thread.detach(); - - while (this.wait_for_thread_to_have_created_the_waker.load(.monotonic) > 0) std.Thread.Futex.wait(&this.wait_for_thread_to_have_created_the_waker, 1); - } - fn get() *BundleThread { - load_once.call(); - return instance.?; - } - pub fn enqueue(task: *JSBundleCompletionTask) void { - const this = get(); - this.queue.push(task); - this.waker.wake(); - } - }; - - /// This is called from `Bun.build` in JavaScript. 
- fn generateInNewThread( - completion: *JSBundleCompletionTask, - generation: bun.Generation, - ) !void { - var heap = try ThreadlocalArena.init(); - defer heap.deinit(); - - const allocator = heap.allocator(); - var ast_memory_allocator = try allocator.create(js_ast.ASTMemoryAllocator); - ast_memory_allocator.* = .{ - .allocator = allocator, - }; - ast_memory_allocator.reset(); - ast_memory_allocator.push(); - - const config = &completion.config; - var bundler = try allocator.create(bun.Bundler); - - bundler.* = try bun.Bundler.init( - allocator, - &completion.log, - Api.TransformOptions{ - .define = if (config.define.count() > 0) config.define.toAPI() else null, - .entry_points = config.entry_points.keys(), - .target = config.target.toAPI(), - .absolute_working_dir = if (config.dir.list.items.len > 0) config.dir.toOwnedSliceLeaky() else null, - .inject = &.{}, - .external = config.external.keys(), - .main_fields = &.{}, - .extension_order = &.{}, - .env_files = &.{}, - .conditions = config.conditions.map.keys(), - .ignore_dce_annotations = bundler.options.ignore_dce_annotations, - }, - completion.env, - ); - bundler.options.jsx = config.jsx; - bundler.options.no_macros = config.no_macros; - bundler.options.react_server_components = config.server_components.client.items.len > 0 or config.server_components.server.items.len > 0; - bundler.options.loaders = try options.loadersFromTransformOptions(allocator, config.loaders, config.target); - bundler.options.entry_naming = config.names.entry_point.data; - bundler.options.chunk_naming = config.names.chunk.data; - bundler.options.asset_naming = config.names.asset.data; - - bundler.options.public_path = config.public_path.list.items; - - bundler.options.output_dir = config.outdir.toOwnedSliceLeaky(); - bundler.options.root_dir = config.rootdir.toOwnedSliceLeaky(); - bundler.options.minify_syntax = config.minify.syntax; - bundler.options.minify_whitespace = config.minify.whitespace; - bundler.options.minify_identifiers = config.minify.identifiers; - bundler.options.inlining = config.minify.syntax; - bundler.options.source_map = config.source_map; - bundler.options.packages = config.packages; - bundler.resolver.generation = generation; - bundler.options.code_splitting = config.code_splitting; - bundler.options.emit_dce_annotations = config.emit_dce_annotations orelse !config.minify.whitespace; - bundler.options.ignore_dce_annotations = config.ignore_dce_annotations; - - bundler.configureLinker(); - try bundler.configureDefines(); - - bundler.resolver.opts = bundler.options; - - const this = try BundleV2.init(bundler, allocator, JSC.AnyEventLoop.init(allocator), false, JSC.WorkPool.get(), heap); - this.plugins = completion.plugins; - this.completion = completion; - completion.bundler = this; - - defer { - if (this.graph.pool.pool.threadpool_context == @as(?*anyopaque, @ptrCast(this.graph.pool))) { - this.graph.pool.pool.threadpool_context = null; - } - - ast_memory_allocator.pop(); - this.deinit(); - } - - errdefer { - // Wait for wait groups to finish. 
There still may be - this.linker.source_maps.line_offset_wait_group.wait(); - this.linker.source_maps.quoted_contents_wait_group.wait(); - - var out_log = Logger.Log.init(bun.default_allocator); - this.bundler.log.appendToWithRecycled(&out_log, true) catch bun.outOfMemory(); - completion.log = out_log; - } - - completion.result = .{ - .value = .{ - .output_files = try this.runFromJSInNewThread(config), - }, - }; - - var out_log = Logger.Log.init(bun.default_allocator); - this.bundler.log.appendToWithRecycled(&out_log, true) catch bun.outOfMemory(); - completion.log = out_log; - completion.jsc_event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.create(completion.task.task())); - } - pub fn deinit(this: *BundleV2) void { defer this.graph.ast.deinit(bun.default_allocator); defer this.graph.input_files.deinit(bun.default_allocator); @@ -1751,7 +1659,7 @@ pub const BundleV2 = struct { this.free_list.clearAndFree(); } - pub fn runFromJSInNewThread(this: *BundleV2, config: *const bun.JSC.API.JSBundler.Config) !std.ArrayList(options.OutputFile) { + pub fn runFromJSInNewThread(this: *BundleV2, entry_points: []const []const u8) !std.ArrayList(options.OutputFile) { this.unique_key = std.crypto.random.int(u64); if (this.bundler.log.errors > 0) { @@ -1763,7 +1671,7 @@ pub const BundleV2 = struct { bun.Mimalloc.mi_collect(true); } - this.graph.pool.pool.schedule(try this.enqueueEntryPoints(config.entry_points.keys())); + this.graph.pool.pool.schedule(try this.enqueueEntryPoints(entry_points)); // We must wait for all the parse tasks to complete, even if there are errors. this.waitForParse(); @@ -1831,7 +1739,7 @@ pub const BundleV2 = struct { .original_target = original_target orelse this.bundler.options.target, }, }, - this.completion.?, + this.completion.?.js, ); resolve.dispatch(); return true; @@ -1851,7 +1759,7 @@ pub const BundleV2 = struct { }); var load = bun.default_allocator.create(JSC.API.JSBundler.Load) catch unreachable; load.* = JSC.API.JSBundler.Load.create( - this.completion.?, + this.completion.?.js, parse.source_index, parse.path.loader(&this.bundler.options.loaders) orelse options.Loader.js, parse.path, @@ -2150,9 +2058,9 @@ pub const BundleV2 = struct { }); } - if (this.bun_watcher != null) { + if (this.bun_watcher) |watcher| { if (empty_result.watcher_data.fd != .zero and empty_result.watcher_data.fd != bun.invalid_fd) { - _ = this.bun_watcher.?.addFile( + _ = watcher.addFile( empty_result.watcher_data.fd, input_files.items(.source)[empty_result.source_index.get()].path.text, bun.hash32(input_files.items(.source)[empty_result.source_index.get()].path.text), @@ -2163,15 +2071,28 @@ pub const BundleV2 = struct { ); } } + // else if (this.kit_watcher) |watcher| { + // if (empty_result.watcher_data.fd != .zero and empty_result.watcher_data.fd != bun.invalid_fd) { + // _ = watcher.addFile( + // empty_result.watcher_data.fd, + // input_files.items(.source)[empty_result.source_index.get()].path.text, + // bun.hash32(input_files.items(.source)[empty_result.source_index.get()].path.text), + // graph.input_files.items(.loader)[empty_result.source_index.get()], + // empty_result.watcher_data.dir_fd, + // null, + // false, + // ); + // } + // } }, .success => |*result| { result.log.cloneToWithRecycled(this.bundler.log, true) catch unreachable; { // to minimize contention, we add watcher here - if (this.bun_watcher != null) { + if (this.bun_watcher) |watcher| { if (result.watcher_data.fd != .zero and result.watcher_data.fd != bun.invalid_fd) { - _ = this.bun_watcher.?.addFile( + _ = 
watcher.addFile( result.watcher_data.fd, result.source.path.text, bun.hash32(result.source.path.text), @@ -2182,6 +2103,19 @@ pub const BundleV2 = struct { ); } } + // else if (this.kit_watcher) |watcher| { + // if (result.watcher_data.fd != .zero and result.watcher_data.fd != bun.invalid_fd) { + // _ = watcher.addFile( + // result.watcher_data.fd, + // result.source.path.text, + // bun.hash32(result.source.path.text), + // result.source.path.loader(&this.bundler.options.loaders) orelse options.Loader.file, + // result.watcher_data.dir_fd, + // result.watcher_data.package_json, + // false, + // ); + // } + // } } // Warning: this array may resize in this function call @@ -2329,6 +2263,182 @@ pub const BundleV2 = struct { } }; +/// Used to keep the bundle thread from spinning on Windows +pub fn timerCallback(_: *bun.windows.libuv.Timer) callconv(.C) void {} + +/// Used for Bun.build and Kit, as they asynchronously schedule multiple +/// bundles. To account for their respective differences, the scheduling code +/// is generalized over the Task structure. +/// +/// - `configureBundler` is used to configure `Bundler`. +/// - `completeOnBundleThread` is used to tell the task that it is done. +/// +pub fn BundleThread(CompletionStruct: type) type { + return struct { + const Self = @This(); + + waker: bun.Async.Waker, + ready_event: std.Thread.ResetEvent, + queue: bun.UnboundedQueue(CompletionStruct, .next), + generation: bun.Generation = 0, + + /// To initialize, put this somewhere in memory, and then call `spawn()` + pub const uninitialized: Self = .{ + .waker = undefined, + .queue = .{}, + .generation = 0, + .ready_event = .{}, + }; + + pub fn spawn(instance: *Self) !std.Thread { + const thread = try std.Thread.spawn(.{}, threadMain, .{instance}); + instance.ready_event.wait(); + return thread; + } + + /// Lazily-initialized singleton. This is used for `Bun.build` since the + /// bundle thread may not be needed. Kit always uses the bundler, so it + /// just initializes `BundleThread` + pub const singleton = struct { + var once = std.once(loadOnceImpl); + var instance: ?*Self = null; + + // Blocks the calling thread until the bun build thread is created. + // std.once also blocks other callers of this function until the first caller is done. + fn loadOnceImpl() void { + const bundle_thread = bun.default_allocator.create(Self) catch bun.outOfMemory(); + bundle_thread.* = uninitialized; + instance = bundle_thread; + + // 2. Spawn the bun build thread. + const os_thread = bundle_thread.spawn() catch + Output.panic("Failed to spawn bun build thread", .{}); + os_thread.detach(); + } + + pub fn get() *Self { + once.call(); + return instance.?; + } + + pub fn enqueue(completion: *CompletionStruct) void { + get().enqueue(completion); + } + }; + + pub fn enqueue(instance: *Self, completion: *CompletionStruct) void { + instance.queue.push(completion); + instance.waker.wake(); + } + + fn threadMain(instance: *Self) void { + Output.Source.configureNamedThread("Bundler"); + + instance.waker = bun.Async.Waker.init() catch @panic("Failed to create waker"); + + // Unblock the calling thread so it can continue. 
+ instance.ready_event.set(); + + var timer: bun.windows.libuv.Timer = undefined; + if (bun.Environment.isWindows) { + timer.init(instance.waker.loop.uv_loop); + timer.start(std.math.maxInt(u64), std.math.maxInt(u64), &timerCallback); + } + + var has_bundled = false; + while (true) { + while (instance.queue.pop()) |completion| { + generateInNewThread(completion, instance.generation) catch |err| { + completion.result = .{ .err = err }; + completion.completeOnBundleThread(); + }; + has_bundled = true; + } + instance.generation +|= 1; + + if (has_bundled) { + bun.Mimalloc.mi_collect(false); + has_bundled = false; + } + + _ = instance.waker.wait(); + } + } + + /// This is called from `Bun.build` in JavaScript. + fn generateInNewThread(completion: *CompletionStruct, generation: bun.Generation) !void { + var heap = try ThreadlocalArena.init(); + defer heap.deinit(); + + const allocator = heap.allocator(); + var ast_memory_allocator = try allocator.create(js_ast.ASTMemoryAllocator); + ast_memory_allocator.* = .{ .allocator = allocator }; + ast_memory_allocator.reset(); + ast_memory_allocator.push(); + + const bundler = try allocator.create(bun.Bundler); + + try completion.configureBundler(bundler, allocator); + + bundler.resolver.generation = generation; + + const this = try BundleV2.init( + bundler, + allocator, + JSC.AnyEventLoop.init(allocator), + false, + JSC.WorkPool.get(), + heap, + ); + + // switch (CompletionStruct) { + // bun.kit.DevServer.BundleTask => { + // this.kit_watcher = completion.route.dev.bun_watcher; + // }, + // else => {}, + // } + + this.plugins = completion.plugins; + this.completion = switch (CompletionStruct) { + BundleV2.JSBundleCompletionTask => .{ .js = completion }, + bun.kit.DevServer.BundleTask => .{ .kit = completion }, + else => @compileError("Unknown completion struct: " ++ CompletionStruct), + }; + completion.bundler = this; + + defer { + if (this.graph.pool.pool.threadpool_context == @as(?*anyopaque, @ptrCast(this.graph.pool))) { + this.graph.pool.pool.threadpool_context = null; + } + + ast_memory_allocator.pop(); + this.deinit(); + } + + errdefer { + // Wait for wait groups to finish. 
There still may be + this.linker.source_maps.line_offset_wait_group.wait(); + this.linker.source_maps.quoted_contents_wait_group.wait(); + + var out_log = Logger.Log.init(bun.default_allocator); + this.bundler.log.appendToWithRecycled(&out_log, true) catch bun.outOfMemory(); + completion.log = out_log; + } + + completion.result = .{ + .value = .{ + .output_files = try this.runFromJSInNewThread(bundler.options.entry_points), + }, + }; + + var out_log = Logger.Log.init(bun.default_allocator); + this.bundler.log.appendToWithRecycled(&out_log, true) catch bun.outOfMemory(); + completion.log = out_log; + completion.completeOnBundleThread(); + } + }; +} + const UseDirective = js_ast.UseDirective; pub const ParseTask = struct { @@ -2526,7 +2636,8 @@ pub const ParseTask = struct { } pub const Result = struct { - task: EventLoop.Task = undefined, + task: EventLoop.Task, + ctx: *BundleV2, value: union(Tag) { success: Success, @@ -2864,7 +2975,11 @@ pub const ParseTask = struct { errdefer if (task.contents_or_fd == .fd) entry.deinit(allocator); - const will_close_file_descriptor = task.contents_or_fd == .fd and !entry.fd.isStdio() and this.ctx.bun_watcher == null; + const will_close_file_descriptor = task.contents_or_fd == .fd and + !entry.fd.isStdio() and + (this.ctx.bun_watcher == null + // and this.ctx.kit_watcher == null + ); if (will_close_file_descriptor) { _ = entry.closeFD(); } @@ -2895,14 +3010,13 @@ pub const ParseTask = struct { const target = targetFromHashbang(entry.contents) orelse use_directive.target(task.known_target orelse bundler.options.target); var opts = js_parser.Parser.Options.init(task.jsx, loader); - opts.legacy_transform_require_to_import = false; - opts.features.allow_runtime = !source.index.isRuntime(); - opts.features.use_import_meta_require = target.isBun(); + opts.bundle = true; opts.warn_about_unbundled_modules = false; opts.macro_context = &this.data.macro_context; - opts.bundle = true; opts.package_version = task.package_version; + opts.features.allow_runtime = !source.index.isRuntime(); + opts.features.use_import_meta_require = target.isBun(); opts.features.top_level_await = true; opts.features.auto_import_jsx = task.jsx.parse and bundler.options.auto_import_jsx; opts.features.trim_unused_imports = loader.isTypeScript() or (bundler.options.trim_unused_imports orelse false); @@ -2910,6 +3024,11 @@ pub const ParseTask = struct { opts.features.minify_syntax = bundler.options.minify_syntax; opts.features.minify_identifiers = bundler.options.minify_identifiers; opts.features.emit_decorator_metadata = bundler.options.emit_decorator_metadata; + opts.features.unwrap_commonjs_packages = bundler.options.unwrap_commonjs_packages; + opts.features.hot_module_reloading = bundler.options.output_format == .internal_kit_dev and !source.index.isRuntime(); + opts.features.react_fast_refresh = (bundler.options.hot_module_reloading or bundler.options.react_fast_refresh) and + loader.isJSX() and !source.path.isNodeModule(); + opts.ignore_dce_annotations = bundler.options.ignore_dce_annotations and !source.index.isRuntime(); // For files that are not user-specified entrypoints, set `import.meta.main` to `false`. 
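As an aside on the `import.meta.main` behavior referenced just above, here is a minimal sketch (file and function names hypothetical) of the pattern it supports; for non-entry modules the inlined `false` turns the branch into dead code that minification can drop.

// cli.ts (hypothetical). `import.meta.main` is true only when this file is the
// entry point; bundled non-entry modules get `false` inlined instead.
export function main(args: string[]) {
  console.log("args:", args);
}

if (import.meta.main) {
  main(process.argv.slice(2));
}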
@@ -2923,7 +3042,6 @@ pub const ParseTask = struct { opts.tree_shaking = if (source.index.isRuntime()) true else bundler.options.tree_shaking; opts.module_type = task.module_type; - opts.features.unwrap_commonjs_packages = bundler.options.unwrap_commonjs_packages; task.jsx.parse = loader.isJSX(); @@ -2944,7 +3062,7 @@ pub const ParseTask = struct { ast.target = target; if (ast.parts.len <= 1) { - task.side_effects = _resolver.SideEffects.no_side_effects__empty_ast; + task.side_effects = .no_side_effects__empty_ast; } if (task.presolved_source_indices.len > 0) { @@ -2995,6 +3113,8 @@ pub const ParseTask = struct { const result = bun.default_allocator.create(Result) catch unreachable; result.* = .{ + .ctx = this.ctx, + .task = undefined, .value = brk: { if (run_( this, @@ -3040,13 +3160,24 @@ pub const ParseTask = struct { }, }; - worker.ctx.loop().enqueueTaskConcurrent( - Result, - BundleV2, - result, - BundleV2.onParseTaskComplete, - .task, - ); + switch (worker.ctx.loop().*) { + .js => |jsc_event_loop| { + jsc_event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.fromCallback(result, onComplete)); + }, + .mini => |*mini| { + mini.enqueueTaskConcurrentWithExtraCtx( + Result, + BundleV2, + result, + BundleV2.onParseTaskComplete, + .task, + ); + }, + } + } + + pub fn onComplete(result: *Result) void { + BundleV2.onParseTaskComplete(result, result.ctx); } }; @@ -3245,8 +3376,6 @@ pub const Graph = struct { use_directive_entry_points: UseDirective.List = .{}, - // const_values: std.HashMapUnmanaged(Ref, Expr, Ref.HashCtx, 80) = .{}, - estimated_file_loader_count: usize = 0, additional_output_files: std.ArrayListUnmanaged(options.OutputFile) = .{}, @@ -3555,8 +3684,8 @@ const LinkerGraph = struct { // Track that this specific symbol was imported if (source_index_to_import_from.get() != source_index) { - var to_bind = &g.meta.items(.imports_to_bind)[source_index]; - try to_bind.put(g.allocator, ref, .{ + const imports_to_bind = &g.meta.items(.imports_to_bind)[source_index]; + try imports_to_bind.put(g.allocator, ref, .{ .data = .{ .source_index = source_index_to_import_from, .import_ref = ref, @@ -3895,7 +4024,7 @@ pub const LinkerContext = struct { pending_task_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), pub const LinkerOptions = struct { - output_format: options.OutputFormat = .esm, + output_format: options.Format = .esm, ignore_dce_annotations: bool = false, emit_dce_annotations: bool = true, tree_shaking: bool = true, @@ -3994,7 +4123,7 @@ pub const LinkerContext = struct { record.source_index.get() != source_index; } - inline fn shouldCallRuntimeRequire(format: options.OutputFormat) bool { + inline fn shouldCallRuntimeRequire(format: options.Format) bool { return format != .cjs; } @@ -4203,6 +4332,7 @@ pub const LinkerContext = struct { chunks: []Chunk, allocator: std.mem.Allocator, source_id: u32, + pub fn next(c: *@This(), chunk_id: usize) void { _ = c.chunks[chunk_id].files_with_parts_in_chunk.getOrPut(c.allocator, @as(u32, @truncate(c.source_id))) catch unreachable; } @@ -4816,7 +4946,6 @@ pub const LinkerContext = struct { // for entry point files in CommonJS format (or when in pass-through mode). if (kind == .cjs and (!entry_point_kinds[id].isEntryPoint() or output_format == .iife or output_format == .esm)) { flags[id].wrap = .cjs; - bun.assert(kind == .cjs); } } @@ -4991,7 +5120,7 @@ pub const LinkerContext = struct { // then we'll be using the actual CommonJS "exports" and/or "module" // symbols. 
In that case make sure to mark them as such so they don't // get minified. - if ((output_format == .cjs or output_format == .preserve) and + if ((output_format == .cjs) and entry_point_kinds[source_index].isEntryPoint() and export_kind == .cjs and flag.wrap == .none) { @@ -5081,7 +5210,7 @@ pub const LinkerContext = struct { count += "init_".len + ident_fmt_len; } - if (wrap != .cjs and export_kind != .cjs) { + if (wrap != .cjs and export_kind != .cjs and this.options.output_format != .internal_kit_dev) { count += "exports_".len + ident_fmt_len; count += "module_".len + ident_fmt_len; } @@ -5129,7 +5258,7 @@ pub const LinkerContext = struct { // actual CommonJS files from being renamed. This is purely about // aesthetics and is not about correctness. This is done here because by // this point, we know the CommonJS status will not change further. - if (wrap != .cjs and export_kind != .cjs) { + if (wrap != .cjs and export_kind != .cjs and this.options.output_format != .internal_kit_dev) { const exports_name = builder.fmt("exports_{}", .{source.fmtIdentifier()}); const module_name = builder.fmt("module_{}", .{source.fmtIdentifier()}); @@ -5277,7 +5406,7 @@ pub const LinkerContext = struct { this.graph.meta.items(.entry_point_part_index)[id] = Index.part(entry_point_part_index); // Pull in the "__toCommonJS" symbol if we need it due to being an entry point - if (force_include_exports) { + if (force_include_exports and this.options.output_format != .internal_kit_dev) { this.graph.generateRuntimeSymbolImportAndUse( source_index, Index.part(entry_point_part_index), @@ -5288,7 +5417,7 @@ pub const LinkerContext = struct { } // Encode import-specific constraints in the dependency graph - var import_records: []ImportRecord = import_records_list[id].slice(); + const import_records: []ImportRecord = import_records_list[id].slice(); debug("Binding {d} imports for file {s} (#{d})", .{ import_records.len, source.path.text, id }); for (parts, 0..) |*part, part_index| { @@ -5304,6 +5433,8 @@ pub const LinkerContext = struct { // Don't follow external imports (this includes import() expressions) if (!record.source_index.isValid() or this.isExternalDynamicImport(record, source_index)) { + if (this.options.output_format == .internal_kit_dev) continue; + // This is an external import. Check if it will be a "require()" call. 
if (kind == .require or !output_format.keepES6ImportExportSyntax() or kind == .dynamic) { if (record.source_index.isValid() and kind == .dynamic and ast_flags[other_id].force_cjs_to_esm) { @@ -5375,7 +5506,7 @@ pub const LinkerContext = struct { // This is an ES6 import of a CommonJS module, so it needs the // "__toESM" wrapper as long as it's not a bare "require()" - if (kind != .require and other_export_kind == .cjs) { + if (kind != .require and other_export_kind == .cjs and this.options.output_format != .internal_kit_dev) { record.wrap_with_to_esm = true; to_esm_uses += 1; } @@ -5424,33 +5555,6 @@ pub const LinkerContext = struct { } } - // If there's an ES6 import of a CommonJS module, then we're going to need the - // "__toESM" symbol from the runtime to wrap the result of "require()" - this.graph.generateRuntimeSymbolImportAndUse( - source_index, - Index.part(part_index), - "__toESM", - to_esm_uses, - ) catch unreachable; - - // If there's a CommonJS require of an ES6 module, then we're going to need the - // "__toCommonJS" symbol from the runtime to wrap the exports object - this.graph.generateRuntimeSymbolImportAndUse( - source_index, - Index.part(part_index), - "__toCommonJS", - to_common_js_uses, - ) catch unreachable; - - // If there are unbundled calls to "require()" and we're not generating - // code for node, then substitute a "__require" wrapper for "require". - this.graph.generateRuntimeSymbolImportAndUse( - source_index, - Index.part(part_index), - "__require", - runtime_require_uses, - ) catch unreachable; - // If there's an ES6 export star statement of a non-ES6 module, then we're // going to need the "__reExport" symbol from the runtime var re_export_uses: u32 = 0; @@ -5498,13 +5602,41 @@ pub const LinkerContext = struct { } } - this.graph.generateRuntimeSymbolImportAndUse( - source_index, - Index.part(part_index), + if (this.options.output_format != .internal_kit_dev) { + // If there's an ES6 import of a CommonJS module, then we're going to need the + // "__toESM" symbol from the runtime to wrap the result of "require()" + this.graph.generateRuntimeSymbolImportAndUse( + source_index, + Index.part(part_index), + "__toESM", + to_esm_uses, + ) catch unreachable; - "__reExport", - re_export_uses, - ) catch unreachable; + // If there's a CommonJS require of an ES6 module, then we're going to need the + // "__toCommonJS" symbol from the runtime to wrap the exports object + this.graph.generateRuntimeSymbolImportAndUse( + source_index, + Index.part(part_index), + "__toCommonJS", + to_common_js_uses, + ) catch unreachable; + + // If there are unbundled calls to "require()" and we're not generating + // code for node, then substitute a "__require" wrapper for "require". + this.graph.generateRuntimeSymbolImportAndUse( + source_index, + Index.part(part_index), + "__require", + runtime_require_uses, + ) catch unreachable; + + this.graph.generateRuntimeSymbolImportAndUse( + source_index, + Index.part(part_index), + "__reExport", + re_export_uses, + ) catch unreachable; + } } } } @@ -5713,7 +5845,7 @@ pub const LinkerContext = struct { // Initialize the part that was allocated for us earlier. The information // here will be used after this during tree shaking. 
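For orientation, the helpers gated off above (`__toESM`, `__toCommonJS`, `__require`, `__reExport`) are the bundler's usual ESM/CJS interop shims. A conceptual sketch of the first one, not Bun's actual runtime.js, just the idea, might look like:

// Conceptual sketch only: given a CommonJS `module.exports`, return something
// that ESM import syntax can consume, adding a `default` binding when the CJS
// module did not provide one itself.
function toESM(exports: Record<string, unknown>): Record<string, unknown> {
  return "default" in exports ? exports : { ...exports, default: exports };
}

Kit can skip emitting these because `module.importSync` (see the statement rewriting later in this file) handles the ESM/CJS mismatch at runtime rather than baking wrapper calls into every chunk.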
c.graph.ast.items(.parts)[id].slice()[js_ast.namespace_export_part_index] = .{ - .stmts = all_export_stmts, + .stmts = if (c.options.output_format != .internal_kit_dev) all_export_stmts else &.{}, .symbol_uses = ns_export_symbol_uses, .dependencies = js_ast.Dependency.List.fromList(ns_export_dependencies), .declared_symbols = declared_symbols, @@ -6840,10 +6972,7 @@ pub const LinkerContext = struct { const runtimeRequireRef = if (c.resolver.opts.target.isBun()) null else c.graph.symbols.follow(runtime_members.get("__require").?.ref); { - // TODO: IIFE indent - const print_options = js_printer.Options{ - // TODO: IIFE indent .indent = .{}, .has_run_symbol_renamer = true, @@ -6954,7 +7083,31 @@ pub const LinkerContext = struct { // TODO: directive - // TODO: IIFE wrap + // For Kit, hoist runtime.js outside of the IIFE + const compile_results = chunk.compile_results_for_chunk; + if (c.options.output_format == .internal_kit_dev) { + for (compile_results) |compile_result| { + const source_index = compile_result.sourceIndex(); + if (source_index != Index.runtime.value) break; + line_offset.advance(compile_result.code()); + j.push(compile_result.code(), bun.default_allocator); + } + } + + switch (c.options.output_format) { + .internal_kit_dev => { + const start = bun.kit.getHmrRuntime(if (c.options.target.isBun()) .server else .client); + j.pushStatic(start); + line_offset.advance(start); + }, + .iife => { + // Bun does not do arrow function lowering. So the wrapper can be an arrow. + const start = if (c.options.minify_whitespace) "(()=>{" else "(() => {\n"; + j.pushStatic(start); + line_offset.advance(start); + }, + else => {}, // no wrapper + } if (cross_chunk_prefix.len > 0) { newline_before_comment = true; @@ -6963,31 +7116,31 @@ pub const LinkerContext = struct { } // Concatenate the generated JavaScript chunks together - var prev_filename_comment: Index.Int = 0; - const compile_results = chunk.compile_results_for_chunk; - var compile_results_for_source_map = std.MultiArrayList(CompileResultForSourceMap){}; - compile_results_for_source_map.ensureUnusedCapacity(worker.allocator, compile_results.len) catch unreachable; + var compile_results_for_source_map: std.MultiArrayList(CompileResultForSourceMap) = .{}; + compile_results_for_source_map.setCapacity(worker.allocator, compile_results.len) catch bun.outOfMemory(); + + const show_comments = c.options.mode == .bundle and + !c.options.minify_whitespace; const sources: []const Logger.Source = c.parse_graph.input_files.items(.source); - for (@as([]CompileResult, compile_results)) |compile_result| { + for (compile_results) |compile_result| { const source_index = compile_result.sourceIndex(); const is_runtime = source_index == Index.runtime.value; // TODO: extracated legal comments // Add a comment with the file path before the file contents - if (c.options.mode == .bundle and !c.options.minify_whitespace and source_index != prev_filename_comment and compile_result.code().len > 0) { + if (show_comments and source_index != prev_filename_comment and compile_result.code().len > 0) { prev_filename_comment = source_index; + if (newline_before_comment) { j.pushStatic("\n"); line_offset.advance("\n"); } - // Make sure newlines in the path can't cause a syntax error. This does - // not minimize allocations because it's expected that this case never - // comes up in practice. + // Make sure newlines in the path can't cause a syntax error. 
const CommentType = enum { multiline, single, @@ -7001,6 +7154,13 @@ pub const LinkerContext = struct { else CommentType.single; + if (!c.options.minify_whitespace and + (c.options.output_format == .iife or c.options.output_format == .internal_kit_dev)) + { + j.pushStatic(" "); + line_offset.advance(" "); + } + switch (comment_type) { .multiline => { j.pushStatic("/* "); @@ -7025,12 +7185,13 @@ pub const LinkerContext = struct { line_offset.advance("\n"); }, } - prev_filename_comment = source_index; } if (is_runtime) { - line_offset.advance(compile_result.code()); - j.push(compile_result.code(), bun.default_allocator); + if (c.options.output_format != .internal_kit_dev) { + line_offset.advance(compile_result.code()); + j.push(compile_result.code(), bun.default_allocator); + } } else { j.push(compile_result.code(), bun.default_allocator); @@ -7070,15 +7231,44 @@ pub const LinkerContext = struct { j.push(cross_chunk_suffix, bun.default_allocator); } - if (c.options.output_format == .iife) { - const without_newline = "})();"; + switch (c.options.output_format) { + .iife => { + const without_newline = "})();"; - const with_newline = if (newline_before_comment) - without_newline ++ "\n" - else - without_newline; + const with_newline = if (newline_before_comment) + without_newline ++ "\n" + else + without_newline; - j.pushStatic(with_newline); + j.pushStatic(with_newline); + }, + .internal_kit_dev => { + { + const str = "}, {\n main: "; + j.pushStatic(str); + line_offset.advance(str); + } + { + const input = c.parse_graph.input_files.items(.source)[chunk.entry_point.source_index].path; + // var buf = MutableString.initEmpty(c.allocator); + // js_printer.quoteForJSONBuffer(input.pretty, &buf, true) catch bun.outOfMemory(); + // const str = buf.toOwnedSliceLeaky(); // c.allocator is an arena + const str = try std.fmt.allocPrint(c.allocator, "{d}", .{input.hashForKit()}); + j.pushStatic(str); + line_offset.advance(str); + } + // { + // const str = "\n react_refresh: "; + // j.pushStatic(str); + // line_offset.advance(str); + // } + { + const str = "\n});"; + j.pushStatic(str); + line_offset.advance(str); + } + }, + else => {}, } j.ensureNewlineAtEnd(); @@ -7370,9 +7560,6 @@ pub const LinkerContext = struct { const ast: JSAst = c.graph.ast.get(source_index); switch (c.options.output_format) { - // TODO: - .preserve => {}, - .esm => { switch (flags.wrap) { .cjs => { @@ -7687,6 +7874,11 @@ pub const LinkerContext = struct { // TODO: iife .iife => {}, + .internal_kit_dev => { + // nothing needs to be done here, as the exports are already + // forwarded in the module closure. + }, + .cjs => { switch (flags.wrap) { .cjs => { @@ -8027,7 +8219,6 @@ pub const LinkerContext = struct { /// In that case, when bundling, we still need to preserve that module /// namespace object (foo) because we cannot know what they are going to /// attempt to access statically - /// fn convertStmtsForChunk( c: *LinkerContext, source_index: u32, @@ -8038,6 +8229,12 @@ pub const LinkerContext = struct { wrap: WrapKind, ast: *const JSAst, ) !void { + // for Bun Kit, export wrapping is already done. Import wrapping is special cased. 
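Piecing together the prefix and suffix handling above, an `internal_kit_dev` chunk appears to take roughly the following shape. This is a sketch only: `loadModules` is a stand-in defined here just so the snippet type-checks (the real opening call comes from src/kit/hmr-runtime.ts, which this hunk does not show), and the paths, hashes, and module bodies are invented. runtime.js is concatenated before the prelude, outside the wrapper, as the loop above shows.

// Stand-in for whatever call the hmr-runtime prelude leaves open.
const loadModules = (
  modules: Record<number, (module: unknown) => void>,
  options: { main: number },
): void => {
  void modules;
  void options;
};

loadModules({
  /* src/index.ts */ 1111: (module) => { /* rewritten module body */ },
  /* src/db.ts */ 2222: (module) => { /* rewritten module body */ },
}, {
  main: 1111,
});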
+ if (c.options.output_format == .internal_kit_dev and source_index != Index.runtime.value) { + try c.convertStmtsForChunkKit(source_index, stmts, part_stmts, allocator, ast); + return; + } + const shouldExtractESMStmtsForWrap = wrap != .none; const shouldStripExports = c.options.mode != .passthrough or c.graph.files.items(.entry_point_kind)[source_index] != .none; @@ -8348,7 +8545,6 @@ pub const LinkerContext = struct { }, .s_function => |s| { - // Strip the "export" keyword while bundling if (shouldStripExports and s.func.flags.contains(.is_export)) { // Be c areful to not modify the original statement @@ -8364,10 +8560,9 @@ pub const LinkerContext = struct { }, .s_class => |s| { - // Strip the "export" keyword while bundling if (shouldStripExports and s.is_export) { - // Be c areful to not modify the original statement + // Be careful to not modify the original statement stmt = Stmt.alloc( S.Class, S.Class{ @@ -8382,7 +8577,7 @@ .s_local => |s| { // Strip the "export" keyword while bundling if (shouldStripExports and s.is_export) { - // Be c areful to not modify the original statement + // Be careful to not modify the original statement stmt = Stmt.alloc( S.Local, s.*, @@ -8526,6 +8721,112 @@ } } + /// The conversion logic is completely different for format .internal_kit_dev + fn convertStmtsForChunkKit( + c: *LinkerContext, + source_index: u32, + stmts: *StmtList, + part_stmts: []const js_ast.Stmt, + allocator: std.mem.Allocator, + ast: *const JSAst, + ) !void { + _ = source_index; // autofix + + const receiver_args = try allocator.dupe(G.Arg, &.{ + .{ .binding = Binding.alloc(allocator, B.Identifier{ .ref = ast.module_ref }, Logger.Loc.Empty) }, + }); + const module_id = Expr.initIdentifier(ast.module_ref, Logger.Loc.Empty); + for (part_stmts) |stmt| { + switch (stmt.data) { + else => { + try stmts.inside_wrapper_suffix.append(stmt); + }, + .s_local => |st| { + // TODO: check if this local is immediately assigned + // `require()`; if so, we will instrument it with hot module + // reloading. other cases of `require` won't receive + // updates. + _ = st; // autofix + + try stmts.inside_wrapper_suffix.append(stmt); + }, + .s_import => |st| { + // hmr-runtime.ts defines `module.importSync` to be + // a synchronous import. this is different from + // require in that esm <-> cjs is handled + // automatically, instead of with bundler-added + // annotations like '__commonJS'. + // + // this is not done in the parse step because the final + // pretty path is not yet known. the other statement types + // are not handled here because some of those generate + // new local variables (it is too late to do that here).
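To illustrate the rewriting described by the comment above (and constructed by the code just below), here is roughly what one rewritten module looks like in the dev output. Everything here is illustrative: `HotModule` is an assumed shape for the `module` object that hmr-runtime.ts passes in, `module_example` is an invented name, and the numeric keys stand for `path.hashForKit()` values.

// Assumed shape of the object hmr-runtime.ts hands to each module closure.
type HotModule = {
  importSync: (key: number, receiver?: (ns: any) => void) => any;
  exports: Record<string, any>;
};

// Each input file becomes a function of `module` (see the closure wrapping
// later in this file). Inside it:
const module_example = (module: HotModule) => {
  // `import { query } from "./db"` becomes a `let` plus an importSync call; the
  // callback lets the runtime (re)assign the namespace binding:
  let db = module.importSync(/* ./db */ 1111, (m) => (db = m));
  // A bare `import "./side-effect"` has no binding to update, so no callback:
  module.importSync(/* ./side-effect */ 2222);

  module.exports.handler = () => db.query("select 1");
};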
+ const record = ast.import_records.at(st.import_record_index); + const path = c.parse_graph.input_files.items(.source)[record.source_index.get()].path; + + const is_bare_import = st.star_name_loc == null and st.items.len == 0 and st.default_name == null; + + const key_expr = Expr.init(E.InlinedEnum, .{ + .comment = path.pretty, + .value = Expr.init(E.Number, .{ + .value = @floatFromInt(path.hashForKit()), + }, stmt.loc), + }, stmt.loc); + + // module.importSync('path', (module) => ns = module) + const call = Expr.init(E.Call, .{ + .target = Expr.init(E.Dot, .{ + .target = module_id, + .name = "importSync", + .name_loc = stmt.loc, + }, stmt.loc), + .args = js_ast.ExprNodeList.init( + try allocator.dupe(Expr, if (is_bare_import) + &.{key_expr} + else + &.{ + key_expr, + Expr.init(E.Arrow, .{ + .args = receiver_args, + .body = .{ + .stmts = try allocator.dupe(Stmt, &.{Stmt.alloc(S.Return, .{ + .value = Expr.assign( + Expr.initIdentifier(st.namespace_ref, st.star_name_loc orelse stmt.loc), + module_id, + ), + }, stmt.loc)}), + .loc = stmt.loc, + }, + .prefer_expr = true, + }, stmt.loc), + }), + ), + }, stmt.loc); + + if (is_bare_import) { + // the import value is never read + try stmts.inside_wrapper_prefix.append(Stmt.alloc(S.SExpr, .{ .value = call }, stmt.loc)); + } else { + // 'let namespace = module.importSync(...)' + try stmts.inside_wrapper_prefix.append(Stmt.alloc(S.Local, .{ + .kind = .k_let, + .decls = try G.Decl.List.fromSlice(allocator, &.{.{ + .binding = Binding.alloc( + allocator, + B.Identifier{ .ref = st.namespace_ref }, + st.star_name_loc orelse stmt.loc, + ), + .value = call, + }}), + }, stmt.loc)); + } + + continue; + }, + } + } + } + fn runtimeFunction(c: *LinkerContext, name: []const u8) Ref { return c.graph.runtimeFunction(name); } @@ -8543,10 +8844,7 @@ pub const LinkerContext = struct { allocator: std.mem.Allocator, temp_allocator: std.mem.Allocator, ) js_printer.PrintResult { - - // var file = &c.graph.files.items(.input_file)[part.source_index.get()]; const parts: []js_ast.Part = c.graph.ast.items(.parts)[part_range.source_index.get()].slice()[part_range.part_index_begin..part_range.part_index_end]; - // const resolved_exports: []ResolvedExports = c.graph.meta.items(.resolved_exports); const all_flags: []const JSMeta.Flags = c.graph.meta.items(.flags); const flags = all_flags[part_range.source_index.get()]; const wrapper_part_index = if (flags.wrap != .none) @@ -8585,8 +8883,9 @@ pub const LinkerContext = struct { temp_allocator, flags.wrap, &ast, - ) catch |err| return .{ - .err = err, + ) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + return .{ .err = err }; }; switch (flags.wrap) { @@ -8741,24 +9040,55 @@ pub const LinkerContext = struct { var out_stmts: []js_ast.Stmt = stmts.all_stmts.items; + // Turn each module into a function if this is Kit + var stmt_storage: Stmt = undefined; + if (c.options.output_format == .internal_kit_dev and !part_range.source_index.isRuntime()) { + var clousure_args = std.BoundedArray(G.Arg, 2).fromSlice(&.{ + .{ .binding = Binding.alloc(temp_allocator, B.Identifier{ + .ref = ast.module_ref, + }, Logger.Loc.Empty) }, + }) catch unreachable; // is within bounds + + if (flags.wrap == .cjs and ast.flags.uses_exports_ref) { + clousure_args.appendAssumeCapacity( + .{ + .binding = Binding.alloc(temp_allocator, B.Identifier{ + .ref = ast.exports_ref, + }, Logger.Loc.Empty), + .default = Expr.allocate(temp_allocator, E.Dot, .{ + .target = Expr.initIdentifier(ast.module_ref, Logger.Loc.Empty), + .name = "exports", + 
.name_loc = Logger.Loc.Empty, + }, Logger.Loc.Empty), + }, + ); + } + + stmt_storage = Stmt.allocateExpr(temp_allocator, Expr.init(E.Function, .{ .func = .{ + .args = temp_allocator.dupe(G.Arg, clousure_args.slice()) catch bun.outOfMemory(), + .body = .{ + .stmts = stmts.all_stmts.items, + .loc = Logger.Loc.Empty, + }, + } }, Logger.Loc.Empty)); + out_stmts = (&stmt_storage)[0..1]; + } // Optionally wrap all statements in a closure - if (needs_wrapper) { + else if (needs_wrapper) { switch (flags.wrap) { .cjs => { - const uses_exports_ref = ast.uses_exports_ref(); - // Only include the arguments that are actually used - var args = std.ArrayList(js_ast.G.Arg).initCapacity( + var args = std.ArrayList(G.Arg).initCapacity( temp_allocator, - if (ast.uses_module_ref() or uses_exports_ref) 2 else 0, + if (ast.flags.uses_module_ref or ast.flags.uses_exports_ref) 2 else 0, ) catch unreachable; - if (ast.uses_module_ref() or uses_exports_ref) { + if (ast.flags.uses_module_ref or ast.flags.uses_exports_ref) { args.appendAssumeCapacity( - js_ast.G.Arg{ - .binding = js_ast.Binding.alloc( + G.Arg{ + .binding = Binding.alloc( temp_allocator, - js_ast.B.Identifier{ + B.Identifier{ .ref = ast.exports_ref, }, Logger.Loc.Empty, @@ -8766,12 +9096,12 @@ pub const LinkerContext = struct { }, ); - if (ast.uses_module_ref()) { + if (ast.flags.uses_module_ref) { args.appendAssumeCapacity( - js_ast.G.Arg{ - .binding = js_ast.Binding.alloc( + G.Arg{ + .binding = Binding.alloc( temp_allocator, - js_ast.B.Identifier{ + B.Identifier{ .ref = ast.module_ref, }, Logger.Loc.Empty, @@ -9059,7 +9389,10 @@ pub const LinkerContext = struct { .indent = .{}, .commonjs_named_exports = ast.commonjs_named_exports, .commonjs_named_exports_ref = ast.exports_ref, - .commonjs_module_ref = if (ast.flags.uses_module_ref) ast.module_ref else Ref.None, + .commonjs_module_ref = if (ast.flags.uses_module_ref or c.options.output_format == .internal_kit_dev) + ast.module_ref + else + Ref.None, .commonjs_named_exports_deoptimized = flags.wrap == .cjs, .commonjs_module_exports_assigned_deoptimized = ast.flags.commonjs_module_exports_assigned_deoptimized, // .const_values = c.graph.const_values, @@ -9067,14 +9400,17 @@ pub const LinkerContext = struct { .minify_whitespace = c.options.minify_whitespace, .minify_syntax = c.options.minify_syntax, - .module_type = c.options.output_format, + .module_type = switch (c.options.output_format) { + else => |format| format, + .internal_kit_dev => if (part_range.source_index.isRuntime()) .esm else .internal_kit_dev, + }, .print_dce_annotations = c.options.emit_dce_annotations, .has_run_symbol_renamer = true, .allocator = allocator, .to_esm_ref = toESMRef, .to_commonjs_ref = toCommonJSRef, - .require_ref = runtimeRequireRef, + .require_ref = if (c.options.output_format == .internal_kit_dev) ast.require_ref else runtimeRequireRef, .require_or_import_meta_for_source_callback = js_printer.RequireOrImportMeta.Callback.init( LinkerContext, requireOrImportMetaForSource, @@ -9082,6 +9418,11 @@ pub const LinkerContext = struct { ), .line_offset_tables = c.graph.files.items(.line_offset_table)[part_range.source_index.get()], .target = c.options.target, + + .input_files_for_kit = if (c.options.output_format == .internal_kit_dev and !part_range.source_index.isRuntime()) + c.parse_graph.input_files.items(.source) + else + null, }; writer.buffer.reset(); @@ -9178,7 +9519,7 @@ pub const LinkerContext = struct { for (chunks, chunk_contexts) |*chunk, *chunk_ctx| { chunk_ctx.* = .{ .wg = wait_group, .c = c, .chunks = chunks, 
.chunk = chunk }; total_count += chunk.content.javascript.parts_in_chunk_in_order.len; - chunk.compile_results_for_chunk = c.allocator.alloc(CompileResult, chunk.content.javascript.parts_in_chunk_in_order.len) catch unreachable; + chunk.compile_results_for_chunk = c.allocator.alloc(CompileResult, chunk.content.javascript.parts_in_chunk_in_order.len) catch bun.outOfMemory(); } debug(" START {d} compiling part ranges", .{total_count}); @@ -10626,33 +10967,33 @@ pub const LinkerContext = struct { // However, that generation is special-cased for various reasons and is // done later on. Still, we're going to need to ensure that this file // both depends on the "__commonJS" symbol and declares the "require_foo" - // symbol. Instead of special-casing this during the reachablity analysis + // symbol. Instead of special-casing this during the reachability analysis // below, we just append a dummy part to the end of the file with these - // dependencies and let the general-purpose reachablity analysis take care + // dependencies and let the general-purpose reachability analysis take care // of it. .cjs => { const common_js_parts = c.topLevelSymbolsToPartsForRuntime(c.cjs_runtime_ref); - var total_dependencies_count = common_js_parts.len; - var runtime_parts = c.graph.ast.items(.parts)[Index.runtime.get()].slice(); - for (common_js_parts) |part_id| { - var part: *js_ast.Part = &runtime_parts[part_id]; + const runtime_parts = c.graph.ast.items(.parts)[Index.runtime.get()].slice(); + const part: *js_ast.Part = &runtime_parts[part_id]; const symbol_refs = part.symbol_uses.keys(); for (symbol_refs) |ref| { if (ref.eql(c.cjs_runtime_ref)) continue; - total_dependencies_count += c.topLevelSymbolsToPartsForRuntime(ref).len; } } - // generate a dummy part that depends on the "__commonJS" symbol - const dependencies = c.allocator.alloc(js_ast.Dependency, common_js_parts.len) catch unreachable; - for (common_js_parts, dependencies) |part, *cjs| { - cjs.* = .{ - .part_index = part, - .source_index = Index.runtime, - }; - } + // Generate a dummy part that depends on the "__commonJS" symbol. + const dependencies: []js_ast.Dependency = if (c.options.output_format != .internal_kit_dev) brk: { + const dependencies = c.allocator.alloc(js_ast.Dependency, common_js_parts.len) catch bun.outOfMemory(); + for (common_js_parts, dependencies) |part, *cjs| { + cjs.* = .{ + .part_index = part, + .source_index = Index.runtime, + }; + } + break :brk dependencies; + } else &.{}; const part_index = c.graph.addPartToFile( source_index, .{ @@ -10677,13 +11018,17 @@ pub const LinkerContext = struct { ) catch unreachable; bun.assert(part_index != js_ast.namespace_export_part_index); wrapper_part_index.* = Index.part(part_index); - c.graph.generateSymbolImportAndUse( - source_index, - part_index, - c.cjs_runtime_ref, - 1, - Index.runtime, - ) catch unreachable; + + // Kit uses a wrapping approach that does not use __commonJS + if (c.options.output_format != .internal_kit_dev) { + c.graph.generateSymbolImportAndUse( + source_index, + part_index, + c.cjs_runtime_ref, + 1, + Index.runtime, + ) catch unreachable; + } }, .esm => { @@ -10697,7 +11042,7 @@ pub const LinkerContext = struct { // // This depends on the "__esm" symbol and declares the "init_foo" symbol // for similar reasons to the CommonJS closure above. 
- const esm_parts = if (wrapper_ref.isValid()) + const esm_parts = if (wrapper_ref.isValid() and c.options.output_format != .internal_kit_dev) c.topLevelSymbolsToPartsForRuntime(c.esm_runtime_ref) else &.{}; @@ -10729,15 +11074,14 @@ pub const LinkerContext = struct { ) catch unreachable; bun.assert(part_index != js_ast.namespace_export_part_index); wrapper_part_index.* = Index.part(part_index); - - if (wrapper_ref.isValid()) { + if (wrapper_ref.isValid() and c.options.output_format != .internal_kit_dev) { c.graph.generateSymbolImportAndUse( source_index, part_index, c.esm_runtime_ref, 1, Index.runtime, - ) catch unreachable; + ) catch bun.outOfMemory(); } }, else => {}, @@ -11214,7 +11558,7 @@ pub const LinkerContext = struct { export_star_map: std.AutoHashMap(Index.Int, void), entry_point_kinds: []EntryPoint.Kind, export_star_records: [][]u32, - output_format: options.OutputFormat, + output_format: options.Format, pub fn hasDynamicExportsDueToExportStar(this: *DependencyWrapper, source_index: Index.Int) bool { // Terminate the traversal now if this file already has dynamic exports diff --git a/src/cli.zig b/src/cli.zig index 027ce2b495..ad59352479 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -252,6 +252,7 @@ pub const Arguments = struct { clap.parseParam("--entry-naming Customize entry point filenames. Defaults to \"[dir]/[name].[ext]\"") catch unreachable, clap.parseParam("--chunk-naming Customize chunk filenames. Defaults to \"[name]-[hash].[ext]\"") catch unreachable, clap.parseParam("--asset-naming Customize asset filenames. Defaults to \"[name]-[hash].[ext]\"") catch unreachable, + clap.parseParam("--react-fast-refresh Enable React Fast Refresh transform (does not emit hot-module code, use this for testing)") catch unreachable, clap.parseParam("--server-components Enable React Server Components (experimental)") catch unreachable, clap.parseParam("--no-bundle Transpile file only, do not bundle") catch unreachable, clap.parseParam("--emit-dce-annotations Re-emit DCE annotations in bundles. Enabled by default unless --minify-whitespace is passed.") catch unreachable, @@ -737,9 +738,9 @@ pub const Arguments = struct { !ctx.bundler_options.minify_whitespace; if (args.options("--external").len > 0) { - var externals = try allocator.alloc([]u8, args.options("--external").len); + var externals = try allocator.alloc([]const u8, args.options("--external").len); for (args.options("--external"), 0..) |external, i| { - externals[i] = @constCast(external); + externals[i] = external; } opts.external = externals; } @@ -815,16 +816,27 @@ pub const Arguments = struct { if (args.option("--format")) |format_str| { const format = options.Format.fromString(format_str) orelse { - Output.prettyErrorln("error: Invalid format - must be esm, cjs, or iife", .{}); + Output.errGeneric("Invalid format - must be esm, cjs, or iife", .{}); Global.crash(); }; + switch (format) { - .esm => {}, - else => { - Output.prettyErrorln("error: Formats besides 'esm' are not implemented", .{}); - Global.crash(); + .internal_kit_dev => { + bun.Output.warn("--format={s} is for debugging only, and may experience breaking changes at any moment", .{format_str}); + bun.Output.flush(); }, + .cjs => { + // Make this a soft error in debug to allow experimenting with these flags. 
+ const function = if (Environment.isDebug) Output.debugWarn else Output.errGeneric; + function("Format '{s}' are not implemented", .{@tagName(format)}); + if (!Environment.isDebug) { + Global.crash(); + } + }, + else => {}, } + + ctx.bundler_options.output_format = format; } if (args.flag("--splitting")) { @@ -843,10 +855,12 @@ pub const Arguments = struct { ctx.bundler_options.asset_naming = try strings.concat(allocator, &.{ "./", bun.strings.removeLeadingDotSlash(asset_naming) }); } - if (comptime FeatureFlags.react_server_components) { - if (args.flag("--server-components")) { - ctx.bundler_options.react_server_components = true; - } + if (args.flag("--server-components")) { + ctx.bundler_options.react_server_components = true; + } + + if (args.flag("--react-fast-refresh")) { + ctx.bundler_options.react_fast_refresh = true; } if (args.option("--sourcemap")) |setting| { @@ -1307,6 +1321,7 @@ pub const Command = struct { chunk_naming: []const u8 = "./[name]-[hash].[ext]", asset_naming: []const u8 = "./[name]-[hash].[ext]", react_server_components: bool = false, + react_fast_refresh: bool = false, code_splitting: bool = false, transform_only: bool = false, inline_entrypoint_import_meta_main: bool = false, @@ -1315,6 +1330,7 @@ pub const Command = struct { minify_identifiers: bool = false, ignore_dce_annotations: bool = false, emit_dce_annotations: bool = true, + output_format: options.Format = .esm, }; pub fn create(allocator: std.mem.Allocator, log: *logger.Log, comptime command: Command.Tag) anyerror!Context { diff --git a/src/cli/build_command.zig b/src/cli/build_command.zig index 29e9c606db..f5bb2afd6c 100644 --- a/src/cli/build_command.zig +++ b/src/cli/build_command.zig @@ -77,10 +77,8 @@ pub const BuildCommand = struct { var this_bundler = try bundler.Bundler.init(allocator, log, ctx.args, null); this_bundler.options.source_map = options.SourceMapOption.fromApi(ctx.args.source_map); - this_bundler.resolver.opts.source_map = options.SourceMapOption.fromApi(ctx.args.source_map); this_bundler.options.compile = ctx.bundler_options.compile; - this_bundler.resolver.opts.compile = ctx.bundler_options.compile; if (this_bundler.options.source_map == .external and ctx.bundler_options.outdir.len == 0 and !ctx.bundler_options.compile) { Output.prettyErrorln("error: cannot use an external source map without --outdir", .{}); @@ -90,38 +88,18 @@ pub const BuildCommand = struct { var outfile = ctx.bundler_options.outfile; this_bundler.options.public_path = ctx.bundler_options.public_path; - this_bundler.resolver.opts.public_path = ctx.bundler_options.public_path; - this_bundler.options.entry_naming = ctx.bundler_options.entry_naming; this_bundler.options.chunk_naming = ctx.bundler_options.chunk_naming; this_bundler.options.asset_naming = ctx.bundler_options.asset_naming; - this_bundler.resolver.opts.entry_naming = ctx.bundler_options.entry_naming; - this_bundler.resolver.opts.chunk_naming = ctx.bundler_options.chunk_naming; - this_bundler.resolver.opts.asset_naming = ctx.bundler_options.asset_naming; - this_bundler.options.react_server_components = ctx.bundler_options.react_server_components; - this_bundler.resolver.opts.react_server_components = ctx.bundler_options.react_server_components; - + this_bundler.options.react_fast_refresh = ctx.bundler_options.react_fast_refresh; this_bundler.options.inline_entrypoint_import_meta_main = ctx.bundler_options.inline_entrypoint_import_meta_main; - this_bundler.resolver.opts.inline_entrypoint_import_meta_main = 
ctx.bundler_options.inline_entrypoint_import_meta_main; - this_bundler.options.code_splitting = ctx.bundler_options.code_splitting; - this_bundler.resolver.opts.code_splitting = ctx.bundler_options.code_splitting; - this_bundler.options.minify_syntax = ctx.bundler_options.minify_syntax; - this_bundler.resolver.opts.minify_syntax = ctx.bundler_options.minify_syntax; - this_bundler.options.minify_whitespace = ctx.bundler_options.minify_whitespace; - this_bundler.resolver.opts.minify_whitespace = ctx.bundler_options.minify_whitespace; - this_bundler.options.minify_identifiers = ctx.bundler_options.minify_identifiers; - this_bundler.resolver.opts.minify_identifiers = ctx.bundler_options.minify_identifiers; - this_bundler.options.emit_dce_annotations = ctx.bundler_options.emit_dce_annotations; - this_bundler.resolver.opts.emit_dce_annotations = ctx.bundler_options.emit_dce_annotations; - this_bundler.options.ignore_dce_annotations = ctx.bundler_options.ignore_dce_annotations; - this_bundler.resolver.opts.ignore_dce_annotations = ctx.bundler_options.ignore_dce_annotations; if (ctx.bundler_options.compile) { if (ctx.bundler_options.code_splitting) { @@ -139,7 +117,6 @@ pub const BuildCommand = struct { const base_public_path = bun.StandaloneModuleGraph.targetBasePublicPath(compile_target.os, "root/"); this_bundler.options.public_path = base_public_path; - this_bundler.resolver.opts.public_path = base_public_path; if (outfile.len == 0) { outfile = std.fs.path.basename(this_bundler.options.entry_points[0]); @@ -185,7 +162,7 @@ pub const BuildCommand = struct { } this_bundler.options.output_dir = ctx.bundler_options.outdir; - this_bundler.resolver.opts.output_dir = ctx.bundler_options.outdir; + this_bundler.options.output_format = ctx.bundler_options.output_format; var src_root_dir_buf: bun.PathBuffer = undefined; const src_root_dir: string = brk1: { @@ -214,17 +191,15 @@ pub const BuildCommand = struct { }; this_bundler.options.root_dir = src_root_dir; - this_bundler.resolver.opts.root_dir = src_root_dir; - - this_bundler.options.react_server_components = ctx.bundler_options.react_server_components; - this_bundler.resolver.opts.react_server_components = ctx.bundler_options.react_server_components; this_bundler.options.code_splitting = ctx.bundler_options.code_splitting; - this_bundler.resolver.opts.code_splitting = ctx.bundler_options.code_splitting; this_bundler.options.transform_only = ctx.bundler_options.transform_only; + if (this_bundler.options.transform_only) { this_bundler.options.resolve_mode = .disable; } + this_bundler.resolver.opts = this_bundler.options; + this_bundler.configureLinker(); // This step is optional @@ -352,7 +327,7 @@ pub const BuildCommand = struct { std.fs.cwd() else std.fs.cwd().makeOpenPath(root_path, .{}) catch |err| { - Output.prettyErrorln("{s} while attemping to open output directory {}", .{ @errorName(err), bun.fmt.quote(root_path) }); + Output.prettyErrorln("{s} while attempting to open output directory {}", .{ @errorName(err), bun.fmt.quote(root_path) }); exitOrWatch(1, ctx.debug.hot_reload == .watch); unreachable; }; diff --git a/src/cli/test_command.zig b/src/cli/test_command.zig index 86201c5526..a6ccf6f4ad 100644 --- a/src/cli/test_command.zig +++ b/src/cli/test_command.zig @@ -1206,7 +1206,7 @@ pub const TestCommand = struct { reporter.summary.files += 1; switch (promise.status(vm.global.vm())) { - .Rejected => { + .rejected => { _ = vm.unhandledRejection(vm.global, promise.result(vm.global.vm()), promise.asValue()); reporter.summary.fail += 1; diff --git 
a/src/codegen/buildTypeFlag.ts b/src/codegen/buildTypeFlag.ts index fa4717f696..78800cbe3d 100644 --- a/src/codegen/buildTypeFlag.ts +++ b/src/codegen/buildTypeFlag.ts @@ -4,13 +4,15 @@ const buildTypeFlag = process.argv.find(argv => { } }); -const enum BuildType { +enum BuildType { debug, release, } + if (buildTypeFlag) { process.argv.splice(process.argv.indexOf(buildTypeFlag), 1); } + let buildType = buildTypeFlag ? BuildType[buildTypeFlag.split("=")[1].toLowerCase()] : BuildType.release; export { BuildType, buildType }; diff --git a/src/codegen/kit-codegen.ts b/src/codegen/kit-codegen.ts new file mode 100644 index 0000000000..e6ec16d40a --- /dev/null +++ b/src/codegen/kit-codegen.ts @@ -0,0 +1,152 @@ +import { join, basename } from 'node:path'; +import { writeFileSync, existsSync } from 'node:fs'; +import assert from 'node:assert'; + +// arg parsing +const options = {}; +for (const arg of process.argv.slice(2)) { + if(!arg.startsWith('--')) { + console.error('Unknown argument ' + arg); + process.exit(1); + } + const split = arg.split('='); + const value = split[1] || 'true'; + options[split[0].slice(2)] = value; +} + +let { codegen_root, debug } = options as any; +if (!codegen_root) {console.error('Missing --codegen_root=...'); process.exit(1);} +if (debug === 'false' || debug === '0' || debug == 'OFF') debug = false; + +const kit_dir = join(import.meta.dirname, '../kit'); +process.chdir(kit_dir); // to make bun build predictable in development + +const results = await Promise.allSettled(['client', 'server'].map(async mode => { + let result = await Bun.build({ + entrypoints: [join(kit_dir, 'hmr-runtime.ts')], + define: { + mode: JSON.stringify(mode), + IS_BUN_DEVELOPMENT: String(!!debug), + }, + minify: { + syntax: true, + } + }); + if(!result.success) throw new AggregateError(result.logs); + assert(result.outputs.length === 1, 'must bundle to a single file'); + // @ts-ignore + let code = await result.outputs[0].text(); + + // A second pass is used to convert global variables into parameters, while + // allowing for renaming to properly function when minification is enabled. + const in_names = [ + 'input_graph', + 'config', + mode === 'server' && 'server_fetch_function' + ].filter(Boolean); + const combined_source = ` + __marker__; + let ${in_names.join(',')}; + __marker__(${in_names.join(',')}); + ${code}; + `; + const generated_entrypoint = join(kit_dir, `.runtime-${mode}.generated.ts`); + + writeFileSync(generated_entrypoint, combined_source); + using _ = {[Symbol.dispose] : () => { + try { + rmSync(generated_entrypoint); + } catch {} + }}; + + result = await Bun.build({ + entrypoints: [generated_entrypoint], + minify: { + syntax: true, + whitespace: !debug, + identifiers: !debug, + } + }); + if(!result.success) throw new AggregateError(result.logs); + assert(result.outputs.length === 1, 'must bundle to a single file'); + // @ts-ignore + code = await result.outputs[0].text(); + + let names: string = ''; + code = code + .replace(/(\n?)\s*__marker__.*__marker__\((.+?)\);\s*/s, (_, n, captured) => { + names = captured; + return n; + }) + .replace(`// ${basename(generated_entrypoint)}`, '') + .trim(); + assert(names, 'missing name'); + + if (debug) { + code = '\n ' + code.replace(/\n/g, '\n ') + '\n'; + } + + if (code[code.length - 1] === ';') code = code.slice(0, -1); + + if (mode === 'server') { + const server_fetch_function = names.split(',')[2].trim(); + code = debug + ? 
`${code} return ${server_fetch_function};\n` + : `${code};return ${server_fetch_function};` + } + + code = debug + ? `((${names}) => {${code}})({\n` + : `((${names})=>{${code}})({`; + + if (mode === 'server') { + code = `export default await ${code}`; + } + + writeFileSync(join(codegen_root, `kit.${mode}.js`), code); +})); + +// print failures in a de-duplicated fashion. +interface Err { kind: 'client' | 'server' | 'both', err: any } +const failed = [ + { kind: 'client', result: results[0] }, + { kind: 'server', result: results[1] }, +] + .filter(x => x.result.status === 'rejected') + .map(x => ({ kind: x.kind, err: x.result.reason })) as Err[]; +if(failed.length > 0) { + const flattened_errors: Err[] = []; + for (const { kind, err } of failed) { + if (err instanceof AggregateError) { + flattened_errors.push(...err.errors.map(err => ({ kind, err }))); + } + flattened_errors.push({ kind, err }); + } + for(let i = 0; i < flattened_errors.length; i++) { + const x = flattened_errors[i]; + if (!x.err?.message) continue; + for (const other of flattened_errors.slice(0, i)) { + if(other.err?.message === x.err.message || other.err.stack === x.err.stack) { + other.kind = 'both'; + flattened_errors.splice(i, 1); + i -= 1; + continue; + } + } + } + let current = ''; + for(const { kind, err } of flattened_errors) { + if(kind !== current) { + const map = { both: 'runtime', client: 'client runtime', server: 'server runtime' } + console.error(`Errors while bundling Kit ${map[kind]}:`); + } + console.error(err); + } + process.exit(1); +} else { + console.log('-> kit.client.js, kit.server.js'); + + const empty_file = join(codegen_root, 'kit_empty_file'); + if (!existsSync(empty_file)) + writeFileSync(empty_file, 'this is used to fulfill a cmake dependency'); +} diff --git a/src/defines.zig b/src/defines.zig index 296bd987a4..43cdd144b6 100644 --- a/src/defines.zig +++ b/src/defines.zig @@ -57,6 +57,13 @@ pub const DefineData = struct { return self.valueless; } + pub fn initBoolean(value: bool) DefineData { + return .{ + .value = .{ .e_boolean = .{ .value = value } }, + .can_be_removed_if_unused = true, + }; + } + pub fn merge(a: DefineData, b: DefineData) DefineData { return DefineData{ .value = b.value, @@ -165,6 +172,8 @@ pub const Define = struct { dots: bun.StringHashMap([]DotDefine), allocator: std.mem.Allocator, + pub const Data = DefineData; + pub fn forIdentifier(this: *const Define, name: []const u8) ?IdentifierDefine { if (this.identifiers.get(name)) |data| { return data; @@ -174,58 +183,60 @@ pub const Define = struct { } pub fn insertFromIterator(define: *Define, allocator: std.mem.Allocator, comptime Iterator: type, iter: Iterator) !void { - outer: while (iter.next()) |user_define| { - const user_define_key = user_define.key_ptr.*; - // If it has a dot, then it's a DotDefine. - // e.g. process.env.NODE_ENV - if (strings.lastIndexOfChar(user_define_key, '.')) |last_dot| { - const tail = user_define_key[last_dot + 1 .. 
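The codegen script above bundles the HMR runtime twice: the first pass produces syntax-minified output, and the second pass wraps it so that `input_graph`, `config`, and (for the server) `server_fetch_function` become parameters instead of globals, using a `__marker__` sentinel so the parameter list can still be recovered after the minifier renames everything. A simplified TypeScript sketch of just the marker-recovery step, run on a hand-written string rather than real Bun.build output:

```ts
// Sketch of the marker technique: declare the "globals", call __marker__ with them,
// bundle/minify, then recover whatever names the minifier chose and turn the whole
// thing into a function of those names. The input here is hand-written, not bundler output.
function wrapGlobalsAsParameters(bundledCode: string): string {
  let names = "";
  const body = bundledCode
    .replace(/(\n?)\s*__marker__.*__marker__\((.+?)\);\s*/s, (_, newline, captured) => {
      names = captured; // e.g. "a,b,c" after minification renamed the declarations
      return newline;
    })
    .trim();
  if (!names) throw new Error("marker not found");
  return `((${names}) => {${body}})(/* pass input_graph, config, ... here */);`;
}

// What the first pass could look like (before minification it would use the real names):
const afterFirstPass = `
__marker__;
let a, b, c;
__marker__(a,b,c);
console.log(a, b, c);
`;

console.log(wrapGlobalsAsParameters(afterFirstPass));
// prints something like: ((a,b,c) => {console.log(a, b, c);})(/* pass input_graph, config, ... here */);
```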
user_define_key.len]; - const remainder = user_define_key[0..last_dot]; - const count = std.mem.count(u8, remainder, ".") + 1; - var parts = try allocator.alloc(string, count + 1); - var splitter = std.mem.split(u8, remainder, "."); - var i: usize = 0; - while (splitter.next()) |split| : (i += 1) { - parts[i] = split; - } - parts[i] = tail; - var initial_values: []DotDefine = &([_]DotDefine{}); - - // "NODE_ENV" - const gpe_entry = try define.dots.getOrPut(tail); - - if (gpe_entry.found_existing) { - for (gpe_entry.value_ptr.*) |*part| { - // ["process", "env"] === ["process", "env"] (if that actually worked) - if (arePartsEqual(part.parts, parts)) { - part.data = part.data.merge(user_define.value_ptr.*); - continue :outer; - } - } - - initial_values = gpe_entry.value_ptr.*; - } - - var list = try std.ArrayList(DotDefine).initCapacity(allocator, initial_values.len + 1); - if (initial_values.len > 0) { - list.appendSliceAssumeCapacity(initial_values); - } - - list.appendAssumeCapacity(DotDefine{ - .data = user_define.value_ptr.*, - // TODO: do we need to allocate this? - .parts = parts, - }); - gpe_entry.value_ptr.* = try list.toOwnedSlice(); - } else { - - // e.g. IS_BROWSER - try define.identifiers.put(user_define_key, user_define.value_ptr.*); - } + while (iter.next()) |user_define| { + try define.insert(allocator, user_define.key_ptr.*, user_define.value_ptr.*); } } - pub fn init(allocator: std.mem.Allocator, _user_defines: ?UserDefines, string_defines: ?UserDefinesArray) !*@This() { + pub fn insert(define: *Define, allocator: std.mem.Allocator, key: []const u8, value: DefineData) !void { + // If it has a dot, then it's a DotDefine. + // e.g. process.env.NODE_ENV + if (strings.lastIndexOfChar(key, '.')) |last_dot| { + const tail = key[last_dot + 1 .. key.len]; + const remainder = key[0..last_dot]; + const count = std.mem.count(u8, remainder, ".") + 1; + var parts = try allocator.alloc(string, count + 1); + var splitter = std.mem.split(u8, remainder, "."); + var i: usize = 0; + while (splitter.next()) |split| : (i += 1) { + parts[i] = split; + } + parts[i] = tail; + var initial_values: []DotDefine = &([_]DotDefine{}); + + // "NODE_ENV" + const gpe_entry = try define.dots.getOrPut(tail); + + if (gpe_entry.found_existing) { + for (gpe_entry.value_ptr.*) |*part| { + // ["process", "env"] === ["process", "env"] (if that actually worked) + if (arePartsEqual(part.parts, parts)) { + part.data = part.data.merge(value); + return; + } + } + + initial_values = gpe_entry.value_ptr.*; + } + + var list = try std.ArrayList(DotDefine).initCapacity(allocator, initial_values.len + 1); + if (initial_values.len > 0) { + list.appendSliceAssumeCapacity(initial_values); + } + + list.appendAssumeCapacity(DotDefine{ + .data = value, + // TODO: do we need to allocate this? + .parts = parts, + }); + gpe_entry.value_ptr.* = try list.toOwnedSlice(); + } else { + // e.g. 
IS_BROWSER + try define.identifiers.put(key, value); + } + } + + pub fn init(allocator: std.mem.Allocator, _user_defines: ?UserDefines, string_defines: ?UserDefinesArray) std.mem.Allocator.Error!*@This() { var define = try allocator.create(Define); define.allocator = allocator; define.identifiers = bun.StringHashMap(IdentifierDefine).init(allocator); diff --git a/src/deps/uws.zig b/src/deps/uws.zig index 10bc3b6b3a..102858501a 100644 --- a/src/deps/uws.zig +++ b/src/deps/uws.zig @@ -172,15 +172,13 @@ pub const UpgradedDuplex = struct { const buffer = JSC.BinaryType.toJS(.Buffer, data_, globalThis); buffer.ensureStillAlive(); - const result = writeOrEnd.call(globalThis, duplex, &[_]JSC.JSValue{buffer}); - if (result.toError()) |err| { - this.handlers.onError(this.handlers.ctx, err); - } + _ = writeOrEnd.call(globalThis, duplex, &.{buffer}) catch |err| { + this.handlers.onError(this.handlers.ctx, globalThis.takeException(err)); + }; } else { - const result = writeOrEnd.call(globalThis, duplex, &[_]JSC.JSValue{JSC.JSValue.jsNull()}); - if (result.toError()) |err| { - this.handlers.onError(this.handlers.ctx, err); - } + _ = writeOrEnd.call(globalThis, duplex, &.{.null}) catch |err| { + this.handlers.onError(this.handlers.ctx, globalThis.takeException(err)); + }; } } } @@ -2874,30 +2872,37 @@ pub const WebSocketBehavior = extern struct { const active_field_name = if (is_ssl) "ssl" else "tcp"; - pub fn _open(raw_ws: *RawWebSocket) callconv(.C) void { - var ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); + pub fn onOpen(raw_ws: *RawWebSocket) callconv(.C) void { + const ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); const this = ws.as(Type).?; - @call(bun.callmod_inline, Type.onOpen, .{ this, ws }); + @call(bun.callmod_inline, Type.onOpen, .{ + this, + ws, + }); } - pub fn _message(raw_ws: *RawWebSocket, message: [*c]const u8, length: usize, opcode: Opcode) callconv(.C) void { - var ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); + + pub fn onMessage(raw_ws: *RawWebSocket, message: [*c]const u8, length: usize, opcode: Opcode) callconv(.C) void { + const ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); const this = ws.as(Type).?; - @call( - .always_inline, - Type.onMessage, - .{ this, ws, if (length > 0) message[0..length] else "", opcode }, - ); + @call(.always_inline, Type.onMessage, .{ + this, + ws, + if (length > 0) message[0..length] else "", + opcode, + }); } - pub fn _drain(raw_ws: *RawWebSocket) callconv(.C) void { - var ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); + + pub fn onDrain(raw_ws: *RawWebSocket) callconv(.C) void { + const ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); const this = ws.as(Type).?; @call(bun.callmod_inline, Type.onDrain, .{ this, ws, }); } - pub fn _ping(raw_ws: *RawWebSocket, message: [*c]const u8, length: usize) callconv(.C) void { - var ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); + + pub fn onPing(raw_ws: *RawWebSocket, message: [*c]const u8, length: usize) callconv(.C) void { + const ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); const this = ws.as(Type).?; @call(bun.callmod_inline, Type.onPing, .{ this, @@ -2905,8 +2910,9 @@ pub const WebSocketBehavior = extern struct { if (length > 0) message[0..length] else "", }); } - pub fn _pong(raw_ws: *RawWebSocket, 
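The `Define.insert` helper factored out above splits a dotted key such as `process.env.NODE_ENV` into its leading parts plus a tail and stores the entry under the tail (`NODE_ENV`), so matching can start from the innermost property name; keys without a dot, like `IS_BROWSER`, go straight into the identifier map. A rough TypeScript sketch of that bookkeeping, using simplified data shapes rather than Bun's `DefineData`/`DotDefine` structs:

```ts
// Simplified model of identifier defines vs. dotted defines.
type DefineValue = string;

interface DotDefine {
  parts: string[]; // e.g. ["process", "env", "NODE_ENV"]
  value: DefineValue;
}

const identifiers = new Map<string, DefineValue>(); // e.g. IS_BROWSER -> "true"
const dots = new Map<string, DotDefine[]>();        // keyed by the tail, e.g. "NODE_ENV"

function insertDefine(key: string, value: DefineValue): void {
  const lastDot = key.lastIndexOf(".");
  if (lastDot === -1) {
    identifiers.set(key, value); // e.g. IS_BROWSER
    return;
  }
  const tail = key.slice(lastDot + 1); // "NODE_ENV"
  const parts = [...key.slice(0, lastDot).split("."), tail];
  const existing = dots.get(tail) ?? [];
  const match = existing.find(
    d => d.parts.length === parts.length && d.parts.every((p, i) => p === parts[i]),
  );
  if (match) {
    match.value = value; // an identical dotted path is merged/overwritten
  } else {
    existing.push({ parts, value });
    dots.set(tail, existing);
  }
}

insertDefine("process.env.NODE_ENV", '"production"');
insertDefine("IS_BROWSER", "true");
console.log(dots.get("NODE_ENV"), identifiers.get("IS_BROWSER"));
```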
message: [*c]const u8, length: usize) callconv(.C) void { - var ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); + + pub fn onPong(raw_ws: *RawWebSocket, message: [*c]const u8, length: usize) callconv(.C) void { + const ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); const this = ws.as(Type).?; @call(bun.callmod_inline, Type.onPong, .{ this, @@ -2914,30 +2920,30 @@ pub const WebSocketBehavior = extern struct { if (length > 0) message[0..length] else "", }); } - pub fn _close(raw_ws: *RawWebSocket, code: i32, message: [*c]const u8, length: usize) callconv(.C) void { - var ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); + + pub fn onClose(raw_ws: *RawWebSocket, code: i32, message: [*c]const u8, length: usize) callconv(.C) void { + const ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); const this = ws.as(Type).?; - @call( - .always_inline, - Type.onClose, - .{ - this, - ws, - code, - if (length > 0 and message != null) message[0..length] else "", - }, - ); + @call(.always_inline, Type.onClose, .{ + this, + ws, + code, + if (length > 0 and message != null) message[0..length] else "", + }); } - pub fn _upgrade(ptr: *anyopaque, res: *uws_res, req: *Request, context: *uws_socket_context_t, id: usize) callconv(.C) void { - @call( - .always_inline, - Server.onWebSocketUpgrade, - .{ bun.cast(*Server, ptr), @as(*NewApp(is_ssl).Response, @ptrCast(res)), req, context, id }, - ); + + pub fn onUpgrade(ptr: *anyopaque, res: *uws_res, req: *Request, context: *uws_socket_context_t, id: usize) callconv(.C) void { + @call(.always_inline, Server.onWebSocketUpgrade, .{ + bun.cast(*Server, ptr), + @as(*NewApp(is_ssl).Response, @ptrCast(res)), + req, + context, + id, + }); } pub fn apply(behavior: WebSocketBehavior) WebSocketBehavior { - return WebSocketBehavior{ + return .{ .compression = behavior.compression, .maxPayloadLength = behavior.maxPayloadLength, .idleTimeout = behavior.idleTimeout, @@ -2946,13 +2952,13 @@ pub const WebSocketBehavior = extern struct { .resetIdleTimeoutOnSend = behavior.resetIdleTimeoutOnSend, .sendPingsAutomatically = behavior.sendPingsAutomatically, .maxLifetime = behavior.maxLifetime, - .upgrade = _upgrade, - .open = _open, - .message = _message, - .drain = _drain, - .ping = _ping, - .pong = _pong, - .close = _close, + .upgrade = onUpgrade, + .open = onOpen, + .message = if (@hasDecl(Type, "onMessage")) onMessage else null, + .drain = if (@hasDecl(Type, "onDrain")) onDrain else null, + .ping = if (@hasDecl(Type, "onPing")) onPing else null, + .pong = if (@hasDecl(Type, "onPong")) onPong else null, + .close = onClose, }; } }; @@ -3192,24 +3198,13 @@ pub fn NewApp(comptime ssl: bool) type { const ThisApp = @This(); pub fn close(this: *ThisApp) void { - if (comptime is_bindgen) { - unreachable; - } - return uws_app_close(ssl_flag, @as(*uws_app_s, @ptrCast(this))); } pub fn create(opts: us_bun_socket_context_options_t) *ThisApp { - if (comptime is_bindgen) { - unreachable; - } return @as(*ThisApp, @ptrCast(uws_create_app(ssl_flag, opts))); } pub fn destroy(app: *ThisApp) void { - if (comptime is_bindgen) { - unreachable; - } - return uws_app_destroy(ssl_flag, @as(*uws_app_s, @ptrCast(app))); } @@ -3224,10 +3219,6 @@ pub fn NewApp(comptime ssl: bool) type { fn RouteHandler(comptime UserDataType: type, comptime handler: fn (UserDataType, *Request, *Response) void) type { return struct { pub fn handle(res: *uws_res, req: *Request, user_data: 
?*anyopaque) callconv(.C) void { - if (comptime is_bindgen) { - unreachable; - } - if (comptime UserDataType == void) { return @call( .always_inline, @@ -3255,15 +3246,9 @@ pub fn NewApp(comptime ssl: bool) type { pub const ListenSocket = opaque { pub inline fn close(this: *ThisApp.ListenSocket) void { - if (comptime is_bindgen) { - unreachable; - } return us_listen_socket_close(ssl_flag, @as(*uws.ListenSocket, @ptrCast(this))); } pub inline fn getLocalPort(this: *ThisApp.ListenSocket) i32 { - if (comptime is_bindgen) { - unreachable; - } return us_socket_local_port(ssl_flag, @as(*uws.Socket, @ptrCast(this))); } @@ -3279,10 +3264,7 @@ pub fn NewApp(comptime ssl: bool) type { user_data: UserDataType, comptime handler: (fn (UserDataType, *Request, *Response) void), ) void { - if (comptime is_bindgen) { - unreachable; - } - uws_app_get(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, user_data); + uws_app_get(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, if (UserDataType == void) null else user_data); } pub fn post( app: *ThisApp, @@ -3291,10 +3273,7 @@ pub fn NewApp(comptime ssl: bool) type { user_data: UserDataType, comptime handler: (fn (UserDataType, *Request, *Response) void), ) void { - if (comptime is_bindgen) { - unreachable; - } - uws_app_post(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, user_data); + uws_app_post(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, if (UserDataType == void) null else user_data); } pub fn options( app: *ThisApp, @@ -3303,10 +3282,7 @@ pub fn NewApp(comptime ssl: bool) type { user_data: UserDataType, comptime handler: (fn (UserDataType, *Request, *Response) void), ) void { - if (comptime is_bindgen) { - unreachable; - } - uws_app_options(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, user_data); + uws_app_options(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, if (UserDataType == void) null else user_data); } pub fn delete( app: *ThisApp, @@ -3315,10 +3291,7 @@ pub fn NewApp(comptime ssl: bool) type { user_data: UserDataType, comptime handler: (fn (UserDataType, *Request, *Response) void), ) void { - if (comptime is_bindgen) { - unreachable; - } - uws_app_delete(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, user_data); + uws_app_delete(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, if (UserDataType == void) null else user_data); } pub fn patch( app: *ThisApp, @@ -3327,10 +3300,7 @@ pub fn NewApp(comptime ssl: bool) type { user_data: UserDataType, comptime handler: (fn (UserDataType, *Request, *Response) void), ) void { - if (comptime is_bindgen) { - unreachable; - } - uws_app_patch(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, user_data); + uws_app_patch(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, if (UserDataType == void) null else user_data); } pub fn put( app: *ThisApp, @@ -3339,10 +3309,7 @@ pub fn NewApp(comptime ssl: bool) type { user_data: UserDataType, comptime handler: (fn (UserDataType, *Request, *Response) void), ) void { - if (comptime is_bindgen) { - unreachable; - } - uws_app_put(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, 
handler).handle, user_data); + uws_app_put(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, if (UserDataType == void) null else user_data); } pub fn head( app: *ThisApp, @@ -3351,10 +3318,7 @@ pub fn NewApp(comptime ssl: bool) type { user_data: UserDataType, comptime handler: (fn (UserDataType, *Request, *Response) void), ) void { - if (comptime is_bindgen) { - unreachable; - } - uws_app_head(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern.ptr, pattern.len, RouteHandler(UserDataType, handler).handle, user_data); + uws_app_head(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern.ptr, pattern.len, RouteHandler(UserDataType, handler).handle, if (UserDataType == void) null else user_data); } pub fn connect( app: *ThisApp, @@ -3363,10 +3327,7 @@ pub fn NewApp(comptime ssl: bool) type { user_data: UserDataType, comptime handler: (fn (UserDataType, *Request, *Response) void), ) void { - if (comptime is_bindgen) { - unreachable; - } - uws_app_connect(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, user_data); + uws_app_connect(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, if (UserDataType == void) null else user_data); } pub fn trace( app: *ThisApp, @@ -3375,10 +3336,7 @@ pub fn NewApp(comptime ssl: bool) type { user_data: UserDataType, comptime handler: (fn (UserDataType, *Request, *Response) void), ) void { - if (comptime is_bindgen) { - unreachable; - } - uws_app_trace(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, user_data); + uws_app_trace(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern, RouteHandler(UserDataType, handler).handle, if (UserDataType == void) null else user_data); } pub fn any( app: *ThisApp, @@ -3387,18 +3345,12 @@ pub fn NewApp(comptime ssl: bool) type { user_data: UserDataType, comptime handler: (fn (UserDataType, *Request, *Response) void), ) void { - if (comptime is_bindgen) { - unreachable; - } - uws_app_any(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern.ptr, pattern.len, RouteHandler(UserDataType, handler).handle, user_data); + uws_app_any(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern.ptr, pattern.len, RouteHandler(UserDataType, handler).handle, if (UserDataType == void) null else user_data); } pub fn domain(app: *ThisApp, pattern: [:0]const u8) void { uws_app_domain(ssl_flag, @as(*uws_app_t, @ptrCast(app)), pattern); } pub fn run(app: *ThisApp) void { - if (comptime is_bindgen) { - unreachable; - } return uws_app_run(ssl_flag, @as(*uws_app_t, @ptrCast(app))); } pub fn listen( @@ -3408,9 +3360,6 @@ pub fn NewApp(comptime ssl: bool) type { user_data: UserData, comptime handler: fn (UserData, ?*ThisApp.ListenSocket, uws_app_listen_config_t) void, ) void { - if (comptime is_bindgen) { - unreachable; - } const Wrapper = struct { pub fn handle(socket: ?*uws.ListenSocket, conf: uws_app_listen_config_t, data: ?*anyopaque) callconv(.C) void { if (comptime UserData == void) { @@ -4084,10 +4033,11 @@ pub const SendStatus = enum(c_uint) { dropped = 2, }; pub const uws_app_listen_config_t = extern struct { - port: i32, - host: [*c]const u8 = null, - options: i32, + port: c_int, + host: ?[*:0]const u8 = null, + options: c_int = 0, }; +pub const AppListenConfig = uws_app_listen_config_t; extern fn us_socket_mark_needs_more_not_ssl(socket: ?*uws_res) void; diff --git a/src/env.zig b/src/env.zig index 7d410c83e6..2f6f528abc 100644 --- a/src/env.zig +++ b/src/env.zig @@ -27,28 +27,26 @@ pub 
const isX86 = @import("builtin").target.cpu.arch.isX86(); pub const isX64 = @import("builtin").target.cpu.arch == .x86_64; pub const allow_assert = isDebug or isTest or std.builtin.Mode.ReleaseSafe == @import("builtin").mode; -const BuildOptions = if (isTest) struct { - pub const baseline = false; - pub const sha = "0000000000000000000000000000000000000000"; - pub const is_canary = false; - pub const base_path = "/tmp"; - pub const canary_revision = 0; - pub const reported_nodejs_version = "22.3.0"; -} else @import("root").build_options; +pub const build_options = @import("build_options"); -pub const reported_nodejs_version = BuildOptions.reported_nodejs_version; -pub const baseline = BuildOptions.baseline; +pub const reported_nodejs_version = build_options.reported_nodejs_version; +pub const baseline = build_options.baseline; pub const enableSIMD: bool = !baseline; -pub const git_sha = BuildOptions.sha; -pub const git_sha_short = if (BuildOptions.sha.len > 0) BuildOptions.sha[0..9] else ""; -pub const git_sha_shorter = if (BuildOptions.sha.len > 0) BuildOptions.sha[0..6] else ""; -pub const is_canary = BuildOptions.is_canary; -pub const canary_revision = if (is_canary) BuildOptions.canary_revision else ""; +pub const git_sha = build_options.sha; +pub const git_sha_short = if (build_options.sha.len > 0) build_options.sha[0..9] else ""; +pub const git_sha_shorter = if (build_options.sha.len > 0) build_options.sha[0..6] else ""; +pub const is_canary = build_options.is_canary; +pub const canary_revision = if (is_canary) build_options.canary_revision else ""; pub const dump_source = isDebug and !isTest; -pub const base_path = BuildOptions.base_path ++ "/"; -pub const enable_logs = BuildOptions.enable_logs or isDebug; +pub const base_path = build_options.base_path; +pub const enable_logs = build_options.enable_logs or isDebug; -pub const version: std.SemanticVersion = BuildOptions.version; +/// See -Dforce_embed_code +pub const embed_code = build_options.embed_code; + +pub const codegen_path = build_options.codegen_path; + +pub const version: std.SemanticVersion = build_options.version; pub const version_string = std.fmt.comptimePrint("{d}.{d}.{d}", .{ version.major, version.minor, version.patch }); pub inline fn onlyMac() void { diff --git a/src/feature_flags.zig b/src/feature_flags.zig index 8bd8141608..0e93fb2a32 100644 --- a/src/feature_flags.zig +++ b/src/feature_flags.zig @@ -1,62 +1,38 @@ const env = @import("env.zig"); const bun = @import("root").bun; -pub const strong_etags_for_built_files = true; -pub const keep_alive = false; - -// Debug helpers -pub const print_ast = false; -pub const disable_printing_null = false; +/// Enable breaking changes for the next major release of Bun +// TODO: Make this a CLI flag / runtime var so that we can verify disabled code paths can compile +pub const breaking_changes_1_2 = false; /// Store and reuse file descriptors during module resolution /// This was a ~5% performance improvement pub const store_file_descriptors = !env.isBrowser; -pub const css_in_js_import_behavior = CSSInJSImportBehavior.facade; - -pub const only_output_esm = true; - pub const jsx_runtime_is_cjs = true; -pub const bundle_node_modules = true; - pub const tracing = true; +/// Disabled due to bugs pub const minify_javascript_string_length = false; +// TODO: remove this flag, it should use bun.Output.scoped pub const verbose_watcher = false; pub const css_supports_fence = true; pub const enable_entry_cache = true; -pub const enable_bytecode_caching = false; - -pub const dev_only = 
true; +// TODO: remove this flag, it should use bun.Output.scoped pub const verbose_fs = false; pub const watch_directories = true; -pub const tailwind_css_at_keyword = true; - -pub const bundle_dynamic_import = true; - // This feature flag exists so when you have defines inside package.json, you can use single quotes in nested strings. pub const allow_json_single_quotes = true; pub const react_specific_warnings = true; -pub const CSSInJSImportBehavior = enum { - // When you import a .css file and you reference the import in JavaScript - // Just return whatever the property key they referenced was - facade, - facade_onimportcss, -}; - -// having issues compiling WebKit with this enabled -pub const remote_inspector = false; -pub const auto_import_buffer = false; - pub const is_macro_enabled = !env.isWasm and !env.isWasi; // pretend everything is always the macro environment @@ -65,17 +41,12 @@ pub const force_macro = false; pub const include_filename_in_jsx = false; -pub const verbose_analytics = false; - pub const disable_compression_in_http_client = false; pub const enable_keepalive = true; pub const atomic_file_watcher = env.isLinux; -pub const node_streams = false; -pub const simd = true; - // This change didn't seem to make a meaningful difference in microbenchmarks pub const latin1_is_now_ascii = false; @@ -100,8 +71,6 @@ pub const inline_properties_in_transpiler = true; pub const same_target_becomes_destructuring = true; -pub const react_server_components = true; - pub const help_catch_memory_issues = bun.Environment.allow_assert; /// This performs similar transforms as https://github.com/rollup/plugins/tree/master/packages/commonjs @@ -142,8 +111,6 @@ pub const help_catch_memory_issues = bun.Environment.allow_assert; /// In that case, we wrap it again in the printer. pub const unwrap_commonjs_to_esm = true; -pub const boundary_based_chunking = true; - /// https://sentry.engineering/blog/the-case-for-debug-ids /// https://github.com/mitsuhiko/source-map-rfc/blob/proposals/debug-id/proposals/debug-id.md /// https://github.com/source-map/source-map-rfc/pull/20 @@ -170,10 +137,6 @@ pub const runtime_transpiler_cache = true; /// order to isolate your bug. pub const windows_bunx_fast_path = true; -/// Enable breaking changes for the next major release of Bun -// TODO: Make this a CLI flag / runtime var so that we can verify disabled code paths can compile -pub const breaking_changes_1_2 = false; - // This causes strange bugs where writing via console.log (sync) has a different // order than via Bun.file.writer() so we turn it off until there's a unified, // buffered writer abstraction shared throughout Bun @@ -194,3 +157,6 @@ pub fn isLibdeflateEnabled() bool { return !bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_NO_LIBDEFLATE"); } + +/// Enable experimental bundler tools, codenamed "bun kit" +pub const kit = env.is_canary or env.isDebug; diff --git a/src/fs.zig b/src/fs.zig index 2092ddf24b..7eb3eaf91a 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -1652,6 +1652,14 @@ pub const Path = struct { return hasher.final(); } + /// This hash is used by the hot-module-reloading client in order to + /// identify modules. Since that code is JavaScript, the hash must remain in + /// range [-MAX_SAFE_INTEGER, MAX_SAFE_INTEGER] or else information is lost + /// due to floating-point precision. 
+ pub fn hashForKit(path: Path) u52 { + return @truncate(path.hashKey()); + } + pub fn packageName(this: *const Path) ?string { var name_to_use = this.pretty; if (strings.lastIndexOf(this.text, std.fs.path.sep_str ++ "node_modules" ++ std.fs.path.sep_str)) |node_modules| { diff --git a/src/js_ast.zig b/src/js_ast.zig index 9f65e1fc75..ada8a2a013 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -239,12 +239,11 @@ pub const BindingNodeList = []Binding; pub const ImportItemStatus = enum(u2) { none, - - // The linker doesn't report import/export mismatch errors + /// The linker doesn't report import/export mismatch errors generated, - // The printer will replace this import with "undefined" - + /// The printer will replace this import with "undefined" missing, + pub fn jsonStringify(self: @This(), writer: anytype) !void { return try writer.write(@tagName(self)); } @@ -304,10 +303,6 @@ pub const Flags = struct { /// Only applicable to function statements. is_export, - /// Used for Hot Module Reloading's wrapper function - /// "iife" stands for "immediately invoked function expression" - print_as_iife, - pub inline fn init(fields: Fields) Set { return Set.init(fields); } @@ -357,7 +352,6 @@ pub const Binding = struct { .b_missing => { return Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = loc }; }, - .b_identifier => |b| { return wrapper.wrapIdentifier(loc, b.ref); }, @@ -405,14 +399,12 @@ pub const Binding = struct { loc, ); }, - else => |tag| Output.panic("Unexpected binding .{s}", .{@tagName(tag)}), } } pub const Tag = enum(u5) { b_identifier, b_array, - b_property, b_object, b_missing, @@ -432,9 +424,6 @@ pub const Binding = struct { *B.Array => { return Binding{ .loc = loc, .data = B{ .b_array = t } }; }, - *B.Property => { - return Binding{ .loc = loc, .data = B{ .b_property = t } }; - }, *B.Object => { return Binding{ .loc = loc, .data = B{ .b_object = t } }; }, @@ -460,11 +449,6 @@ pub const Binding = struct { data.* = t; return Binding{ .loc = loc, .data = B{ .b_array = data } }; }, - B.Property => { - const data = allocator.create(B.Property) catch unreachable; - data.* = t; - return Binding{ .loc = loc, .data = B{ .b_property = data } }; - }, B.Object => { const data = allocator.create(B.Object) catch unreachable; data.* = t; @@ -480,13 +464,31 @@ pub const Binding = struct { } }; -/// B is for Binding! -/// These are the types of bindings that can be used in the AST. +/// B is for Binding! Bindings are on the left side of variable +/// declarations (s_local), which is how destructuring assignments +/// are represented in memory. Consider a basic example. +/// +/// let hello = world; +/// ^ ^ +/// | E.Identifier +/// B.Identifier +/// +/// Bindings can be nested +/// +/// B.Array +/// | B.Identifier +/// | | +/// let { foo: [ bar ] } = ... +/// ---------------- +/// B.Object pub const B = union(Binding.Tag) { + // let x = ... b_identifier: *B.Identifier, + // let [a, b] = ... b_array: *B.Array, - b_property: *B.Property, + // let { a, b: c } = ... 
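`Path.hashForKit` above truncates the 64-bit path hash to a `u52` so the module identifier survives a round trip through JavaScript numbers on the HMR client: every integer whose magnitude is at most `Number.MAX_SAFE_INTEGER` (2^53 - 1) is exactly representable as a double. A small TypeScript illustration of that property (not the actual client code):

```ts
// A 52-bit value is always below Number.MAX_SAFE_INTEGER (2^53 - 1),
// so converting it to a JS number loses no information.
function truncateTo52Bits(hash64: bigint): number {
  const mask = (1n << 52n) - 1n;
  const truncated = hash64 & mask;
  const asNumber = Number(truncated);
  // Round-tripping through a double preserves the value exactly.
  if (BigInt(asNumber) !== truncated) throw new Error("precision lost");
  return asNumber;
}

console.log(Number.MAX_SAFE_INTEGER === 2 ** 53 - 1); // true
console.log(truncateTo52Bits(0xdead_beef_cafe_1234n)); // a module id usable from JS
```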
b_object: *B.Object, + // this is used to represent array holes b_missing: B.Missing, pub const Identifier = struct { @@ -496,11 +498,16 @@ pub const B = union(Binding.Tag) { pub const Property = struct { flags: Flags.Property.Set = Flags.Property.None, key: ExprNodeIndex, - value: BindingNodeIndex, - default_value: ?ExprNodeIndex = null, + value: Binding, + default_value: ?Expr = null, }; - pub const Object = struct { properties: []Property, is_single_line: bool = false }; + pub const Object = struct { + properties: []B.Property, + is_single_line: bool = false, + + pub const Property = B.Property; + }; pub const Array = struct { items: []ArrayBinding, @@ -509,6 +516,39 @@ pub const B = union(Binding.Tag) { }; pub const Missing = struct {}; + + /// This hash function is currently only used for React Fast Refresh transform. + /// This doesn't include the `is_single_line` properties, as they only affect whitespace. + pub fn writeToHasher(b: B, hasher: anytype, symbol_table: anytype) void { + switch (b) { + .b_identifier => |id| { + const original_name = id.ref.getSymbol(symbol_table).original_name; + writeAnyToHasher(hasher, .{ std.meta.activeTag(b), original_name.len }); + }, + .b_array => |array| { + writeAnyToHasher(hasher, .{ std.meta.activeTag(b), array.has_spread, array.items.len }); + for (array.items) |item| { + writeAnyToHasher(hasher, .{item.default_value != null}); + if (item.default_value) |default| { + default.data.writeToHasher(hasher, symbol_table); + } + item.binding.data.writeToHasher(hasher, symbol_table); + } + }, + .b_object => |object| { + writeAnyToHasher(hasher, .{ std.meta.activeTag(b), object.properties.len }); + for (object.properties) |property| { + writeAnyToHasher(hasher, .{ property.default_value != null, property.flags }); + if (property.default_value) |default| { + default.data.writeToHasher(hasher, symbol_table); + } + property.key.data.writeToHasher(hasher, symbol_table); + property.value.data.writeToHasher(hasher, symbol_table); + } + }, + .b_missing => {}, + } + } }; pub const ClauseItem = struct { @@ -813,7 +853,7 @@ pub const G = struct { flags: Flags.Property.Set = Flags.Property.None, class_static_block: ?*ClassStaticBlock = null, - ts_decorators: ExprNodeList = ExprNodeList{}, + ts_decorators: ExprNodeList = .{}, // Key is optional for spread key: ?ExprNodeIndex = null, @@ -867,10 +907,10 @@ pub const G = struct { pub const Fn = struct { name: ?LocRef = null, open_parens_loc: logger.Loc = logger.Loc.Empty, - args: []Arg = &([_]Arg{}), + args: []Arg = &.{}, // This was originally nullable, but doing so I believe caused a miscompilation // Specifically, the body was always null. 
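`writeToHasher` on `B` above (and the matching helper added to `Expr.Data` further below) hashes the shape of a binding rather than its memory layout: the active tag, element counts, flags, and each symbol's original name instead of its numeric ref, so the result is deterministic across rebuilds. The React Fast Refresh transform uses this kind of structural hash to decide whether a component's signature changed. A loose TypeScript analogue over an invented mini-AST, using node:crypto; the node shapes are assumptions for illustration, not Bun's AST:

```ts
import { createHash } from "node:crypto";

// Toy binding nodes: just enough structure to show tag + name + child hashing.
type Node =
  | { kind: "identifier"; name: string }
  | { kind: "array"; items: Node[] }
  | { kind: "object"; properties: { key: string; value: Node }[] };

// Deterministic: depends only on tags, lengths, and names - never on memory
// addresses or internal id numbers - so the same source hashes the same way
// on every rebuild.
function writeToHasher(node: Node, hasher: ReturnType<typeof createHash>): void {
  hasher.update(node.kind);
  switch (node.kind) {
    case "identifier":
      hasher.update(node.name);
      break;
    case "array":
      hasher.update(String(node.items.length));
      for (const item of node.items) writeToHasher(item, hasher);
      break;
    case "object":
      hasher.update(String(node.properties.length));
      for (const prop of node.properties) {
        hasher.update(prop.key);
        writeToHasher(prop.value, hasher);
      }
      break;
  }
}

function hashBinding(node: Node): string {
  const hasher = createHash("sha256");
  writeToHasher(node, hasher);
  return hasher.digest("hex");
}

console.log(
  hashBinding({
    kind: "object",
    properties: [{ key: "foo", value: { kind: "identifier", name: "bar" } }],
  }),
);
```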
- body: FnBody = FnBody{ .loc = logger.Loc.Empty, .stmts = &([_]StmtNodeIndex{}) }, + body: FnBody = .{ .loc = logger.Loc.Empty, .stmts = &.{} }, arguments_ref: ?Ref = null, flags: Flags.Function.Set = Flags.Function.None, @@ -3224,7 +3264,7 @@ pub const Stmt = struct { }, .s_local => |local| { - return local.kind != S.Kind.k_var; + return local.kind != .k_var; }, else => { return true; @@ -5140,9 +5180,6 @@ pub const Expr = struct { else .mixed; } - - // This can be used when the returned type is either one or the other - }; pub const Data = union(Tag) { @@ -5545,6 +5582,138 @@ pub const Expr = struct { }; } + /// `hasher` should be something with 'pub fn update([]const u8) void'; + /// symbol table is passed to serialize `Ref` as an identifier names instead of a nondeterministic numbers + pub fn writeToHasher(this: Expr.Data, hasher: anytype, symbol_table: anytype) void { + writeAnyToHasher(hasher, std.meta.activeTag(this)); + switch (this) { + .e_array => |e| { + writeAnyToHasher(hasher, .{ + e.is_single_line, + e.is_parenthesized, + e.was_originally_macro, + e.items.len, + }); + for (e.items.slice()) |item| { + item.data.writeToHasher(hasher, symbol_table); + } + }, + .e_unary => |e| { + writeAnyToHasher(hasher, .{e.op}); + e.value.data.writeToHasher(hasher, symbol_table); + }, + .e_binary => |e| { + writeAnyToHasher(hasher, .{e.op}); + e.left.data.writeToHasher(hasher, symbol_table); + e.right.data.writeToHasher(hasher, symbol_table); + }, + .e_class => |e| { + _ = e; // autofix + }, + inline .e_new, .e_call => |e| { + _ = e; // autofix + }, + .e_function => |e| { + _ = e; // autofix + }, + .e_dot => |e| { + writeAnyToHasher(hasher, .{ e.optional_chain, e.name.len }); + e.target.data.writeToHasher(hasher, symbol_table); + hasher.update(e.name); + }, + .e_index => |e| { + writeAnyToHasher(hasher, .{e.optional_chain}); + e.target.data.writeToHasher(hasher, symbol_table); + e.index.data.writeToHasher(hasher, symbol_table); + }, + .e_arrow => |e| { + _ = e; // autofix + }, + .e_jsx_element => |e| { + _ = e; // autofix + }, + .e_object => |e| { + _ = e; // autofix + }, + inline .e_spread, .e_await => |e| { + e.value.data.writeToHasher(hasher, symbol_table); + }, + inline .e_yield => |e| { + writeAnyToHasher(hasher, .{ e.is_star, e.value }); + if (e.value) |value| + value.data.writeToHasher(hasher, symbol_table); + }, + .e_template_part => { + // TODO: delete e_template_part as hit has zero usages + }, + .e_template => |e| { + _ = e; // autofix + }, + .e_if => |e| { + _ = e; // autofix + }, + .e_import => |e| { + _ = e; // autofix + + }, + inline .e_identifier, + .e_import_identifier, + .e_private_identifier, + .e_commonjs_export_identifier, + => |e| { + const symbol = e.ref.getSymbol(symbol_table); + hasher.update(symbol.original_name); + }, + inline .e_boolean, .e_number => |e| { + writeAnyToHasher(hasher, e.value); + }, + inline .e_big_int, .e_reg_exp => |e| { + hasher.update(e.value); + }, + + .e_string => |e| { + var next: ?*E.String = e; + if (next) |current| { + if (current.isUTF8()) { + hasher.update(current.data); + } else { + hasher.update(bun.reinterpretSlice(u8, current.slice16())); + } + next = current.next; + hasher.update("\x00"); + } + }, + inline .e_require_string, .e_require_resolve_string => |e| { + writeAnyToHasher(hasher, e.import_record_index); // preferably, i'd like to write the filepath + }, + + .e_import_meta_main => |e| { + writeAnyToHasher(hasher, e.inverted); + }, + .e_inlined_enum => |e| { + // pretend there is no comment + e.value.data.writeToHasher(hasher, 
symbol_table); + }, + .e_utf8_string => |e| { + hasher.update(e.data); + }, + + // no data + .e_require_call_target, + .e_require_resolve_call_target, + .e_missing, + .e_this, + .e_super, + .e_null, + .e_undefined, + .e_new_target, + .e_require_main, + .e_import_meta, + .e_module_dot_exports, + => {}, + } + } + /// "const values" here refers to expressions that can participate in constant /// inlining, as they have no side effects on instantiation, and there would be /// no observable difference if duplicated. This is a subset of canBeMoved() @@ -5568,6 +5737,8 @@ pub const Expr = struct { /// outside of a module wrapper (__esm/__commonJS). pub fn canBeMoved(data: Expr.Data) bool { return switch (data) { + .e_identifier => |id| id.can_be_removed_if_unused, + .e_class => |class| class.canBeMoved(), .e_arrow, @@ -6301,6 +6472,10 @@ pub const S = struct { pub fn isUsing(self: Kind) bool { return self == .k_using or self == .k_await_using; } + + pub fn isReassignable(kind: Kind) bool { + return kind == .k_var or kind == .k_let; + } }; }; @@ -6814,13 +6989,6 @@ pub const BundledAst = struct { pub const empty = BundledAst.init(Ast.empty); - pub inline fn uses_exports_ref(this: *const BundledAst) bool { - return this.flags.uses_exports_ref; - } - pub inline fn uses_module_ref(this: *const BundledAst) bool { - return this.flags.uses_module_ref; - } - pub fn toAST(this: *const BundledAst) Ast { return .{ .approximate_newline_count = this.approximate_newline_count, @@ -7346,8 +7514,19 @@ pub const Result = union(enum) { }; pub const StmtOrExpr = union(enum) { - stmt: StmtNodeIndex, - expr: ExprNodeIndex, + stmt: Stmt, + expr: Expr, + + pub fn toExpr(stmt_or_expr: StmtOrExpr) Expr { + return switch (stmt_or_expr) { + .expr => |expr| expr, + .stmt => |stmt| switch (stmt.data) { + .s_function => |s| Expr.init(E.Function, .{ .func = s.func }, stmt.loc), + .s_class => |s| Expr.init(E.Class, s.class, stmt.loc), + else => Output.panic("Unexpected statement type in default export: .{s}", .{@tagName(stmt.data)}), + }, + }; + } }; pub const NamedImport = struct { @@ -7811,18 +7990,17 @@ pub const Macro = struct { vm.enableMacroMode(); - var loaded_result = try vm.loadMacroEntryPoint(input_specifier, function_name, specifier, hash); + const loaded_result = try vm.loadMacroEntryPoint(input_specifier, function_name, specifier, hash); - if (loaded_result.status(vm.global.vm()) == JSC.JSPromise.Status.Rejected) { - _ = vm.unhandledRejection(vm.global, loaded_result.result(vm.global.vm()), loaded_result.asValue()); - vm.disableMacroMode(); - return error.MacroLoadError; + switch (loaded_result.unwrap(vm.jsc, .leave_unhandled)) { + .rejected => |result| { + _ = vm.unhandledRejection(vm.global, result, loaded_result.asValue()); + vm.disableMacroMode(); + return error.MacroLoadError; + }, + else => {}, } - // We don't need to do anything with the result. - // We just want to make sure the promise is finished. 
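`StmtOrExpr.toExpr` above copes with the fact that `export default` may carry either an expression or a function/class declaration: declarations are converted to the equivalent expression so the later CommonJS rewrite can assign them to the default export. A compact TypeScript sketch of that normalization; the shapes are illustrative, not the real AST types:

```ts
// "export default" payload: either an expression or a function/class declaration.
type DefaultExportValue =
  | { kind: "expr"; code: string }
  | { kind: "stmt"; stmt: { type: "function" | "class"; code: string } };

// Normalize to an expression so it can be assigned, e.g. `module.exports.default = <expr>`.
function toExpr(value: DefaultExportValue): string {
  if (value.kind === "expr") return value.code;
  // A function/class declaration is also valid as an expression once it is
  // placed in expression position; parentheses keep that unambiguous.
  return `(${value.stmt.code})`;
}

// export default function App() {}  ->  module.exports.default = (function App() {});
console.log(
  `module.exports.default = ${toExpr({ kind: "stmt", stmt: { type: "function", code: "function App() {}" } })};`,
);
```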
- _ = loaded_result.result(vm.global.vm()); - return Macro{ .vm = vm, .resolver = resolver, @@ -8118,8 +8296,9 @@ pub const Macro = struct { const promise = value.asAnyPromise() orelse @panic("Unexpected promise type"); this.macro.vm.waitForPromise(promise); - const promise_result = promise.result(this.global.vm()); - const rejected = promise.status(this.global.vm()) == .Rejected; + + const promise_result = promise.result(this.macro.vm.jsc); + const rejected = promise.status(this.macro.vm.jsc) == .rejected; if (promise_result.isUndefined() and this.is_top_level) { this.is_top_level = false; @@ -8590,3 +8769,19 @@ const ToJSError = error{ MacroError, OutOfMemory, }; + +fn assertNoPointers(T: type) void { + switch (@typeInfo(T)) { + .Pointer => @compileError("no pointers!"), + .Struct => |s| for (s.fields) |field| { + assertNoPointers(field.type); + }, + .Array => |a| assertNoPointers(a.child), + else => {}, + } +} + +inline fn writeAnyToHasher(hasher: anytype, thing: anytype) void { + comptime assertNoPointers(@TypeOf(thing)); // catch silly mistakes + hasher.update(std.mem.asBytes(&thing)); +} diff --git a/src/js_parser.zig b/src/js_parser.zig index a6e1d2fc46..6b03e651aa 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -1050,30 +1050,26 @@ pub const TypeScript = struct { }; }; -// We must prevent collisions from generated names. -// We want to avoid adding a pass over all the symbols in the file. -// To do that: -// For every generated symbol, we reserve two backup symbol names -// If any usages of the preferred ref, we swap original_name with the backup -// If any usages of the backup ref, we swap original_name with the internal -// We *assume* the internal name is never used. -// In practice, it is possible. But, the internal names are so crazy long you'd have to be deliberately trying to use them. -const GeneratedSymbol = @import("./runtime.zig").Runtime.GeneratedSymbol; - pub const ImportScanner = struct { - stmts: []Stmt = &([_]Stmt{}), - + stmts: []Stmt = &.{}, kept_import_equals: bool = false, removed_import_equals: bool = false, - pub fn scan(comptime P: type, p: *P, stmts: []Stmt, will_transform_to_common_js: bool) !ImportScanner { + + pub fn scan( + comptime P: type, + p: *P, + stmts: []Stmt, + will_transform_to_common_js: bool, + comptime hot_module_reloading_transformations: bool, + hot_module_reloading_context: if (hot_module_reloading_transformations) *P.ConvertESMExportsForHmr else void, + ) !ImportScanner { var scanner = ImportScanner{}; var stmts_end: usize = 0; const allocator = p.allocator; const is_typescript_enabled: bool = comptime P.parser_features.typescript; for (stmts) |_stmt| { - // zls needs the hint, it seems. 
- var stmt: Stmt = _stmt; + var stmt = _stmt; // copy switch (stmt.data) { .s_import => |st__| { var st = st__.*; @@ -1361,7 +1357,6 @@ pub const ImportScanner = struct { ); } } else { - // ESM requires live bindings // CommonJS does not require live bindings // We load ESM in browsers & in Bun.js @@ -1420,10 +1415,6 @@ pub const ImportScanner = struct { if (st.func.name) |name| { const original_name = p.symbols.items[name.ref.?.innerIndex()].original_name; try p.recordExport(name.loc, original_name, name.ref.?); - - if (p.options.features.hot_module_reloading) { - st.func.flags.remove(.is_export); - } } else { try p.log.addRangeError(p.source, logger.Range{ .loc = st.func.open_parens_loc, .len = 2 }, "Exported functions must have a name"); } @@ -1433,10 +1424,6 @@ pub const ImportScanner = struct { if (st.is_export) { if (st.class.class_name) |name| { try p.recordExport(name.loc, p.symbols.items[name.ref.?.innerIndex()].original_name, name.ref.?); - - if (p.options.features.hot_module_reloading) { - st.is_export = false; - } } else { try p.log.addRangeError(p.source, logger.Range{ .loc = st.class.body_loc, .len = 0 }, "Exported classes must have a name"); } @@ -1482,13 +1469,9 @@ pub const ImportScanner = struct { } } - // We must do this at the end to not mess up import = - if (p.options.features.hot_module_reloading and st.is_export) { - st.is_export = false; - } - // when bundling, all top-level variables become var - if (p.options.bundle and !st.kind.isUsing()) { + // TODO(@paperdave): we already do this earlier in visiting? + if (!hot_module_reloading_transformations and p.options.bundle and !st.kind.isUsing()) { st.kind = .k_var; } }, @@ -1503,129 +1486,8 @@ pub const ImportScanner = struct { // Rewrite this export to be: // exports.default = // But only if it's anonymous - if (p.options.features.hot_module_reloading) { - - // export default can be: - // - an expression - // - a function - // - a class - // it cannot be a declaration! - // we want to avoid adding a new name - // but we must remove the export default clause. - transform_export_default_when_its_anonymous: { - switch (st.value) { - .expr => |ex| { - switch (ex.data) { - .e_identifier => { - continue; - }, - .e_import_identifier => |import_ident| { - st.default_name.ref = import_ident.ref; - continue; - }, - .e_function => |func| { - if (func.func.name) |name_ref| { - if (name_ref.ref != null) { - stmt = p.s(S.Function{ .func = func.func }, ex.loc); - st.default_name.ref = name_ref.ref.?; - break :transform_export_default_when_its_anonymous; - } - } - }, - .e_class => |class| { - if (class.class_name) |name_ref| { - if (name_ref.ref != null) { - stmt = p.s( - S.Class{ - .class = class.*, - }, - ex.loc, - ); - st.default_name.ref = name_ref.ref.?; - break :transform_export_default_when_its_anonymous; - } - } - }, - else => {}, - } - var decls = try allocator.alloc(G.Decl, 1); - decls[0] = G.Decl{ .binding = p.b(B.Identifier{ .ref = st.default_name.ref.? }, stmt.loc), .value = ex }; - - stmt = p.s(S.Local{ - .decls = G.Decl.List.init(decls), - .kind = S.Local.Kind.k_var, - .is_export = false, - }, ex.loc); - }, - .stmt => |class_or_func| { - switch (class_or_func.data) { - .s_function => |func| { - if (func.func.name) |name_ref| { - if (name_ref.ref != null) { - stmt = class_or_func; - st.default_name.ref = name_ref.ref.?; - break :transform_export_default_when_its_anonymous; - } - } - - var decls = try allocator.alloc(G.Decl, 1); - decls[0] = G.Decl{ .binding = p.b(B.Identifier{ .ref = st.default_name.ref.? 
}, stmt.loc), .value = p.newExpr(E.Function{ .func = func.func }, stmt.loc) }; - - stmt = p.s(S.Local{ - .decls = Decl.List.init(decls), - .kind = S.Local.Kind.k_var, - .is_export = false, - }, stmt.loc); - }, - .s_class => |class| { - if (class.class.class_name) |name_ref| { - if (name_ref.ref != null) { - stmt = class_or_func; - st.default_name.ref = name_ref.ref.?; - break :transform_export_default_when_its_anonymous; - } - } - - var decls = try allocator.alloc(G.Decl, 1); - decls[0] = G.Decl{ - .binding = p.b(B.Identifier{ .ref = st.default_name.ref.? }, stmt.loc), - .value = p.newExpr(E.Class{ - .class_keyword = class.class.class_keyword, - .ts_decorators = class.class.ts_decorators, - .class_name = class.class.class_name, - .extends = class.class.extends, - .body_loc = class.class.body_loc, - .properties = class.class.properties, - .close_brace_loc = class.class.close_brace_loc, - }, stmt.loc), - }; - - stmt = p.s(S.Local{ - .decls = Decl.List.init(decls), - .kind = S.Local.Kind.k_var, - .is_export = false, - }, stmt.loc); - }, - else => unreachable, - } - }, - } - } - } else if (will_transform_to_common_js) { - const expr: js_ast.Expr = switch (st.value) { - .expr => |exp| exp, - .stmt => |s2| brk2: { - switch (s2.data) { - .s_function => |func| { - break :brk2 p.newExpr(E.Function{ .func = func.func }, s2.loc); - }, - .s_class => |class| { - break :brk2 p.newExpr(class.class, s2.loc); - }, - else => unreachable, - } - }, - }; + if (!hot_module_reloading_transformations and will_transform_to_common_js) { + const expr = st.value.toExpr(); var export_default_args = p.allocator.alloc(Expr, 2) catch unreachable; export_default_args[0] = p.@"module.exports"(expr.loc); export_default_args[1] = expr; @@ -1636,11 +1498,6 @@ pub const ImportScanner = struct { for (st.items) |item| { try p.recordExport(item.alias_loc, item.alias, item.name.ref.?); } - - // export clauses simply disappear when we have HMR on, we use NamedExports to regenerate it at the end - if (p.options.features.hot_module_reloading) { - continue; - } }, .s_export_star => |st| { try p.import_records_for_current_part.append(allocator, st.import_record_index); @@ -1692,85 +1549,48 @@ pub const ImportScanner = struct { else => {}, } - stmts[stmts_end] = stmt; - stmts_end += 1; + if (hot_module_reloading_transformations) { + try hot_module_reloading_context.convertStmt(p, stmt); + } else { + stmts[stmts_end] = stmt; + stmts_end += 1; + } } - scanner.stmts = stmts[0..stmts_end]; + + if (!hot_module_reloading_transformations) + scanner.stmts = stmts[0..stmts_end]; + return scanner; } }; +/// We must prevent collisions from generated names with user's names. +/// +/// When transpiling for the runtime, we want to avoid adding a pass over all +/// the symbols in the file (we do this in the bundler since there is more than +/// one file, and user symbols from different files may collide with each +/// other). +/// +/// The solution: For every generated symbol, we reserve two backup symbol names: +/// - If any usages of `.primary`, fall back to `.backup` +/// - If any usages of `.backup`, fall back to `.internal` +/// - We *assume* the internal name is never used. In practice, it is possible. But, the +/// internal names are so crazy long you'd have to be deliberately trying to use them. 
const StaticSymbolName = struct { - internal: string, primary: string, backup: string, + internal: string, - pub const List = struct { - fn NewStaticSymbol(comptime basename: string) StaticSymbolName { - const hash_value = bun.hash(basename); - return comptime StaticSymbolName{ - .internal = basename ++ "_" ++ std.fmt.comptimePrint("{any}", .{bun.fmt.hexIntLower(hash_value)}), - .primary = basename, - .backup = "_" ++ basename ++ "$", - }; - } - - fn NewStaticSymbolWithBackup(comptime basename: string, comptime backup: string) StaticSymbolName { - const hash_value = bun.hash(basename); - return comptime StaticSymbolName{ - .internal = basename ++ "_" ++ std.fmt.comptimePrint("{any}", .{bun.fmt.hexIntLower(hash_value)}), - .primary = basename, - .backup = backup, - }; - } - - pub const jsx = NewStaticSymbol("$jsx"); - pub const jsxs = NewStaticSymbol("jsxs"); - pub const ImportSource = NewStaticSymbol("JSX"); - pub const ClassicImportSource = NewStaticSymbol("JSXClassic"); - pub const jsxFilename = NewStaticSymbolWithBackup("fileName", "jsxFileName"); - pub const REACT_ELEMENT_TYPE = NewStaticSymbolWithBackup("$$typeof", "$$reactEl"); - pub const Symbol = NewStaticSymbolWithBackup("Symbol", "Symbol"); - pub const Factory = NewStaticSymbol("jsxEl"); - pub const Refresher = NewStaticSymbol("FastRefresh"); - pub const Fragment = NewStaticSymbol("JSXFrag"); - - pub const __name = NewStaticSymbol("__name"); - pub const __toModule = NewStaticSymbol("__toModule"); - pub const __require = NewStaticSymbol("require"); - pub const __cJS2eSM = NewStaticSymbol("__cJS2eSM"); - pub const __export = NewStaticSymbol("__export"); - pub const __load = NewStaticSymbol("__load"); - pub const @"$$lzy" = NewStaticSymbol("$$lzy"); - pub const __HMRModule = NewStaticSymbol("HMR"); - pub const __HMRClient = NewStaticSymbol("Bun"); - pub const __FastRefreshModule = NewStaticSymbol("FastHMR"); - pub const __FastRefreshRuntime = NewStaticSymbol("FastRefresh"); - pub const __legacyDecorateClassTS = NewStaticSymbol("__legacyDecorateClassTS"); - pub const __legacyDecorateParamTS = NewStaticSymbol("__legacyDecorateParamTS"); - pub const __legacyMetadataTS = NewStaticSymbol("__legacyMetadataTS"); - pub const @"$$typeof" = NewStaticSymbol("$$typeof"); - - pub const @"$$m" = NewStaticSymbol("$$m"); - - pub const __exportValue = NewStaticSymbol("__exportValue"); - pub const __exportDefault = NewStaticSymbol("__exportDefault"); - pub const hmr = NewStaticSymbol("hmr"); - - pub const insert = NewStaticSymbol("insert"); - pub const template = NewStaticSymbol("template"); - pub const wrap = NewStaticSymbol("wrap"); - pub const createComponent = NewStaticSymbol("createComponent"); - pub const setAttribute = NewStaticSymbol("setAttribute"); - pub const effect = NewStaticSymbol("effect"); - pub const delegateEvents = NewStaticSymbol("delegateEvents"); - - pub const __merge = NewStaticSymbol("__merge"); - - pub const __using = NewStaticSymbol("__using"); - pub const __callDispose = NewStaticSymbol("__callDispose"); - }; + fn init(comptime basename: string) StaticSymbolName { + const hash_value = bun.hash(basename); + return comptime .{ + .internal = std.fmt.comptimePrint("{s}_{}", .{ basename, bun.fmt.hexIntLower(hash_value) }), + .primary = basename, + .backup = "_" ++ basename ++ "$", + }; + } }; +const GeneratedSymbol = @import("./runtime.zig").Runtime.GeneratedSymbol; pub const SideEffects = enum(u1) { could_have_side_effects, @@ -3072,8 +2892,6 @@ pub const Parser = struct { warn_about_unbundled_modules: bool = true, - 
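The trimmed-down `StaticSymbolName` above keeps three candidate spellings per generated symbol: the readable primary name, a `_name$` backup, and a `name_<hash>` internal form that is assumed never to collide with user code, which avoids an extra renaming pass over the file's symbols. A small TypeScript sketch of choosing the first unused candidate; the hash here is an arbitrary stand-in for `bun.hash`:

```ts
// Pick a name for a generated symbol without scanning/renaming user symbols:
// try the readable name first, then a decorated backup, then a hash-suffixed
// internal name that is effectively collision-proof.
function candidateNames(basename: string): [string, string, string] {
  // Stand-in for bun.hash(basename); any stable 64-bit hash works for the sketch.
  let hash = 0n;
  for (const byte of Buffer.from(basename)) {
    hash = (hash * 131n + BigInt(byte)) & 0xffff_ffff_ffff_ffffn;
  }
  return [basename, `_${basename}$`, `${basename}_${hash.toString(16)}`];
}

function chooseGeneratedName(basename: string, usedNames: Set<string>): string {
  const [primary, backup, internal] = candidateNames(basename);
  if (!usedNames.has(primary)) return primary;
  if (!usedNames.has(backup)) return backup;
  return internal; // assumed unused in practice
}

console.log(chooseGeneratedName("require", new Set(["require"]))); // "_require$"
```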
legacy_transform_require_to_import: bool = true, - module_type: options.ModuleType = .unknown, transform_only: bool = false, @@ -3260,7 +3078,7 @@ pub const Parser = struct { } break :brk .none; }; - return .{ .ast = try p.toAST(parts, exports_kind, .{ .none = {} }, "") }; + return .{ .ast = try p.toAST(parts, exports_kind, .none, "") }; } pub fn parse(self: *Parser) !js_ast.Result { @@ -3357,6 +3175,10 @@ pub const Parser = struct { const orig_error_count = self.log.errors; try ParserType.init(self.allocator, self.log, self.source, self.define, self.lexer, self.options, &p); + if (p.options.features.hot_module_reloading) { + bun.assert(!p.options.tree_shaking); + } + // Instead of doing "should_fold_typescript_constant_expressions or features.minify_syntax" // Let's enable this flag file-wide if (p.options.features.minify_syntax or @@ -3475,6 +3297,12 @@ pub const Parser = struct { before.deinit(); } + if (p.options.bundle) { + // The bundler requires a part for generated module wrappers. This + // part must be at the start as it is referred to by index. + before.append(js_ast.Part{}) catch bun.outOfMemory(); + } + // --inspect-brk if (p.options.features.set_breakpoint_on_first_line) { var debugger_stmts = try p.allocator.alloc(Stmt, 1); @@ -3486,14 +3314,7 @@ pub const Parser = struct { js_ast.Part{ .stmts = debugger_stmts, }, - ) catch unreachable; - } - - if (p.options.bundle) { - // allocate an empty part for the bundle - before.append( - js_ast.Part{}, - ) catch unreachable; + ) catch bun.outOfMemory(); } // When "using" declarations appear at the top level, we change all TDZ @@ -3760,7 +3581,7 @@ pub const Parser = struct { // https://github.com/lodash/lodash/issues/5660 var force_esm = false; - if (comptime FeatureFlags.unwrap_commonjs_to_esm) { + if (p.shouldUnwrapCommonJSToESM()) { if (p.imports_to_convert_from_require.items.len > 0) { const all_stmts = p.allocator.alloc(Stmt, p.imports_to_convert_from_require.items.len) catch unreachable; before.ensureUnusedCapacity(p.imports_to_convert_from_require.items.len) catch unreachable; @@ -4067,7 +3888,7 @@ pub const Parser = struct { const uses_module_ref = p.symbols.items[p.module_ref.innerIndex()].use_count_estimate > 0; - var wrapper_expr: CommonJSWrapper = .{ .none = {} }; + var wrap_mode: WrapMode = .none; if (p.isDeoptimizedCommonJS()) { exports_kind = .cjs; @@ -4076,9 +3897,7 @@ pub const Parser = struct { } else if (uses_exports_ref or uses_module_ref or p.has_top_level_return or p.has_with_scope) { exports_kind = .cjs; if (p.options.features.commonjs_at_runtime) { - wrapper_expr = .{ - .bun_js = {}, - }; + wrap_mode = .bun_commonjs; const import_record: ?*const ImportRecord = brk: { for (p.import_records.items) |*import_record| { @@ -4125,28 +3944,6 @@ pub const Parser = struct { try p.log.addRangeErrorWithNotes(p.source, record.range, "Cannot use import statement with CommonJS-only features", notes.items); } - } else if (!p.options.bundle and !p.options.features.commonjs_at_runtime and (!p.options.transform_only or p.options.features.use_import_meta_require)) { - if (p.options.legacy_transform_require_to_import or p.options.features.use_import_meta_require) { - const args = p.allocator.alloc(Expr, 2) catch unreachable; - - if (p.runtime_imports.__exportDefault == null and p.has_export_default) { - p.runtime_imports.__exportDefault = try p.declareGeneratedSymbol(.other, "__exportDefault"); - p.resolveGeneratedSymbol(&p.runtime_imports.__exportDefault.?); - } - - wrapper_expr = .{ .bun_dev = p.callRuntime(logger.Loc.Empty, 
"__cJS2eSM", args) }; - p.resolveGeneratedSymbol(&p.runtime_imports.__cJS2eSM.?); - - // Disable HMR if we're wrapping it in CommonJS - // It's technically possible to support this. - // But we need to cut scope for the v0. - p.options.features.hot_module_reloading = false; - p.options.features.react_fast_refresh = false; - p.runtime_imports.__HMRModule = null; - p.runtime_imports.__FastRefreshModule = null; - p.runtime_imports.__FastRefreshRuntime = null; - p.runtime_imports.__HMRClient = null; - } } } else { switch (p.options.module_type) { @@ -4190,9 +3987,7 @@ pub const Parser = struct { } if (exports_kind == .cjs and p.options.features.commonjs_at_runtime) { - wrapper_expr = .{ - .bun_js = {}, - }; + wrap_mode = .bun_commonjs; } } @@ -4333,28 +4128,6 @@ pub const Parser = struct { } } - if (p.legacy_cjs_import_stmts.items.len > 0 and p.options.legacy_transform_require_to_import) { - var import_records = try bun.BabyList(u32).initCapacity(p.allocator, p.legacy_cjs_import_stmts.items.len); - var declared_symbols = DeclaredSymbol.List{}; - try declared_symbols.ensureTotalCapacity(p.allocator, p.legacy_cjs_import_stmts.items.len); - - for (p.legacy_cjs_import_stmts.items) |entry| { - const import_statement: *S.Import = entry.data.s_import; - import_records.appendAssumeCapacity(import_statement.import_record_index); - declared_symbols.appendAssumeCapacity(.{ - .ref = import_statement.namespace_ref, - .is_top_level = true, - }); - } - - before.append(js_ast.Part{ - .stmts = p.legacy_cjs_import_stmts.items, - .declared_symbols = declared_symbols, - .import_record_indices = import_records, - .tag = .cjs_imports, - }) catch unreachable; - } - if (p.has_called_runtime) { var runtime_imports: [RuntimeImports.all.len]u8 = undefined; var iter = p.runtime_imports.iter(); @@ -4422,6 +4195,10 @@ pub const Parser = struct { } } + if (p.react_refresh.register_used or p.react_refresh.signature_used) { + try p.generateReactRefreshImport(&before); + } + var parts_slice: []js_ast.Part = &([_]js_ast.Part{}); if (before.items.len > 0 or after.items.len > 0) { @@ -4475,7 +4252,7 @@ pub const Parser = struct { } } - return js_ast.Result{ .ast = try p.toAST(parts_slice, exports_kind, wrapper_expr, hashbang) }; + return js_ast.Result{ .ast = try p.toAST(parts_slice, exports_kind, wrap_mode, hashbang) }; } pub fn init(_options: Options, log: *logger.Log, source: *const logger.Source, define: *Define, allocator: Allocator) !Parser { @@ -4610,11 +4387,11 @@ pub const Prefill = struct { pub const Zero = Expr.Data{ .e_number = Value.Zero }; }; pub const Runtime = struct { - pub var JSXFilename = "__jsxFilename"; - pub var MarkAsModule = "__markAsModule"; - pub var CommonJS = "__commonJS"; - pub var ToModule = "__toModule"; - const JSXShortname = "jsx"; + // pub var JSXFilename = "__jsxFilename"; + // pub var MarkAsModule = "__markAsModule"; + // pub var CommonJS = "__commonJS"; + // pub var ToModule = "__toModule"; + // const JSXShortname = "jsx"; }; }; @@ -4640,62 +4417,10 @@ const JSXTransformType = enum { const ParserFeatures = struct { typescript: bool = false, - jsx: JSXTransformType = JSXTransformType.none, + jsx: JSXTransformType = .none, scan_only: bool = false, - - // *** How React Fast Refresh works *** - // - // Implementations: - // [0]: https://github.com/facebook/react/blob/master/packages/react-refresh/src/ReactFreshBabelPlugin.js - // [1]: https://github.com/swc-project/swc/blob/master/ecmascript/transforms/react/src/refresh/mod.rs - // - // Additional reading: - // - 
https://github.com/facebook/react/issues/16604#issuecomment-528663101 - // - https://github.com/facebook/react/blob/master/packages/react-refresh/src/__tests__/ReactFreshIntegration-test.js - // - // From reading[0] and Dan Abramov's comment, there are really five parts. - // 1. At the top of the file: - // 1. Declare a $RefreshReg$ if it doesn't exist - // - This really just does "RefreshRuntime.register(ComponentIdentifier, ComponentIdentifier.name);" - // 2. Run "var _s${componentIndex} = $RefreshSig$()" to generate a function for updating react refresh scoped to the component. So it's one per *component*. - // - This really just does "RefreshRuntime.createSignatureFunctionForTransform();" - // 2. Register all React components[2] defined in the module scope by calling the equivalent of $RefreshReg$(ComponentIdentifier, "ComponentName") - // 3. For each registered component: - // 1. Call "_s()" to mark the first render of this component for "react-refresh/runtime". Call this at the start of the React component's function body - // 2. Track every call expression to a hook[3] inside the component, including: - // - Identifier of the hook function - // - Arguments passed - // 3. For each hook's call expression, generate a signature key which is - // - The hook's identifier ref - // - The S.Decl ("VariableDeclarator")'s source - // "var [foo, bar] = useFooBar();" - // ^--------^ This region, I think. Judging from this line: https://github.com/facebook/react/blob/master/packages/react-refresh/src/ReactFreshBabelPlugin.js#L407 - // - For the "useState" hook, also hash the source of the first argument if it exists e.g. useState(foo => true); - // - For the "useReducer" hook, also hash the source of the second argument if it exists e.g. useReducer({}, () => ({})); - // 4. If the hook component is not builtin and is defined inside a component, always reset the component state - // - See this test: https://github.com/facebook/react/blob/568dc3532e25b30eee5072de08503b1bbc4f065d/packages/react-refresh/src/__tests__/ReactFreshIntegration-test.js#L909 - // 4. From the signature key generated in 3., call one of the following: - // - _s(ComponentIdentifier, hash(signature)); - // - _s(ComponentIdentifier, hash(signature), true /* forceReset */); - // - _s(ComponentIdentifier, hash(signature), false /* forceReset */, () => [customHook1, customHook2, customHook3]); - // Note: This step is only strictly required on rebuild. - // 5. if (isReactComponentBoundary(exports)) enqueueUpdateAndHandleErrors(); - // **** FAQ **** - // [2]: Q: From a parser's perspective, what's a component? - // A: typeof name === 'string' && name[0] >= 'A' && name[0] <= 'Z -- https://github.com/facebook/react/blob/568dc3532e25b30eee5072de08503b1bbc4f065d/packages/react-refresh/src/ReactFreshBabelPlugin.js#L42-L44 - // [3]: Q: From a parser's perspective, what's a hook? 
- // A: /^use[A-Z]/ -- https://github.com/facebook/react/blob/568dc3532e25b30eee5072de08503b1bbc4f065d/packages/react-refresh/src/ReactFreshBabelPlugin.js#L390 - // - // - // - // react_fast_refresh: bool = false, }; -// Our implementation diverges somewhat from the official implementation -// Specifically, we use a subclass of HMRModule - FastRefreshModule -// Instead of creating a globally-scoped -const FastRefresh = struct {}; - const ImportItemForNamespaceMap = bun.StringArrayHashMap(LocRef); pub const KnownGlobal = enum { @@ -5010,8 +4735,6 @@ fn NewParser_( is_file_considered_to_have_esm_exports: bool = false, - hmr_module: GeneratedSymbol = GeneratedSymbol{ .primary = Ref.None, .backup = Ref.None, .ref = Ref.None }, - has_called_runtime: bool = false, legacy_cjs_import_stmts: std.ArrayList(Stmt), @@ -5097,13 +4820,11 @@ fn NewParser_( jsx_automatic: GeneratedSymbol = GeneratedSymbol{ .ref = Ref.None, .primary = Ref.None, .backup = Ref.None }, jsxs_runtime: GeneratedSymbol = GeneratedSymbol{ .ref = Ref.None, .primary = Ref.None, .backup = Ref.None }, jsx_classic: GeneratedSymbol = GeneratedSymbol{ .ref = Ref.None, .primary = Ref.None, .backup = Ref.None }, - jsx_imports: JSXImport.Symbols = .{}, - // only applicable when is_react_fast_refresh_enabled - jsx_refresh_runtime: GeneratedSymbol = GeneratedSymbol{ .ref = Ref.None, .primary = Ref.None, .backup = Ref.None }, - - bun_jsx_ref: Ref = Ref.None, + // only applicable when `.options.features.react_fast_refresh` is set. + // populated before visit pass starts. + react_refresh: ReactRefresh = .{}, jest: Jest = .{}, @@ -5262,9 +4983,6 @@ fn NewParser_( require_transposer: RequireTransposer, require_resolve_transposer: RequireResolveTransposer, - // This is a general place to put lots of Expr objects - expr_list: List(Expr) = .{}, - const_values: js_ast.Ast.ConstValuesMap = .{}, // These are backed by stack fallback allocators in _parse, and are uninitialized until then. @@ -5292,6 +5010,9 @@ fn NewParser_( // If this is true, then all top-level statements are wrapped in a try/catch will_wrap_module_in_try_catch_for_using: bool = false, + /// Used for react refresh, it must be able to insert `const _s = $RefreshSig$();` + nearest_stmt_list: ?*ListManaged(Stmt) = null, + const RecentlyVisitedTSNamespace = struct { expr: Expr.Data = Expr.empty.data, map: ?*js_ast.TSNamespaceMemberMap = null, @@ -5302,6 +5023,136 @@ fn NewParser_( }; }; + /// "Fast Refresh" is React's solution for hot-module-reloading in the context of the UI framework + /// user guide: https://reactnative.dev/docs/fast-refresh (applies to react-dom and native) + /// + /// This depends on performing a couple extra transformations at bundle time, as well as + /// including the `react-refresh` NPM package, which is able to do the heavy lifting, + /// integrating with `react` and `react-dom`. + /// + /// Prior implementations: + /// [1]: https://github.com/facebook/react/blob/main/packages/react-refresh/src/ReactFreshBabelPlugin.js + /// [2]: https://github.com/swc-project/swc/blob/main/crates/swc_ecma_transforms_react/src/refresh/mod.rs + /// + /// Additional reading: + /// [3] https://github.com/facebook/react/issues/16604#issuecomment-528663101 + /// [4] https://github.com/facebook/react/blob/master/packages/react-refresh/src/__tests__/ReactFreshIntegration-test.js + /// + /// Instead of a plugin which visits the tree separately, Bun's implementation of fast refresh + /// happens in tandem with the visit pass. 
The responsibilities of the transform are as follows: + /// + /// 1. For all Components (which is defined as any top-level function/function variable, that is + /// named with a capital letter; see `isComponentishName`), register them to the runtime using + /// `$RefreshReg$(ComponentFunction, "Component");`. Implemented in `p.handleReactRefreshRegister` + /// HOC components are also registered, but only through a special case for `export default` + /// + /// 2. For all functions which call a Hook (a hook is an identifier matching /^use[A-Z]/): + /// a. Outside of the function, create a signature function `const _s = $RefreshSig$();` + /// b. At the start of the function, call `_s()` + /// c. Record all of the hooks called, the variables they are assigned to, and + /// arguments depending on which hook has been used. `useState` and `useReducer`, + /// for example, are special-cased. + /// d. Directly after the function, call `_s(hook, "", forceReset)` + /// - If a user-defined hook is called, the alterate form is used: + /// `_s(hook, "", forceReset, () => [useCustom1, useCustom2])` + /// + /// The upstream transforms do not declare `$RefreshReg$` or `$RefreshSig$`. A typical + /// implementation might look like this, prepending this data to the module start: + /// + /// import * as Refresh from 'react-refresh/runtime'; + /// const $RefreshReg$ = (type, id) => Refresh.register(type, "" + id); + /// const $RefreshSig$ = Refresh.createSignatureFunctionForTransform; + /// + /// Since Bun is a transpiler *and* bundler, we take a slightly different approach. Aside + /// from including the link to the refresh runtime, our notation of $RefreshReg$ is just + /// pointing at `Refresh.register`, which means when we call it, the second argument has + /// to be a string containing the filepath, not just the component name. + const ReactRefresh = struct { + // Set if this JSX/TSX file uses the refresh runtime. If so, + // we must insert an import statement to it. + register_used: bool = false, + signature_used: bool = false, + + /// $RefreshReg$ is called on all top-level variables that are + /// components, as well as HOCs found in the `export default` clause. + register_ref: Ref = Ref.None, + + /// $RefreshSig$ is called to create a signature function, which is + /// used by the refresh runtime to perform smart hook tracking. + create_signature_ref: Ref = Ref.None, + + /// If a comment with '@refresh reset' is seen, we will forward a + /// force refresh to the refresh runtime. This lets you reset the + /// state of hooks on an update on a per-component basis. + // TODO: this is never set + force_reset: bool = false, + + /// The last hook that was scanned. This is used when visiting + /// `.s_local`, as we must hash the variable destructure if the + /// hook's result is assigned directly to a local. + last_hook_seen: ?*E.Call = null, + + /// Every function sets up stack memory to hold data related to it's + /// hook tracking. This is a pointer to that ?HookContext, where an + /// inner null means there are no hook calls. + /// + /// The inner value is initialized when the first hook .e_call is + /// visited, where the '_s' symbol is reserved. Additional hook calls + /// append to the `hasher` and `user_hooks` as needed. + /// + /// When a function is done visiting, the stack location is checked, + /// and then it will insert `var _s = ...`, add the `_s()` call at + /// the start of the function, and then add the call to `_s(func, ...)`. 
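+        ///
+        /// Rough sketch of the end result for a component using one hook (the hash
+        /// string is a placeholder and exact statement placement may differ):
+        ///
+        ///     var _s = $RefreshSig$();
+        ///     function Counter() {
+        ///       _s();
+        ///       const [n, setN] = useState(0);
+        ///       // ...
+        ///     }
+        ///     _s(Counter, "<hash>");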
+ hook_ctx_storage: ?*?HookContext = null, + + pub const HookContext = struct { + hasher: std.hash.Wyhash, + signature_cb: Ref, + user_hooks: std.AutoArrayHashMapUnmanaged(Ref, Expr), + }; + + // https://github.com/facebook/react/blob/d1afcb43fd506297109c32ff462f6f659f9110ae/packages/react-refresh/src/ReactFreshBabelPlugin.js#L42 + pub fn isComponentishName(id: []const u8) bool { + if (id.len == 0) return false; + return switch (id[0]) { + 'A'...'Z' => true, + else => false, + }; + } + + // https://github.com/facebook/react/blob/d1afcb43fd506297109c32ff462f6f659f9110ae/packages/react-refresh/src/ReactFreshBabelPlugin.js#L408 + pub fn isHookName(id: []const u8) bool { + return id.len >= 4 and + strings.hasPrefixComptime(id, "use") and + switch (id[3]) { + 'A'...'Z' => true, + else => false, + }; + } + + pub const built_in_hooks = bun.ComptimeEnumMap(enum { + useState, + useReducer, + useEffect, + useLayoutEffect, + useMemo, + useCallback, + useRef, + useContext, + useImperativeHandle, + useDebugValue, + useId, + useDeferredValue, + useTransition, + useInsertionEffect, + useSyncExternalStore, + useFormStatus, + useFormState, + useActionState, + useOptimistic, + }); + }; + /// use this instead of checking p.source.index /// because when not bundling, p.source.index is `0` inline fn isSourceRuntime(p: *const P) bool { @@ -5421,22 +5272,22 @@ fn NewParser_( // For unwrapping CommonJS into ESM to fully work // we must also unwrap requires into imports. - const should_unwrap_require = p.unwrap_all_requires or - if (path.packageName()) |pkg| p.options.features.shouldUnwrapRequire(pkg) else false; - - if (should_unwrap_require and + const should_unwrap_require = !p.options.features.hot_module_reloading and + (p.unwrap_all_requires or + if (path.packageName()) |pkg| p.options.features.shouldUnwrapRequire(pkg) else false) and // We cannot unwrap a require wrapped in a try/catch because // import statements cannot be wrapped in a try/catch and // require cannot return a promise. - !handles_import_errors) - { + !handles_import_errors; + + if (should_unwrap_require) { const import_record_index = p.addImportRecordByRangeAndPath(.stmt, p.source.rangeOfString(arg.loc), path); p.import_records.items[import_record_index].handles_import_errors = handles_import_errors; // Note that this symbol may be completely removed later. 
- var path_name = fs.PathName.init(strings.append(p.allocator, "import_", path.text) catch unreachable); - const name = path_name.nonUniqueNameString(p.allocator) catch unreachable; - const namespace_ref = p.newSymbol(.other, name) catch unreachable; + var path_name = fs.PathName.init(path.text); + const name = path_name.nonUniqueNameString(p.allocator) catch bun.outOfMemory(); + const namespace_ref = p.newSymbol(.other, name) catch bun.outOfMemory(); p.imports_to_convert_from_require.append(p.allocator, .{ .namespace = .{ @@ -5444,8 +5295,8 @@ fn NewParser_( .loc = arg.loc, }, .import_record_id = import_record_index, - }) catch unreachable; - p.import_items_for_namespace.put(p.allocator, namespace_ref, ImportItemForNamespaceMap.init(p.allocator)) catch unreachable; + }) catch bun.outOfMemory(); + p.import_items_for_namespace.put(p.allocator, namespace_ref, ImportItemForNamespaceMap.init(p.allocator)) catch bun.outOfMemory(); p.recordUsage(namespace_ref); if (!state.is_require_immediately_assigned_to_decl) { @@ -5467,60 +5318,7 @@ fn NewParser_( p.import_records.items[import_record_index].handles_import_errors = handles_import_errors; p.import_records_for_current_part.append(p.allocator, import_record_index) catch unreachable; - if (!p.options.legacy_transform_require_to_import) { - return p.newExpr(E.RequireString{ .import_record_index = import_record_index }, arg.loc); - } - - p.import_records.items[import_record_index].was_originally_require = true; - p.import_records.items[import_record_index].contains_import_star = true; - - const symbol_name = p.import_records.items[import_record_index].path.name.nonUniqueNameString(p.allocator) catch unreachable; - const hash_value = @as( - u16, - @truncate(bun.hash(p.import_records.items[import_record_index].path.text)), - ); - - const cjs_import_name = std.fmt.allocPrint( - p.allocator, - "{s}_{any}_{d}", - .{ - symbol_name, - bun.fmt.hexIntLower(hash_value), - p.legacy_cjs_import_stmts.items.len, - }, - ) catch unreachable; - - const namespace_ref = p.declareSymbol(.hoisted, arg.loc, cjs_import_name) catch unreachable; - - p.legacy_cjs_import_stmts.append( - p.s( - S.Import{ - .namespace_ref = namespace_ref, - .star_name_loc = arg.loc, - .is_single_line = true, - .import_record_index = import_record_index, - }, - arg.loc, - ), - ) catch unreachable; - - const args = p.allocator.alloc(Expr, 1) catch unreachable; - args[0] = p.newExpr( - E.ImportIdentifier{ - .ref = namespace_ref, - }, - arg.loc, - ); - - // require(import_object_assign) - p.recordUsageOfRuntimeRequire(); - return p.newExpr( - E.Call{ - .target = p.valueForRequire(arg.loc), - .args = ExprNodeList.init(args), - }, - arg.loc, - ); + return p.newExpr(E.RequireString{ .import_record_index = import_record_index }, arg.loc); }, else => { p.recordUsageOfRuntimeRequire(); @@ -5537,6 +5335,11 @@ fn NewParser_( } } + pub fn shouldUnwrapCommonJSToESM(p: *P) bool { + // hot module loading opts out of this because we want to produce a cjs bundle at the end + return FeatureFlags.unwrap_commonjs_to_esm and !p.options.features.hot_module_reloading; + } + fn isBindingUsed(p: *P, binding: Binding, default_export_ref: Ref) bool { switch (binding.data) { .b_identifier => |ident| { @@ -5570,10 +5373,6 @@ fn NewParser_( return false; }, - .b_property => |prop| { - return p.isBindingUsed(prop.value, default_export_ref); - }, - .b_missing => return false, } } @@ -5728,20 +5527,13 @@ fn NewParser_( const declared_refs = part.declared_symbols.refs(); for (declared_refs) |declared| { 
symbols[declared.innerIndex()].use_count_estimate = 0; - // } } } pub fn s(_: *P, t: anytype, loc: logger.Loc) Stmt { const Type = @TypeOf(t); - comptime { - if (!is_typescript_enabled and (Type == S.TypeScript or Type == *S.TypeScript)) { - @compileError("Attempted to use TypeScript syntax in a non-TypeScript environment"); - } - } - if (!is_typescript_enabled and (Type == S.TypeScript or Type == *S.TypeScript)) { - unreachable; + @compileError("Attempted to use TypeScript syntax in a non-TypeScript environment"); } // Output.print("\nStmt: {s} - {d}\n", .{ @typeName(@TypeOf(t)), loc.start }); @@ -6002,9 +5794,6 @@ fn NewParser_( p.recordExportedBinding(prop.value); } }, - else => { - p.panic("Unexpected binding export type {any}", .{binding}); - }, } } @@ -6276,6 +6065,7 @@ fn NewParser_( .namespace_ref = namespace_ref, .items = clause_items, .import_record_index = import_record_i, + .is_single_line = true, }, logger.Loc{}, ); @@ -6286,8 +6076,9 @@ fn NewParser_( var import_records = try allocator.alloc(@TypeOf(import_record_i), 1); import_records[0] = import_record_i; - // Append a single import to the end of the file (ES6 imports are hoisted - // so we don't need to worry about where the import statement goes) + // This import is placed in a part before the main code, however + // the bundler ends up re-ordering this to be after... The order + // does not matter as ESM imports are always hoisted. parts.append(js_ast.Part{ .stmts = stmts, .declared_symbols = declared_symbols, @@ -6296,6 +6087,103 @@ fn NewParser_( }) catch unreachable; } + pub fn generateReactRefreshImport(p: *P, parts: *ListManaged(js_ast.Part)) !void { + switch (p.options.features.hot_module_reloading) { + inline else => |hmr| try p.generateReactRefreshImportHmr(parts, hmr), + } + } + + fn generateReactRefreshImportHmr(p: *P, parts: *ListManaged(js_ast.Part), comptime hot_module_reloading: bool) !void { + // If `hot_module_reloading`, we are going to generate a require call: + // + // const { $RefreshSig$, $RefreshReg$ } = require("react-refresh/runtime")` + // + // Otherwise we are going to settle on an import statement. Using + // require is fine in HMR bundling because `react-refresh` itself is + // already a CommonJS module, and it will actually be more efficient + // at runtime this way. 
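+            //
+            // The non-HMR form is instead roughly (a sketch; the exact local names
+            // depend on how the two refs get renamed):
+            //
+            //   import { register as $RefreshReg$, createSignatureFunctionForTransform as $RefreshSig$ }
+            //     from "react-refresh/runtime";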
+ const allocator = p.allocator; + const import_record_index = p.addImportRecordByRange(.stmt, logger.Range.None, "react-refresh/runtime"); + + const Item = if (hot_module_reloading) B.Object.Property else js_ast.ClauseItem; + + const len = 1 + @as(usize, @intFromBool(p.react_refresh.register_used)) + + @as(usize, @intFromBool(p.react_refresh.signature_used)); + var items = try List(Item).initCapacity(allocator, len); + + const stmts = try allocator.alloc(Stmt, 1); + var declared_symbols = DeclaredSymbol.List{}; + try declared_symbols.ensureTotalCapacity(allocator, len); + + const namespace_ref = try p.newSymbol(.other, "RefreshRuntime"); + declared_symbols.appendAssumeCapacity(.{ + .ref = namespace_ref, + .is_top_level = true, + }); + try p.module_scope.generated.push(allocator, namespace_ref); + + inline for (.{ + .{ + .name = "register", + .enabled = p.react_refresh.register_used, + .ref = p.react_refresh.register_ref, + }, + .{ + .name = "createSignatureFunctionForTransform", + .enabled = p.react_refresh.signature_used, + .ref = p.react_refresh.create_signature_ref, + }, + }) |entry| { + if (entry.enabled) { + items.appendAssumeCapacity(if (hot_module_reloading) .{ + .key = p.newExpr(E.String{ .data = entry.name }, logger.Loc.Empty), + .value = p.b(B.Identifier{ .ref = entry.ref }, logger.Loc.Empty), + } else .{ + .alias = entry.name, + .original_name = entry.name, + .alias_loc = logger.Loc{}, + .name = LocRef{ .ref = entry.ref, .loc = logger.Loc{} }, + }); + declared_symbols.appendAssumeCapacity(.{ .ref = entry.ref, .is_top_level = true }); + try p.module_scope.generated.push(allocator, entry.ref); + try p.is_import_item.put(allocator, entry.ref, {}); + try p.named_imports.put(entry.ref, .{ + .alias = entry.name, + .alias_loc = logger.Loc.Empty, + .namespace_ref = namespace_ref, + .import_record_index = import_record_index, + }); + } + } + + stmts[0] = p.s(if (hot_module_reloading) + S.Local{ + .kind = .k_const, + .decls = try Decl.List.fromSlice(p.allocator, &.{.{ + .binding = p.b(B.Object{ + .properties = items.items, + }, logger.Loc.Empty), + .value = p.newExpr(E.RequireString{ + .import_record_index = import_record_index, + }, logger.Loc.Empty), + }}), + } + else + S.Import{ + .namespace_ref = namespace_ref, + .items = items.items, + .import_record_index = import_record_index, + .is_single_line = false, + }, logger.Loc.Empty); + + try parts.append(.{ + .stmts = stmts, + .declared_symbols = declared_symbols, + .import_record_indices = try bun.BabyList(u32).fromSlice(allocator, &.{import_record_index}), + .tag = .runtime, + }); + } + fn substituteSingleUseSymbolInStmt(p: *P, stmt: Stmt, ref: Ref, replacement: Expr) bool { const expr: *Expr = brk: { switch (stmt.data) { @@ -6848,12 +6736,8 @@ fn NewParser_( var generated_symbols_count: u32 = 3; - if (p.options.features.hot_module_reloading) { + if (p.options.features.react_fast_refresh) { generated_symbols_count += 3; - - if (p.options.features.react_fast_refresh) { - generated_symbols_count += 1; - } } if (is_jsx_enabled) { @@ -6884,27 +6768,10 @@ fn NewParser_( p.jest.afterAll = try p.declareCommonJSSymbol(.unbound, "afterAll"); } - if (p.options.features.hot_module_reloading) { - p.hmr_module = try p.declareGeneratedSymbol(.other, "hmr"); - if (p.options.features.react_fast_refresh) { - if (p.options.jsx.use_embedded_refresh_runtime) { - p.runtime_imports.__FastRefreshRuntime = try p.declareGeneratedSymbol(.other, "__FastRefreshRuntime"); - p.recordUsage(p.runtime_imports.__FastRefreshRuntime.?.ref); - p.jsx_refresh_runtime = 
p.runtime_imports.__FastRefreshRuntime.?; - } else { - p.jsx_refresh_runtime = try p.declareGeneratedSymbol(.other, "Refresher"); - } - - p.runtime_imports.__FastRefreshModule = try p.declareGeneratedSymbol(.other, "__FastRefreshModule"); - p.recordUsage(p.runtime_imports.__FastRefreshModule.?.ref); - } else { - p.runtime_imports.__HMRModule = try p.declareGeneratedSymbol(.other, "__HMRModule"); - p.recordUsage(p.runtime_imports.__HMRModule.?.ref); - } - - p.runtime_imports.__HMRClient = try p.declareGeneratedSymbol(.other, "__HMRClient"); - p.recordUsage(p.hmr_module.ref); - p.recordUsage(p.runtime_imports.__HMRClient.?.ref); + if (p.options.features.react_fast_refresh) { + // this is .. obviously.. not correct + p.react_refresh.create_signature_ref = (try p.declareGeneratedSymbol(.other, "$RefreshSig$")).primary; + p.react_refresh.register_ref = (try p.declareGeneratedSymbol(.other, "$RefreshReg$")).primary; } // "React.createElement" and "createElement" become: @@ -6954,10 +6821,11 @@ fn NewParser_( fn ensureRequireSymbol(p: *P) void { if (p.runtime_imports.__require != null) return; - p.runtime_imports.__require = GeneratedSymbol{ - .backup = declareSymbolMaybeGenerated(p, .other, logger.Loc.Empty, StaticSymbolName.List.__require.backup, true) catch unreachable, + const static_symbol = comptime StaticSymbolName.init("__require"); + p.runtime_imports.__require = .{ + .backup = declareSymbolMaybeGenerated(p, .other, logger.Loc.Empty, static_symbol.backup, true) catch bun.outOfMemory(), .primary = p.require_ref, - .ref = declareSymbolMaybeGenerated(p, .other, logger.Loc.Empty, StaticSymbolName.List.__require.internal, true) catch unreachable, + .ref = declareSymbolMaybeGenerated(p, .other, logger.Loc.Empty, static_symbol.internal, true) catch bun.outOfMemory(), }; p.runtime_imports.put("__require", p.runtime_imports.__require.?); } @@ -6974,26 +6842,11 @@ fn NewParser_( } pub fn resolveBundlingSymbols(p: *P) void { - p.recordUsage(p.runtime_imports.@"$$m".?.ref); - - p.resolveGeneratedSymbol(&p.runtime_imports.@"$$m".?); - p.resolveGeneratedSymbol(&p.runtime_imports.@"$$lzy".?); p.resolveGeneratedSymbol(&p.runtime_imports.__export.?); p.resolveGeneratedSymbol(&p.runtime_imports.__exportValue.?); p.resolveGeneratedSymbol(&p.runtime_imports.__exportDefault.?); } - pub fn resolveHMRSymbols(p: *P) void { - p.resolveGeneratedSymbol(&p.hmr_module); - if (p.runtime_imports.__FastRefreshModule != null) { - p.resolveGeneratedSymbol(&p.runtime_imports.__FastRefreshModule.?); - if (p.options.jsx.use_embedded_refresh_runtime) - p.resolveGeneratedSymbol(&p.runtime_imports.__FastRefreshRuntime.?); - } - if (p.runtime_imports.__HMRModule != null) p.resolveGeneratedSymbol(&p.runtime_imports.__HMRModule.?); - if (p.runtime_imports.__HMRClient != null) p.resolveGeneratedSymbol(&p.runtime_imports.__HMRClient.?); - } - pub fn resolveStaticJSXSymbols(p: *P) void { if (p.options.bundle) return; @@ -8000,7 +7853,7 @@ fn NewParser_( fn parseFn(p: *P, name: ?js_ast.LocRef, opts: FnOrArrowDataParse) anyerror!G.Fn { // if data.allowAwait and data.allowYield { - // p.markSyntaxFeature(compat.AsyncGenerator, data.asyncRange) + // p.markSyntaxFeature(compat.AsyncGenerator, data.asyncRange) // } var func = G.Fn{ @@ -9163,8 +9016,8 @@ fn NewParser_( }) catch unreachable; } } else { - var path_name = fs.PathName.init(strings.append(p.allocator, "import_", path.text) catch unreachable); - const name = try path_name.nonUniqueNameString(p.allocator); + var path_name = fs.PathName.init(path.text); + const name = try 
strings.append(p.allocator, "import_", try path_name.nonUniqueNameString(p.allocator)); stmt.namespace_ref = try p.newSymbol(.other, name); var scope: *Scope = p.current_scope; try scope.generated.push(p.allocator, stmt.namespace_ref); @@ -9188,6 +9041,18 @@ fn NewParser_( name_loc.ref = ref; try p.is_import_item.put(p.allocator, ref, {}); + // ensure every e_import_identifier holds the namespace + if (p.options.features.hot_module_reloading) { + const symbol = &p.symbols.items[ref.inner_index]; + if (symbol.namespace_alias == null) { + symbol.namespace_alias = .{ + .namespace_ref = stmt.namespace_ref, + .alias = "default", + .import_record_index = stmt.import_record_index, + }; + } + } + if (macro_remap) |*remap| { if (remap.get("default")) |remapped_path| { const new_import_id = p.addImportRecord(.stmt, path.loc, remapped_path); @@ -9239,6 +9104,18 @@ fn NewParser_( try p.is_import_item.put(p.allocator, ref, {}); p.checkForNonBMPCodePoint(item.alias_loc, item.alias); + // ensure every e_import_identifier holds the namespace + if (p.options.features.hot_module_reloading) { + const symbol = &p.symbols.items[ref.inner_index]; + if (symbol.namespace_alias == null) { + symbol.namespace_alias = .{ + .namespace_ref = stmt.namespace_ref, + .alias = name, + .import_record_index = stmt.import_record_index, + }; + } + } + if (macro_remap) |*remap| { if (remap.get(item.alias)) |remapped_path| { const new_import_id = p.addImportRecord(.stmt, path.loc, remapped_path); @@ -9575,39 +9452,6 @@ fn NewParser_( }, loc); } - // For HMR, we must convert syntax like this: - // export function leftPad() { - // export const guy = GUY_FIERI_ASCII_ART; - // export class Bacon {} - // export default GuyFieriAsciiArt; - // export {Bacon}; - // export {Bacon as default}; - // to: - // var __hmr__module = new __hmr_HMRModule(file_id, import.meta); - // (__hmr__module._load = function() { - // __hmr__module.exports.leftPad = function () {}; - // __hmr__module.exports.npmProgressBar33 = true; - // __hmr__module.exports.Bacon = class {}; - // })(); - // export { __hmr__module.exports.leftPad as leftPad, __hmr__module.exports.npmProgressBar33 as npmProgressBar33, __hmr__module } - // - // - // - // At bottom of the file: - // - - // var __hmr__exports = new HMRModule({ - // leftPad: () => leftPad, - // npmProgressBar33 () => npmProgressBar33, - // default: () => GuyFieriAsciiArt, - // [__hmr_ModuleIDSymbol]: - //}); - // export { __hmr__exports.leftPad as leftPad, __hmr__ } - // - - // Then: - // if () { - // - // } - fn defaultNameForExpr(p: *P, expr: Expr, loc: logger.Loc) LocRef { switch (expr.data) { .e_function => |func_container| { @@ -10146,9 +9990,7 @@ fn NewParser_( try p.requireInitializers(.k_const, decls.items); } - // When HMR is enabled, replace all const/let exports with var - const kind = if (p.options.features.hot_module_reloading and opts.is_export) S.Local.Kind.k_var else S.Local.Kind.k_const; - return p.s(S.Local{ .kind = kind, .decls = Decl.List.fromList(decls), .is_export = opts.is_export }, loc); + return p.s(S.Local{ .kind = .k_const, .decls = Decl.List.fromList(decls), .is_export = opts.is_export }, loc); }, .t_if => { try p.lexer.next(); @@ -11216,7 +11058,6 @@ fn NewParser_( try p.defineExportedNamespaceBinding(exported_members, prop.binding); } }, - else => Output.panic("Unexpected binding: {s}", .{@tagName(binding.data)}), } } @@ -11528,8 +11369,7 @@ fn NewParser_( return ExprOrLetStmt{ .stmt_or_expr = js_ast.StmtOrExpr{ .stmt = p.s(S.Local{ - // Replace all "export let" with "export var" when 
HMR is enabled - .kind = if (opts.is_export and p.options.features.hot_module_reloading) .k_var else .k_let, + .kind = .k_let, .decls = G.Decl.List.fromList(decls), .is_export = opts.is_export, }, token_range.loc), @@ -12660,17 +12500,17 @@ fn NewParser_( } fn declareGeneratedSymbol(p: *P, kind: Symbol.Kind, comptime name: string) !GeneratedSymbol { - const static = @field(StaticSymbolName.List, name); + const static = comptime StaticSymbolName.init(name); if (p.options.bundle) { const ref = try declareSymbolMaybeGenerated(p, .other, logger.Loc.Empty, static.primary, true); - return GeneratedSymbol{ + return .{ .backup = ref, .primary = ref, .ref = ref, }; } - return GeneratedSymbol{ + return .{ .backup = try declareSymbolMaybeGenerated(p, .other, logger.Loc.Empty, static.backup, true), .primary = try declareSymbolMaybeGenerated(p, .other, logger.Loc.Empty, static.primary, true), .ref = try declareSymbolMaybeGenerated(p, kind, logger.Loc.Empty, static.internal, true), @@ -12685,7 +12525,6 @@ fn NewParser_( // p.checkForNonBMPCodePoint(loc, name) if (comptime !is_generated) { - // Forbid declaring a symbol with a reserved word in strict mode if (p.isStrictMode() and name.ptr != arguments_str.ptr and js_lexer.StrictModeReservedWords.has(name)) { try p.markStrictModeFeature(.reserved_word, js_lexer.rangeOfIdentifier(p.source, loc), name); @@ -12895,9 +12734,6 @@ fn NewParser_( p.declareBinding(kind, &prop.value, opts) catch unreachable; } }, - else => { - // @compileError("Missing binding type"); - }, } } @@ -16290,6 +16126,16 @@ fn NewParser_( var stmts = ListManaged(Stmt).fromOwnedSlice(p.allocator, body.stmts); var temp_opts = PrependTempRefsOpts{ .kind = StmtsKind.fn_body, .fn_body_loc = body.loc }; p.visitStmtsAndPrependTempRefs(&stmts, &temp_opts) catch unreachable; + + if (p.options.features.react_fast_refresh) { + const hook_storage = p.react_refresh.hook_ctx_storage orelse + unreachable; // caller did not init hook storage. any function can have react hooks! + + if (hook_storage.*) |*hook| { + p.handleReactRefreshPostVisitFunctionBody(&stmts, hook); + } + } + func.body = G.FnBody{ .stmts = stmts.items, .loc = body.loc }; p.popScope(); @@ -16297,6 +16143,7 @@ fn NewParser_( p.fn_or_arrow_data_visit = old_fn_or_arrow_data; p.fn_only_data_visit = old_fn_only_data; + return func; } @@ -16364,7 +16211,6 @@ fn NewParser_( // p.log.addRangeError(p.source, target.range, "Cannot use \"new.target\" here") catch unreachable; // } }, - .e_string => { // If you're using this, you're probably not using 0-prefixed legacy octal notation @@ -16506,7 +16352,6 @@ fn NewParser_( .was_originally_identifier = true, }); }, - .e_jsx_element => |e_| { switch (comptime jsx_transform_type) { .react => { @@ -16707,7 +16552,6 @@ fn NewParser_( else => unreachable, } }, - .e_template => |e_| { if (e_.tag) |tag| { e_.tag = p.visitExpr(tag); @@ -16766,7 +16610,6 @@ fn NewParser_( return e_.fold(p.allocator, expr.loc); } }, - .e_binary => |e_| { // The handling of binary expressions is convoluted because we're using @@ -17659,11 +17502,30 @@ fn NewParser_( } } + // In fast refresh, any function call that looks like a hook (/^use[A-Z]/) is a + // hook, even if it is not the value of `SExpr` or `SLocal`. It can be anywhere + // in the function call. This makes sense for some weird situations with `useCallback`, + // where it is not assigned to a variable. + // + // When we see a hook call, we need to hash it, and then mark a flag so that if + // it is assigned to a variable, that variable also get's hashed. 
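+                    //
+                    // Illustrative example: `useState(0)` contributes "useState" plus its
+                    // initial-state argument to the hash, while a call to a user-defined
+                    // `useFoo()` is additionally recorded so it can be reported through the
+                    // custom-hooks callback passed to `_s(...)`.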
+ if (p.options.features.react_fast_refresh) try_record_hook: { + const original_name = switch (e_.target.data) { + inline .e_identifier, + .e_import_identifier, + .e_commonjs_export_identifier, + => |id| p.symbols.items[id.ref.innerIndex()].original_name, + .e_dot => |dot| dot.name, + else => break :try_record_hook, + }; + if (!ReactRefresh.isHookName(original_name)) break :try_record_hook; + p.handleReactRefreshHookCall(e_, original_name); + } + return expr; }, .e_new => |e_| { e_.target = p.visitExpr(e_.target); - // p.warnA for (e_.args.slice()) |*arg| { arg.* = p.visitExpr(arg.*); @@ -17701,8 +17563,13 @@ fn NewParser_( }); p.pushScopeForVisitPass(.function_body, e_.body.loc) catch unreachable; + var react_hook_data: ?ReactRefresh.HookContext = null; + const prev = p.react_refresh.hook_ctx_storage; + defer p.react_refresh.hook_ctx_storage = prev; + p.react_refresh.hook_ctx_storage = &react_hook_data; + var stmts_list = ListManaged(Stmt).fromOwnedSlice(p.allocator, dupe); - var temp_opts = PrependTempRefsOpts{ .kind = StmtsKind.fn_body }; + var temp_opts = PrependTempRefsOpts{ .kind = .fn_body }; p.visitStmtsAndPrependTempRefs(&stmts_list, &temp_opts) catch unreachable; p.allocator.free(e_.body.stmts); e_.body.stmts = stmts_list.items; @@ -17711,16 +17578,42 @@ fn NewParser_( p.fn_only_data_visit.is_inside_async_arrow_fn = old_inside_async_arrow_fn; p.fn_or_arrow_data_visit = std.mem.bytesToValue(@TypeOf(p.fn_or_arrow_data_visit), &old_fn_or_arrow_data); + + if (react_hook_data) |*hook| try_mark_hook: { + const stmts = p.nearest_stmt_list orelse break :try_mark_hook; + stmts.append(p.getReactRefreshHookSignalDecl(hook.signature_cb)) catch bun.outOfMemory(); + + p.handleReactRefreshPostVisitFunctionBody(&stmts_list, hook); + e_.body.stmts = stmts_list.items; + + return p.getReactRefreshHookSignalInit(hook, expr); + } }, .e_function => |e_| { if (p.is_revisit_for_substitution) { return expr; } + var react_hook_data: ?ReactRefresh.HookContext = null; + const prev = p.react_refresh.hook_ctx_storage; + defer p.react_refresh.hook_ctx_storage = prev; + p.react_refresh.hook_ctx_storage = &react_hook_data; + e_.func = p.visitFunc(e_.func, expr.loc); - if (e_.func.name) |name| { - return p.keepExprSymbolName(expr, p.symbols.items[name.ref.?.innerIndex()].original_name); + + var final_expr = expr; + + if (react_hook_data) |*hook| try_mark_hook: { + const stmts = p.nearest_stmt_list orelse break :try_mark_hook; + stmts.append(p.getReactRefreshHookSignalDecl(hook.signature_cb)) catch bun.outOfMemory(); + final_expr = p.getReactRefreshHookSignalInit(hook, expr); } + + if (e_.func.name) |name| { + final_expr = p.keepExprSymbolName(final_expr, p.symbols.items[name.ref.?.innerIndex()].original_name); + } + + return final_expr; }, .e_class => |e_| { if (p.is_revisit_for_substitution) { @@ -18443,11 +18336,18 @@ fn NewParser_( } fn selectLocalKind(p: *P, kind: S.Local.Kind) S.Local.Kind { + // When using Kit's HMR implementation, we need to preserve the local kind + // if possible, as more efficient code can be generated if something is known + // not to be an ESM live binding. + if (p.options.features.hot_module_reloading) { + return kind; + } + // Use "var" instead of "let" and "const" if the variable declaration may // need to be separated from the initializer. This allows us to safely move // this declaration into a nested scope. 
if ((p.options.bundle or p.will_wrap_module_in_try_catch_for_using) and - (p.current_scope.parent == null and !kind.isUsing())) + p.current_scope.parent == null and !kind.isUsing()) { return .k_var; } @@ -18760,7 +18660,7 @@ fn NewParser_( } } - if (comptime FeatureFlags.unwrap_commonjs_to_esm) { + if (p.shouldUnwrapCommonJSToESM()) { if (!p.is_control_flow_dead and id.ref.eql(p.exports_ref)) { if (!p.commonjs_named_exports_deoptimized) { if (identifier_opts.is_delete_target) { @@ -18907,7 +18807,7 @@ fn NewParser_( } }, .e_module_dot_exports => { - if (comptime FeatureFlags.unwrap_commonjs_to_esm) { + if (p.shouldUnwrapCommonJSToESM()) { if (!p.is_control_flow_dead) { if (!p.commonjs_named_exports_deoptimized) { if (identifier_opts.is_delete_target) { @@ -19061,7 +18961,6 @@ fn NewParser_( switch (stmt.data) { // These don't contain anything to traverse - .s_debugger, .s_empty, .s_comment => { p.current_scope.is_after_const_local_prefix = was_after_after_const_local_prefix; }, @@ -19210,7 +19109,6 @@ fn NewParser_( } }, .s_export_star => |data| { - // "export * from 'path'" const name = p.loadNameFromRef(data.namespace_ref); data.namespace_ref = try p.newSymbol(.other, name); @@ -19334,8 +19232,24 @@ fn NewParser_( name = js_ast.ClauseItem.default_alias; } + var react_hook_data: ?ReactRefresh.HookContext = null; + const prev = p.react_refresh.hook_ctx_storage; + defer p.react_refresh.hook_ctx_storage = prev; + p.react_refresh.hook_ctx_storage = &react_hook_data; + func.func = p.visitFunc(func.func, func.func.open_parens_loc); + if (react_hook_data) |*hook| { + stmts.append(p.getReactRefreshHookSignalDecl(hook.signature_cb)) catch bun.outOfMemory(); + + data.value = .{ + .expr = p.getReactRefreshHookSignalInit(hook, p.newExpr( + E.Function{ .func = func.func }, + stmt.loc, + )), + }; + } + if (p.is_control_flow_dead) { return; } @@ -19412,13 +19326,10 @@ fn NewParser_( } }, .s_export_equals => |data| { - // "module.exports = value" stmts.append( Stmt.assign( - p.@"module.exports"( - stmt.loc, - ), + p.@"module.exports"(stmt.loc), p.visitExpr(data.value), ), ) catch unreachable; @@ -19532,6 +19443,26 @@ fn NewParser_( return; } } + + try stmts.append(stmt.*); + + if (p.options.features.react_fast_refresh and p.current_scope == p.module_scope) { + for (data.decls.slice()) |decl| try_register: { + const val = decl.value orelse break :try_register; + switch (val.data) { + .e_arrow, .e_function => {}, + else => break :try_register, + } + const id = switch (decl.binding.data) { + .b_identifier => |id| id.ref, + else => break :try_register, + }; + const original_name = p.symbols.items[id.innerIndex()].original_name; + try p.handleReactRefreshRegister(stmts, original_name, id); + } + } + + return; }, .s_expr => |data| { const should_trim_primitive = p.options.features.dead_code_elimination and @@ -19540,7 +19471,7 @@ fn NewParser_( defer p.stmt_expr_value = .{ .e_missing = .{} }; const is_top_level = p.current_scope == p.module_scope; - if (comptime FeatureFlags.unwrap_commonjs_to_esm) { + if (p.shouldUnwrapCommonJSToESM()) { p.commonjs_named_exports_needs_conversion = if (is_top_level) std.math.maxInt(u32) else @@ -19556,7 +19487,7 @@ fn NewParser_( // simplify unused data.value = SideEffects.simplifyUnusedExpr(p, data.value) orelse return; - if (comptime FeatureFlags.unwrap_commonjs_to_esm) { + if (p.shouldUnwrapCommonJSToESM()) { if (is_top_level) { if (data.value.data == .e_binary) { const to_convert = p.commonjs_named_exports_needs_conversion; @@ -19637,7 +19568,6 @@ fn NewParser_( data.value 
= p.visitExpr(data.value); }, .s_return => |data| { - // Forbid top-level return inside modules with ECMAScript-style exports if (p.fn_or_arrow_data_visit.is_outside_fn_or_arrow) { const where = where: { @@ -20027,44 +19957,58 @@ fn NewParser_( } } + var react_hook_data: ?ReactRefresh.HookContext = null; + const prev = p.react_refresh.hook_ctx_storage; + defer p.react_refresh.hook_ctx_storage = prev; + p.react_refresh.hook_ctx_storage = &react_hook_data; + data.func = p.visitFunc(data.func, data.func.open_parens_loc); + const name_ref = data.func.name.?.ref.?; + bun.assert(name_ref.tag == .symbol); + const name_symbol = &p.symbols.items[name_ref.innerIndex()]; + const original_name = name_symbol.original_name; + // Handle exporting this function from a namespace if (data.func.flags.contains(.is_export) and p.enclosing_namespace_arg_ref != null) { data.func.flags.remove(.is_export); - const enclosing_namespace_arg_ref = p.enclosing_namespace_arg_ref orelse unreachable; - stmts.ensureUnusedCapacity(3) catch unreachable; + const enclosing_namespace_arg_ref = p.enclosing_namespace_arg_ref orelse bun.outOfMemory(); + stmts.ensureUnusedCapacity(3) catch bun.outOfMemory(); stmts.appendAssumeCapacity(stmt.*); stmts.appendAssumeCapacity(Stmt.assign( p.newExpr(E.Dot{ .target = p.newExpr(E.Identifier{ .ref = enclosing_namespace_arg_ref }, stmt.loc), - .name = p.loadNameFromRef(data.func.name.?.ref.?), + .name = original_name, .name_loc = data.func.name.?.loc, }, stmt.loc), p.newExpr(E.Identifier{ .ref = data.func.name.?.ref.? }, data.func.name.?.loc), )); } else if (!mark_as_dead) { - if (p.symbols.items[data.func.name.?.ref.?.innerIndex()].remove_overwritten_function_declaration) { + if (name_symbol.remove_overwritten_function_declaration) { return; } - stmts.append(stmt.*) catch unreachable; + stmts.append(stmt.*) catch bun.outOfMemory(); } else if (mark_as_dead) { - const name = data.func.name.?.ref.?; - if (p.options.features.replace_exports.getPtr(p.loadNameFromRef(name))) |replacement| { - _ = p.injectReplacementExport(stmts, name, data.func.name.?.loc, replacement); + if (p.options.features.replace_exports.getPtr(original_name)) |replacement| { + _ = p.injectReplacementExport(stmts, name_ref, data.func.name.?.loc, replacement); + } + } + + if (p.options.features.react_fast_refresh) { + if (react_hook_data) |*hook| { + try stmts.append(p.getReactRefreshHookSignalDecl(hook.signature_cb)); + try stmts.append(p.s(S.SExpr{ + .value = p.getReactRefreshHookSignalInit(hook, Expr.initIdentifier(name_ref, logger.Loc.Empty)), + }, logger.Loc.Empty)); + } + + if (p.current_scope == p.module_scope) { + try p.handleReactRefreshRegister(stmts, original_name, name_ref); } } - // stmts.appendAssumeCapacity( - // // i wonder if this will crash - // p.keepStmtSymbolName( - // data.func.name.?.loc, - // data.func.name.?.ref.?, - // p.symbols.items[data.func.name.?.ref.?.innerIndex()].original_name, - // ), - // ); return; }, .s_class => |data| { @@ -20399,6 +20343,10 @@ fn NewParser_( } } + if (p.options.features.react_fast_refresh) { + p.react_refresh.last_hook_seen = null; + } + if (only_scan_imports_and_do_not_visit) { @compileError("only_scan_imports_and_do_not_visit must not run this."); } @@ -20406,7 +20354,18 @@ fn NewParser_( .is_immediately_assigned_to_decl = true, }); - if (comptime FeatureFlags.unwrap_commonjs_to_esm) { + if (p.options.features.react_fast_refresh) { + // When hooks are immediately assigned to something, we need to hash the binding. 
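+                    // For example (illustrative): in `const [n, setN] = useState(0)`, the
+                    // `[n, setN]` destructuring pattern itself is folded into the hash, in
+                    // addition to what the `useState(0)` call already contributed.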
+ if (p.react_refresh.last_hook_seen) |last_hook| { + if (decl.value.?.data.as(.e_call)) |call| { + if (last_hook == call) { + decl.binding.data.writeToHasher(&p.react_refresh.hook_ctx_storage.?.*.?.hasher, p.symbols.items); + } + } + } + } + + if (p.shouldUnwrapCommonJSToESM()) { if (prev_require_to_convert_count < p.imports_to_convert_from_require.items.len) { if (decl.binding.data == .b_identifier) { const ref = decl.binding.data.b_identifier.ref; @@ -20676,9 +20635,6 @@ fn NewParser_( p.markExportedBindingInsideNamespace(ref, item.value); } }, - else => { - Output.panic("Unexpected binding type in namespace. This is a bug. {any}", .{binding}); - }, } } @@ -21583,9 +21539,6 @@ fn NewParser_( } } }, - else => { - p.panic("Unexpected binding {any}", .{binding}); - }, } } @@ -21909,19 +21862,12 @@ fn NewParser_( } fn keepStmtSymbolName(p: *P, loc: logger.Loc, ref: Ref, name: string) Stmt { - p.expr_list.ensureUnusedCapacity(2) catch unreachable; - const start = p.expr_list.items.len; - p.expr_list.appendAssumeCapacity(p.newExpr(E.Identifier{ - .ref = ref, - }, loc)); - p.expr_list.appendAssumeCapacity(p.newExpr(E.String{ .data = name }, loc)); - return p.s(S.SExpr{ - // I believe that this is a spot we can do $RefreshReg$(name) - .value = p.callRuntime(loc, "__name", p.expr_list.items[start..p.expr_list.items.len]), - - // Make sure tree shaking removes this if the function is never used - .does_not_affect_tree_shaking = true, - }, loc); + _ = p; + _ = loc; + _ = ref; + _ = name; + // TODO: + @compileError("not implemented"); } fn runtimeIdentifierRef(p: *P, loc: logger.Loc, comptime name: string) Ref { @@ -22027,6 +21973,10 @@ fn NewParser_( var visited = try ListManaged(Stmt).initCapacity(p.allocator, stmts.items.len); defer visited.deinit(); + const prev_nearest_stmt_list = p.nearest_stmt_list; + defer p.nearest_stmt_list = prev_nearest_stmt_list; + p.nearest_stmt_list = &before; + var preprocessed_enum_i: usize = 0; for (stmts.items) |*stmt| { @@ -22517,7 +22467,7 @@ fn NewParser_( fn extractDeclsForBinding(binding: Binding, decls: *ListManaged(G.Decl)) anyerror!void { switch (binding.data) { - .b_property, .b_missing => {}, + .b_missing => {}, .b_identifier => { try decls.append(G.Decl{ .binding = binding }); }, @@ -22772,8 +22722,10 @@ fn NewParser_( /// When not transpiling we dont use the renamer, so our solution is to generate really /// hard to collide with variables, instead of actually making things collision free pub fn generateTempRef(p: *P, default_name: ?string) Ref { - var scope = p.current_scope; + return p.generateTempRefWithScope(default_name, p.current_scope); + } + pub fn generateTempRefWithScope(p: *P, default_name: ?string, scope: *Scope) Ref { const name = (if (p.willUseRenamer()) default_name else null) orelse brk: { p.temp_ref_count += 1; break :brk std.fmt.allocPrint(p.allocator, "__bun_temp_ref_{x}$", .{p.temp_ref_count}) catch bun.outOfMemory(); @@ -22790,6 +22742,11 @@ fn NewParser_( } pub fn computeTsEnumsMap(p: *const P, allocator: Allocator) !js_ast.Ast.TsEnumsMap { + // When hot module reloading is enabled, we disable enum inlining + // to avoid making the HMR graph more complicated. 
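+            //
+            // Illustrative consequence: a use of `Color.Red` from
+            // `enum Color { Red = 1 }` stays a member access instead of being
+            // inlined to the constant `1`.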
+ if (p.options.features.hot_module_reloading) + return .{}; + const InlinedEnumValue = js_ast.InlinedEnumValue; var map: js_ast.Ast.TsEnumsMap = .{}; try map.ensureTotalCapacity(allocator, @intCast(p.top_level_enums.items.len)); @@ -23102,15 +23059,177 @@ fn NewParser_( } }; + pub fn handleReactRefreshRegister(p: *P, stmts: *ListManaged(Stmt), original_name: []const u8, ref: Ref) !void { + bun.assert(p.options.features.react_fast_refresh); + bun.assert(p.current_scope == p.module_scope); + + if (ReactRefresh.isComponentishName(original_name)) { + // $RefreshReg$(component, "file.ts:Original Name") + const loc = logger.Loc.Empty; + try stmts.append(p.s(S.SExpr{ .value = p.newExpr(E.Call{ + .target = Expr.initIdentifier(p.react_refresh.register_ref, loc), + .args = try ExprNodeList.fromSlice(p.allocator, &.{ + Expr.initIdentifier(ref, loc), + p.newExpr(E.String{ + .data = try bun.strings.concat(p.allocator, &.{ + p.source.path.pretty, + ":", + original_name, + }), + }, loc), + }), + }, loc) }, loc)); + + p.react_refresh.register_used = true; + } + } + + pub fn handleReactRefreshHookCall(p: *P, hook_call: *E.Call, original_name: []const u8) void { + bun.assert(p.options.features.react_fast_refresh); + bun.assert(ReactRefresh.isHookName(original_name)); + const ctx_storage = p.react_refresh.hook_ctx_storage orelse + return; // not in a function, ignore this hook call. + + // if this function has no hooks recorded, initialize a hook context + // every function visit provides stack storage, which it will inspect at visit finish. + const ctx: *ReactRefresh.HookContext = if (ctx_storage.*) |*ctx| ctx else init: { + p.react_refresh.signature_used = true; + + var scope = p.current_scope; + while (scope.kind != .function_body and scope.kind != .block and scope.kind != .entry) { + scope = scope.parent orelse break; + } + + ctx_storage.* = .{ + .hasher = std.hash.Wyhash.init(0), + .signature_cb = p.generateTempRefWithScope("_s", scope), + .user_hooks = .{}, + }; + + break :init &(ctx_storage.*.?); + }; + + ctx.hasher.update(original_name); + + if (ReactRefresh.built_in_hooks.get(original_name)) |built_in_hook| hash_arg: { + const arg_index: usize = switch (built_in_hook) { + // useState first argument is initial state. + .useState => 0, + // useReducer second argument is initial state. + .useReducer => 1, + else => break :hash_arg, + }; + if (hook_call.args.len <= arg_index) break :hash_arg; + const arg = hook_call.args.slice()[arg_index]; + arg.data.writeToHasher(&ctx.hasher, p.symbols.items); + } else switch (hook_call.target.data) { + inline .e_identifier, + .e_import_identifier, + .e_commonjs_export_identifier, + => |id| { + const gop = ctx.user_hooks.getOrPut(p.allocator, id.ref) catch bun.outOfMemory(); + if (!gop.found_existing) { + gop.value_ptr.* = Expr.initIdentifier(id.ref, logger.Loc.Empty); + } + }, + else => {}, + } + + ctx.hasher.update("\x00"); + } + + pub fn handleReactRefreshPostVisitFunctionBody(p: *P, stmts: *ListManaged(Stmt), hook: *ReactRefresh.HookContext) void { + bun.assert(p.options.features.react_fast_refresh); + + // We need to prepend `_s();` as a statement. + if (stmts.items.len == stmts.capacity) { + // If the ArrayList does not have enough capacity, it is + // re-allocated entirely to fit. Only one slot of new capacity + // is used since we know this statement list is not going to be + // appended to afterwards; This function is a post-visit handler. 
+ const new_stmts = p.allocator.alloc(Stmt, stmts.items.len + 1) catch bun.outOfMemory(); + @memcpy(new_stmts[1..], stmts.items); + stmts.deinit(); + stmts.* = ListManaged(Stmt).fromOwnedSlice(p.allocator, new_stmts); + } else { + // The array has enough capacity, so there is no possibility of + // allocation failure. We just move all of the statements over + // by one, and increase the length using `addOneAssumeCapacity` + _ = stmts.addOneAssumeCapacity(); + bun.copy(Stmt, stmts.items[1..], stmts.items[0 .. stmts.items.len - 1]); + } + + const loc = logger.Loc.Empty; + const prepended_stmt = p.s(S.SExpr{ .value = p.newExpr(E.Call{ + .target = Expr.initIdentifier(hook.signature_cb, loc), + }, loc) }, loc); + stmts.items[0] = prepended_stmt; + } + + pub fn getReactRefreshHookSignalDecl(p: *P, signal_cb_ref: Ref) Stmt { + const loc = logger.Loc.Empty; + // var s_ = $RefreshSig$(); + return p.s(S.Local{ .decls = G.Decl.List.fromSlice(p.allocator, &.{.{ + .binding = p.b(B.Identifier{ .ref = signal_cb_ref }, loc), + .value = p.newExpr(E.Call{ + .target = Expr.initIdentifier(p.react_refresh.create_signature_ref, loc), + }, loc), + }}) catch bun.outOfMemory() }, loc); + } + + pub fn getReactRefreshHookSignalInit(p: *P, ctx: *ReactRefresh.HookContext, function_with_hook_calls: Expr) Expr { + const loc = logger.Loc.Empty; + + const final = ctx.hasher.final(); + const hash_data = p.allocator.alloc(u8, comptime bun.base64.encodeLenFromSize(@sizeOf(@TypeOf(final)))) catch bun.outOfMemory(); + bun.assert(bun.base64.encode(hash_data, std.mem.asBytes(&final)) == hash_data.len); + + const have_custom_hooks = ctx.user_hooks.count() > 0; + const have_force_arg = have_custom_hooks or p.react_refresh.force_reset; + + const args = p.allocator.alloc( + Expr, + 2 + + @as(usize, @intFromBool(have_force_arg)) + + @as(usize, @intFromBool(have_custom_hooks)), + ) catch bun.outOfMemory(); + + args[0] = function_with_hook_calls; + args[1] = p.newExpr(E.String{ .data = hash_data }, loc); + + if (have_force_arg) args[2] = p.newExpr(E.Boolean{ .value = p.react_refresh.force_reset }, loc); + + if (have_custom_hooks) { + // () => [useCustom1, useCustom2] + args[3] = p.newExpr(E.Arrow{ + .body = .{ + .stmts = p.allocator.dupe(Stmt, &.{ + p.s(S.Return{ .value = p.newExpr(E.Array{ + .items = ExprNodeList.init(ctx.user_hooks.values()), + }, loc) }, loc), + }) catch bun.outOfMemory(), + .loc = loc, + }, + .prefer_expr = true, + }, loc); + } + + // _s(func, "", force, () => [useCustom]) + return p.newExpr(E.Call{ + .target = Expr.initIdentifier(ctx.signature_cb, loc), + .args = ExprNodeList.init(args), + }, loc); + } + pub fn toAST( p: *P, - _parts: []js_ast.Part, + input_parts: []js_ast.Part, exports_kind: js_ast.ExportsKind, - commonjs_wrapper_expr: CommonJSWrapper, + wrap_mode: WrapMode, hashbang: []const u8, ) !js_ast.Ast { const allocator = p.allocator; - var parts = _parts; + var parts = input_parts; // if (p.options.tree_shaking and p.options.features.trim_unused_imports) { // p.treeShake(&parts, false); @@ -23118,661 +23237,183 @@ fn NewParser_( const bundling = p.options.bundle; var parts_end: usize = @as(usize, @intFromBool(bundling)); - // Handle import paths after the whole file has been visited because we need - // symbol usage counts to be able to remove unused type-only imports in - // TypeScript code. 
- while (true) { - var kept_import_equals = false; - var removed_import_equals = false; - const begin = parts_end; - // Potentially remove some statements, then filter out parts to remove any - // with no statements - for (parts[begin..]) |part_| { - var part = part_; - p.import_records_for_current_part.clearRetainingCapacity(); - p.declared_symbols.clearRetainingCapacity(); + // When bundling with HMR, we need every module to be just a + // single part, as we later wrap each module into a function, + // which requires a single part. Otherwise, you'll end up with + // multiple instances of a module, each with different parts of + // the file. That is also why tree-shaking is disabled. + if (p.options.features.hot_module_reloading) { + bun.assert(!p.options.tree_shaking); + bun.assert(p.options.features.hot_module_reloading); - const result = try ImportScanner.scan(P, p, part.stmts, commonjs_wrapper_expr != .none); - kept_import_equals = kept_import_equals or result.kept_import_equals; - removed_import_equals = removed_import_equals or result.removed_import_equals; + var hmr_transform_ctx = ConvertESMExportsForHmr{ .last_part = &parts[parts.len - 1] }; + try hmr_transform_ctx.stmts.ensureTotalCapacity(p.allocator, prealloc_count: { + // get a estimate on how many statements there are going to be + var count: usize = 0; + for (parts) |part| count += part.stmts.len; + break :prealloc_count count + 2; + }); - part.stmts = result.stmts; - if (part.stmts.len > 0) { - if (p.module_scope.contains_direct_eval and part.declared_symbols.len() > 0) { - // If this file contains a direct call to "eval()", all parts that - // declare top-level symbols must be kept since the eval'd code may - // reference those symbols. - part.can_be_removed_if_unused = false; - } - if (part.declared_symbols.len() == 0) { - part.declared_symbols = p.declared_symbols.clone(p.allocator) catch unreachable; - } else { - part.declared_symbols.appendList(p.allocator, p.declared_symbols) catch unreachable; + for (parts) |part| { + // Kit does not care about 'import =', as it handles it on it's own + _ = try ImportScanner.scan(P, p, part.stmts, wrap_mode != .none, true, &hmr_transform_ctx); + } + + parts = try hmr_transform_ctx.finalize(p, parts); + } else { + // Handle import paths after the whole file has been visited because we need + // symbol usage counts to be able to remove unused type-only imports in + // TypeScript code. + while (true) { + var kept_import_equals = false; + var removed_import_equals = false; + + const begin = parts_end; + // Potentially remove some statements, then filter out parts to remove any + // with no statements + for (parts[begin..]) |part_| { + var part = part_; + p.import_records_for_current_part.clearRetainingCapacity(); + p.declared_symbols.clearRetainingCapacity(); + + const result = try ImportScanner.scan(P, p, part.stmts, wrap_mode != .none, false, {}); + kept_import_equals = kept_import_equals or result.kept_import_equals; + removed_import_equals = removed_import_equals or result.removed_import_equals; + + part.stmts = result.stmts; + if (part.stmts.len > 0) { + if (p.module_scope.contains_direct_eval and part.declared_symbols.len() > 0) { + // If this file contains a direct call to "eval()", all parts that + // declare top-level symbols must be kept since the eval'd code may + // reference those symbols. 
+ part.can_be_removed_if_unused = false; + } + if (part.declared_symbols.len() == 0) { + part.declared_symbols = p.declared_symbols.clone(p.allocator) catch unreachable; + } else { + part.declared_symbols.appendList(p.allocator, p.declared_symbols) catch unreachable; + } + + if (part.import_record_indices.len == 0) { + part.import_record_indices = @TypeOf(part.import_record_indices).init( + (p.import_records_for_current_part.clone(p.allocator) catch unreachable).items, + ); + } else { + part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items) catch unreachable; + } + + parts[parts_end] = part; + parts_end += 1; } + } - if (part.import_record_indices.len == 0) { - part.import_record_indices = @TypeOf(part.import_record_indices).init( - (p.import_records_for_current_part.clone(p.allocator) catch unreachable).items, - ); - } else { - part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items) catch unreachable; - } - - parts[parts_end] = part; - parts_end += 1; + // We need to iterate multiple times if an import-equals statement was + // removed and there are more import-equals statements that may be removed + if (!kept_import_equals or !removed_import_equals) { + break; } } - // We need to iterate multiple times if an import-equals statement was - // removed and there are more import-equals statements that may be removed - if (!kept_import_equals or !removed_import_equals) { - break; - } - } + // leave the first part in there for namespace export when bundling + parts = parts[0..parts_end]; - // leave the first part in there for namespace export when bundling - parts = parts[0..parts_end]; - - // Do a second pass for exported items now that imported items are filled out - for (parts) |part| { - for (part.stmts) |stmt| { - switch (stmt.data) { - .s_export_clause => |clause| { - for (clause.items) |item| { - if (p.named_imports.getEntry(item.name.ref.?)) |_import| { - _import.value_ptr.is_exported = true; + // Do a second pass for exported items now that imported items are filled out. + // This isn't done for HMR because it already deletes all `.s_export_clause`s + for (parts) |part| { + for (part.stmts) |stmt| { + switch (stmt.data) { + .s_export_clause => |clause| { + for (clause.items) |item| { + if (p.named_imports.getEntry(item.name.ref.?)) |_import| { + _import.value_ptr.is_exported = true; + } } - } - }, - else => {}, + }, + else => {}, + } } } } - switch (commonjs_wrapper_expr) { - .bun_dev => |commonjs_wrapper| { - var require_function_args = allocator.alloc(Arg, 2) catch unreachable; - var final_part_stmts_count: usize = 0; + if (wrap_mode == .bun_commonjs and !p.options.features.remove_cjs_module_wrapper) { + // This transforms the user's code into. + // + // (function (exports, require, module, __filename, __dirname) { + // ... 
+ // }) + // + // which is then called in `evaluateCommonJSModuleOnce` + var args = allocator.alloc(Arg, 5 + @as(usize, @intFromBool(p.has_import_meta))) catch bun.outOfMemory(); + args[0..5].* = .{ + Arg{ .binding = p.b(B.Identifier{ .ref = p.exports_ref }, logger.Loc.Empty) }, + Arg{ .binding = p.b(B.Identifier{ .ref = p.require_ref }, logger.Loc.Empty) }, + Arg{ .binding = p.b(B.Identifier{ .ref = p.module_ref }, logger.Loc.Empty) }, + Arg{ .binding = p.b(B.Identifier{ .ref = p.filename_ref }, logger.Loc.Empty) }, + Arg{ .binding = p.b(B.Identifier{ .ref = p.dirname_ref }, logger.Loc.Empty) }, + }; + if (p.has_import_meta) { + p.import_meta_ref = p.newSymbol(.other, "$Bun_import_meta") catch bun.outOfMemory(); + args[5] = Arg{ .binding = p.b(B.Identifier{ .ref = p.import_meta_ref }, logger.Loc.Empty) }; + } - var imports_count: u32 = 0; - // We have to also move export from, since we will preserve those - var exports_from_count: u32 = 0; + var total_stmts_count: usize = 0; + for (parts) |part| { + total_stmts_count += part.stmts.len; + } - // Two passes. First pass just counts. - for (parts) |part| { - for (part.stmts) |stmt| { - imports_count += switch (stmt.data) { - .s_import => @as(u32, 1), - else => @as(u32, 0), - }; + const preserve_strict_mode = p.module_scope.strict_mode == .explicit_strict_mode and + !(parts.len > 0 and + parts[0].stmts.len > 0 and + parts[0].stmts[0].data == .s_directive); - exports_from_count += switch (stmt.data) { - .s_export_star, .s_export_from => @as(u32, 1), - else => @as(u32, 0), - }; + total_stmts_count += @as(usize, @intCast(@intFromBool(preserve_strict_mode))); - final_part_stmts_count += switch (stmt.data) { - .s_import, .s_export_star, .s_export_from => @as(usize, 0), - else => @as(usize, 1), - }; - } + const stmts_to_copy = allocator.alloc(Stmt, total_stmts_count) catch bun.outOfMemory(); + { + var remaining_stmts = stmts_to_copy; + if (preserve_strict_mode) { + remaining_stmts[0] = p.s( + S.Directive{ + .value = "use strict", + }, + p.module_scope_directive_loc, + ); + remaining_stmts = remaining_stmts[1..]; } - var new_stmts_list = allocator.alloc(Stmt, exports_from_count + imports_count + 1) catch unreachable; - const final_stmts_list = allocator.alloc(Stmt, final_part_stmts_count) catch unreachable; - var remaining_final_stmts = final_stmts_list; - var imports_list = new_stmts_list[0..imports_count]; - - var exports_list: []Stmt = if (exports_from_count > 0) new_stmts_list[imports_list.len + 1 ..] 
else &[_]Stmt{}; - - require_function_args[0] = G.Arg{ .binding = p.b(B.Identifier{ .ref = p.module_ref }, logger.Loc.Empty) }; - require_function_args[1] = G.Arg{ .binding = p.b(B.Identifier{ .ref = p.exports_ref }, logger.Loc.Empty) }; - - var imports_list_i: u32 = 0; - var exports_list_i: u32 = 0; - for (parts) |part| { - for (part.stmts) |*stmt| { - switch (stmt.data) { - .s_import => { - imports_list[imports_list_i] = stmt.*; - stmt.loc = imports_list[imports_list_i].loc; - imports_list_i += 1; - }, - - .s_export_star, .s_export_from => { - exports_list[exports_list_i] = stmt.*; - stmt.loc = exports_list[exports_list_i].loc; - exports_list_i += 1; - }, - else => { - remaining_final_stmts[0] = stmt.*; - remaining_final_stmts = remaining_final_stmts[1..]; - }, - } - stmt.* = Stmt.empty(); + for (part.stmts, remaining_stmts[0..part.stmts.len]) |src, *dest| { + dest.* = src; } + remaining_stmts = remaining_stmts[part.stmts.len..]; } + } - commonjs_wrapper.data.e_call.args.ptr[0] = p.newExpr( - E.Function{ .func = G.Fn{ + const wrapper = p.newExpr( + E.Function{ + .func = G.Fn{ .name = null, .open_parens_loc = logger.Loc.Empty, - .args = require_function_args, - .body = .{ .loc = logger.Loc.Empty, .stmts = final_stmts_list }, - .flags = Flags.Function.init(.{ .is_export = true }), - } }, - logger.Loc.Empty, - ); - var sourcefile_name = p.source.path.pretty; - if (strings.lastIndexOf(sourcefile_name, "node_modules")) |node_modules_i| { - // 1 for the separator - const end = node_modules_i + 1 + "node_modules".len; - // If you were to name your file "node_modules.js" it shouldn't appear as ".js" - if (end < sourcefile_name.len) { - sourcefile_name = sourcefile_name[end..]; - } - } - commonjs_wrapper.data.e_call.args.ptr[1] = p.newExpr(E.String{ .data = sourcefile_name }, logger.Loc.Empty); - - new_stmts_list[imports_list.len] = p.s( - S.ExportDefault{ - .value = .{ - .expr = commonjs_wrapper, - }, - .default_name = LocRef{ .ref = null, .loc = logger.Loc.Empty }, + .args = args, + .body = .{ .loc = logger.Loc.Empty, .stmts = stmts_to_copy }, + .flags = Flags.Function.init(.{ .is_export = false }), }, - logger.Loc.Empty, - ); - parts[parts.len - 1].stmts = new_stmts_list; - }, + }, + logger.Loc.Empty, + ); - .bun_js => { - // if remove_cjs_module_wrapper is true, `evaluateCommonJSModuleOnce` will put exports, require, module, __filename, and - // __dirname on the globalObject. - if (!p.options.features.remove_cjs_module_wrapper) { - // This transforms the user's code into. - // - // (function (exports, require, module, __filename, __dirname) { - // ... 
- // }) - // - // which is then called in `evaluateCommonJSModuleOnce` - var args = allocator.alloc(Arg, 5 + @as(usize, @intFromBool(p.has_import_meta))) catch bun.outOfMemory(); - args[0..5].* = .{ - Arg{ .binding = p.b(B.Identifier{ .ref = p.exports_ref }, logger.Loc.Empty) }, - Arg{ .binding = p.b(B.Identifier{ .ref = p.require_ref }, logger.Loc.Empty) }, - Arg{ .binding = p.b(B.Identifier{ .ref = p.module_ref }, logger.Loc.Empty) }, - Arg{ .binding = p.b(B.Identifier{ .ref = p.filename_ref }, logger.Loc.Empty) }, - Arg{ .binding = p.b(B.Identifier{ .ref = p.dirname_ref }, logger.Loc.Empty) }, - }; - if (p.has_import_meta) { - p.import_meta_ref = p.newSymbol(.other, "$Bun_import_meta") catch bun.outOfMemory(); - args[5] = Arg{ .binding = p.b(B.Identifier{ .ref = p.import_meta_ref }, logger.Loc.Empty) }; - } - - var total_stmts_count: usize = 0; - for (parts) |part| { - total_stmts_count += part.stmts.len; - } - - const preserve_strict_mode = p.module_scope.strict_mode == .explicit_strict_mode and - !(parts.len > 0 and - parts[0].stmts.len > 0 and - parts[0].stmts[0].data == .s_directive); - - total_stmts_count += @as(usize, @intCast(@intFromBool(preserve_strict_mode))); - - const stmts_to_copy = allocator.alloc(Stmt, total_stmts_count) catch bun.outOfMemory(); - { - var remaining_stmts = stmts_to_copy; - if (preserve_strict_mode) { - remaining_stmts[0] = p.s( - S.Directive{ - .value = "use strict", - }, - p.module_scope_directive_loc, - ); - remaining_stmts = remaining_stmts[1..]; - } - - for (parts) |part| { - for (part.stmts, remaining_stmts[0..part.stmts.len]) |src, *dest| { - dest.* = src; - } - remaining_stmts = remaining_stmts[part.stmts.len..]; - } - } - - const wrapper = p.newExpr( - E.Function{ - .func = G.Fn{ - .name = null, - .open_parens_loc = logger.Loc.Empty, - .args = args, - .body = .{ .loc = logger.Loc.Empty, .stmts = stmts_to_copy }, - .flags = Flags.Function.init(.{ .is_export = false }), - }, - }, - logger.Loc.Empty, - ); - - var top_level_stmts = p.allocator.alloc(Stmt, 1) catch bun.outOfMemory(); - parts[0].stmts = top_level_stmts; - top_level_stmts[0] = p.s( - S.SExpr{ - .value = wrapper, - }, - logger.Loc.Empty, - ); - parts.len = 1; - } - }, - - .none => { - if (p.options.features.hot_module_reloading and p.options.features.allow_runtime) { - const named_exports_count: usize = p.named_exports.count(); - const named_imports: js_ast.Ast.NamedImports = p.named_imports; - - // To transform to something HMR'able, we must: - // 1. Wrap the top level code in an IIFE - // 2. Move imports to the top of the file (preserving the order) - // 3. Remove export clauses (done during ImportScanner) - // 4. Move export * from and export from to the bottom of the file (or the top, it doesn't matter I don't think) - // 5. Export everything as getters in our HMR module - // 6. Call the HMRModule's exportAll function like so: - // __hmrModule.exportAll({ - // exportAlias: () => identifier, - // exportAlias: () => identifier, - // }); - // This has the unfortunate property of making property accesses of exports slower at runtime. - // But, I'm not sure there's a way to use regular properties without breaking stuff. - var imports_count: usize = 0; - // We have to also move export from, since we will preserve those - var exports_from_count: usize = 0; - // Two passes. First pass just counts. 
- for (parts[parts.len - 1].stmts) |stmt| { - imports_count += switch (stmt.data) { - .s_import => @as(usize, 1), - else => @as(usize, 0), - }; - exports_from_count += switch (stmt.data) { - .s_export_star, .s_export_from => @as(usize, 1), - else => @as(usize, 0), - }; - } - var part = &parts[parts.len - 1]; - - const end_iife_stmts_count = part.stmts.len - imports_count - exports_from_count + 1; - // Why 7? - // 1. HMRClient.activate(${isDebug}); - // 2. var __hmrModule = new HMMRModule(id, file_path), __exports = __hmrModule.exports; - // 3. (__hmrModule.load = function() { - // ${...end_iffe_stmts_count - 1} - // ${end_iffe_stmts_count} - // __hmrModule.exportAll({exportAlias: () => identifier}) <-- ${named_exports_count} - // (); - // 4. var __hmrExport_exportName = __hmrModule.exports.exportName, - // 5. export { __hmrExport_exportName as blah, ... } - // 6. __hmrModule.onSetExports = (newExports) => { - // $named_exports_count __hmrExport_exportName = newExports.exportName; <-- ${named_exports_count} - // } - - // if there are no exports: - // - there shouldn't be an export statement - // - we don't need the S.Local for wrapping the exports - // We still call exportAll just with an empty object. - const has_any_exports = named_exports_count > 0; - - const toplevel_stmts_count = 3 + (@as(usize, @intCast(@intFromBool(has_any_exports))) * 2); - var _stmts = allocator.alloc( - Stmt, - end_iife_stmts_count + toplevel_stmts_count + (named_exports_count * 2) + imports_count + exports_from_count, - ) catch unreachable; - // Normally, we'd have to grow that inner function's stmts list by one - // But we can avoid that by just making them all use this same array. - var curr_stmts = _stmts; - - // in debug: crash in the printer due to undefined memory - // in release: print ";" instead. - // this should never happen regardless, but i'm just being cautious here. 
- if (comptime !Environment.isDebug) { - @memset(_stmts, Stmt.empty()); - } - - // Second pass: move any imports from the part's stmts array to the new stmts - var imports_list = curr_stmts[0..imports_count]; - curr_stmts = curr_stmts[imports_list.len..]; - var toplevel_stmts = curr_stmts[0..toplevel_stmts_count]; - curr_stmts = curr_stmts[toplevel_stmts.len..]; - var exports_from = curr_stmts[0..exports_from_count]; - curr_stmts = curr_stmts[exports_from.len..]; - // This is used for onSetExports - var update_function_stmts = curr_stmts[0..named_exports_count]; - curr_stmts = curr_stmts[update_function_stmts.len..]; - var export_all_function_body_stmts = curr_stmts[0..named_exports_count]; - curr_stmts = curr_stmts[export_all_function_body_stmts.len..]; - // This is the original part statements + 1 - var part_stmts = curr_stmts; - if (comptime Environment.allow_assert) assert(part_stmts.len == end_iife_stmts_count); - var part_stmts_i: usize = 0; - - var import_list_i: usize = 0; - var export_list_i: usize = 0; - - // We must always copy it into the new stmts array - for (part.stmts) |stmt| { - switch (stmt.data) { - .s_import => { - imports_list[import_list_i] = stmt; - import_list_i += 1; - }, - .s_export_star, .s_export_from => { - exports_from[export_list_i] = stmt; - export_list_i += 1; - }, - else => { - part_stmts[part_stmts_i] = stmt; - part_stmts_i += 1; - }, - } - } - - const new_call_args_count: usize = if (p.options.features.react_fast_refresh) 3 else 2; - var call_args = try allocator.alloc(Expr, new_call_args_count + 1); - var new_call_args = call_args[0..new_call_args_count]; - const hmr_module_ident = p.newExpr(E.Identifier{ .ref = p.hmr_module.ref }, logger.Loc.Empty); - - new_call_args[0] = p.newExpr(E.Number{ .value = @as(f64, @floatFromInt(p.options.filepath_hash_for_hmr)) }, logger.Loc.Empty); - // This helps us provide better error messages - new_call_args[1] = p.newExpr(E.String{ .data = p.source.path.pretty }, logger.Loc.Empty); - if (p.options.features.react_fast_refresh) { - new_call_args[2] = p.newExpr(E.Identifier{ .ref = p.jsx_refresh_runtime.ref }, logger.Loc.Empty); - } - - var toplevel_stmts_i: u8 = 0; - - var decls = try allocator.alloc(G.Decl, 2 + named_exports_count); - var first_decl = decls[0..2]; - // We cannot rely on import.meta.url because if we import it within a blob: url, it will be nonsensical - // var __hmrModule = new HMRModule(123123124, "/index.js"), __exports = __hmrModule.exports; - const hmr_import_module_ = if (p.options.features.react_fast_refresh) - p.runtime_imports.__FastRefreshModule.? 
- else - p.runtime_imports.__HMRModule.?; - - const hmr_import_ref = hmr_import_module_.ref; - first_decl[0] = G.Decl{ - .binding = p.b(B.Identifier{ .ref = p.hmr_module.ref }, logger.Loc.Empty), - .value = p.newExpr(E.New{ - .args = ExprNodeList.init(new_call_args), - .target = p.newExpr( - E.Identifier{ - .ref = hmr_import_ref, - }, - logger.Loc.Empty, - ), - .close_parens_loc = logger.Loc.Empty, - }, logger.Loc.Empty), - }; - first_decl[1] = G.Decl{ - .binding = p.b(B.Identifier{ .ref = p.exports_ref }, logger.Loc.Empty), - .value = p.newExpr(E.Dot{ - .target = p.newExpr(E.Identifier{ .ref = p.hmr_module.ref }, logger.Loc.Empty), - .name = "exports", - .name_loc = logger.Loc.Empty, - }, logger.Loc.Empty), - }; - - var export_clauses = try allocator.alloc(js_ast.ClauseItem, named_exports_count); - var named_export_i: usize = 0; - var named_exports_iter = p.named_exports.iterator(); - var export_properties = try allocator.alloc(G.Property, named_exports_count); - - var export_name_string_length: usize = 0; - while (named_exports_iter.next()) |named_export| { - export_name_string_length += named_export.key_ptr.len + "$$hmr_".len; - } - - const export_name_string_all = try allocator.alloc(u8, export_name_string_length); - var export_name_string_remainder = export_name_string_all; - const hmr_module_exports_dot = p.newExpr( - E.Dot{ - .target = hmr_module_ident, - .name = "exports", - .name_loc = logger.Loc.Empty, - }, - logger.Loc.Empty, - ); - var exports_decls = decls[first_decl.len..]; - named_exports_iter = p.named_exports.iterator(); - var update_function_args = try allocator.alloc(G.Arg, 1); - const exports_ident = p.newExpr(E.Identifier{ .ref = p.exports_ref }, logger.Loc.Empty); - update_function_args[0] = G.Arg{ .binding = p.b(B.Identifier{ .ref = p.exports_ref }, logger.Loc.Empty) }; - - while (named_exports_iter.next()) |named_export| { - const named_export_value = named_export.value_ptr.*; - - // Do not try to HMR export {foo} from 'bar'; - if (named_imports.get(named_export_value.ref)) |named_import| { - if (named_import.is_exported) continue; - } - - const named_export_symbol: Symbol = p.symbols.items[named_export_value.ref.innerIndex()]; - - var export_name_string = export_name_string_remainder[0 .. named_export.key_ptr.len + "$$hmr_".len]; - export_name_string_remainder = export_name_string_remainder[export_name_string.len..]; - bun.copy(u8, export_name_string, "$$hmr_"); - bun.copy(u8, export_name_string["$$hmr_".len..], named_export.key_ptr.*); - - const name_ref = try p.declareSymbol(.other, logger.Loc.Empty, export_name_string); - - var body_stmts = export_all_function_body_stmts[named_export_i .. named_export_i + 1]; - body_stmts[0] = p.s( - // was this originally a named import? 
- // preserve the identifier - S.Return{ .value = if (named_export_symbol.namespace_alias != null) - p.newExpr(E.ImportIdentifier{ - .ref = named_export_value.ref, - .was_originally_identifier = true, - }, logger.Loc.Empty) - else - p.newExpr(E.Identifier{ - .ref = named_export_value.ref, - }, logger.Loc.Empty) }, - logger.Loc.Empty, - ); - export_clauses[named_export_i] = js_ast.ClauseItem{ - .original_name = "", - .alias = named_export.key_ptr.*, - .alias_loc = named_export_value.alias_loc, - .name = .{ .ref = name_ref, .loc = logger.Loc.Empty }, - }; - - const decl_value = p.newExpr( - E.Dot{ .target = hmr_module_exports_dot, .name = named_export.key_ptr.*, .name_loc = logger.Loc.Empty }, - logger.Loc.Empty, - ); - exports_decls[named_export_i] = G.Decl{ - .binding = p.b(B.Identifier{ .ref = name_ref }, logger.Loc.Empty), - .value = decl_value, - }; - - update_function_stmts[named_export_i] = Stmt.assign( - p.newExpr( - E.Identifier{ .ref = name_ref }, - logger.Loc.Empty, - ), - p.newExpr(E.Dot{ - .target = exports_ident, - .name = named_export.key_ptr.*, - .name_loc = logger.Loc.Empty, - }, logger.Loc.Empty), - ); - - export_properties[named_export_i] = G.Property{ - .key = p.newExpr(E.String{ .data = named_export.key_ptr.* }, logger.Loc.Empty), - .value = p.newExpr( - E.Arrow{ - .args = &[_]G.Arg{}, - .body = .{ - .stmts = body_stmts, - .loc = logger.Loc.Empty, - }, - .prefer_expr = true, - }, - logger.Loc.Empty, - ), - }; - named_export_i += 1; - } - var export_all_args = call_args[new_call_args.len..]; - export_all_args[0] = p.newExpr( - E.Object{ .properties = Property.List.init(export_properties[0..named_export_i]) }, - logger.Loc.Empty, - ); - - part_stmts[part_stmts.len - 1] = p.s( - S.SExpr{ - .value = p.newExpr( - E.Call{ - .target = p.newExpr( - E.Dot{ - .target = hmr_module_ident, - .name = "exportAll", - .name_loc = logger.Loc.Empty, - }, - logger.Loc.Empty, - ), - .args = ExprNodeList.init(export_all_args), - }, - logger.Loc.Empty, - ), - }, - logger.Loc.Empty, - ); - - toplevel_stmts[toplevel_stmts_i] = p.s( - S.Local{ - .decls = G.Decl.List.init(first_decl), - }, - logger.Loc.Empty, - ); - - toplevel_stmts_i += 1; - - const is_async = !p.top_level_await_keyword.isEmpty(); - - const func = p.newExpr( - E.Function{ - .func = .{ - .body = .{ .loc = logger.Loc.Empty, .stmts = part_stmts[0 .. 
part_stmts_i + 1] }, - .name = null, - .open_parens_loc = logger.Loc.Empty, - .flags = Flags.Function.init(.{ - .print_as_iife = true, - .is_async = is_async, - }), - }, - }, - logger.Loc.Empty, - ); - - const call_load = p.newExpr( - E.Call{ - .target = Expr.assign( - p.newExpr( - E.Dot{ - .name = "_load", - .target = hmr_module_ident, - .name_loc = logger.Loc.Empty, - }, - logger.Loc.Empty, - ), - func, - ), - }, - logger.Loc.Empty, - ); - // (__hmrModule._load = function())() - toplevel_stmts[toplevel_stmts_i] = p.s( - S.SExpr{ - .value = if (is_async) - p.newExpr(E.Await{ .value = call_load }, logger.Loc.Empty) - else - call_load, - }, - logger.Loc.Empty, - ); - - toplevel_stmts_i += 1; - - if (has_any_exports) { - if (named_export_i > 0) { - toplevel_stmts[toplevel_stmts_i] = p.s( - S.Local{ - .decls = G.Decl.List.init(exports_decls[0..named_export_i]), - }, - logger.Loc.Empty, - ); - } else { - toplevel_stmts[toplevel_stmts_i] = p.s( - S.Empty{}, - logger.Loc.Empty, - ); - } - - toplevel_stmts_i += 1; - } - - toplevel_stmts[toplevel_stmts_i] = p.s( - S.SExpr{ - .value = Expr.assign( - p.newExpr( - E.Dot{ - .name = "_update", - .target = hmr_module_ident, - .name_loc = logger.Loc.Empty, - }, - logger.Loc.Empty, - ), - p.newExpr( - E.Function{ - .func = .{ - .body = .{ .loc = logger.Loc.Empty, .stmts = if (named_export_i > 0) update_function_stmts[0..named_export_i] else &.{} }, - .name = null, - .args = update_function_args, - .open_parens_loc = logger.Loc.Empty, - }, - }, - logger.Loc.Empty, - ), - ), - }, - logger.Loc.Empty, - ); - toplevel_stmts_i += 1; - if (has_any_exports) { - if (named_export_i > 0) { - toplevel_stmts[toplevel_stmts_i] = p.s( - S.ExportClause{ - .items = export_clauses[0..named_export_i], - }, - logger.Loc.Empty, - ); - } else { - toplevel_stmts[toplevel_stmts_i] = p.s( - S.Empty{}, - logger.Loc.Empty, - ); - } - } - - part.stmts = _stmts[0 .. 
imports_list.len + toplevel_stmts.len + exports_from.len]; - } - }, + var top_level_stmts = p.allocator.alloc(Stmt, 1) catch bun.outOfMemory(); + parts[0].stmts = top_level_stmts; + top_level_stmts[0] = p.s( + S.SExpr{ + .value = wrapper, + }, + logger.Loc.Empty, + ); + parts.len = 1; } + var top_level_symbols_to_parts = js_ast.Ast.TopLevelSymbolToParts{}; var top_level = &top_level_symbols_to_parts; @@ -23843,10 +23484,13 @@ fn NewParser_( break :brk Ref.None; }; + var parts_list = bun.BabyList(js_ast.Part).init(parts); + parts_list.cap = @intCast(input_parts.len); + return .{ .allocator = p.allocator, .runtime_imports = p.runtime_imports, - .parts = bun.BabyList(js_ast.Part).init(parts), + .parts = parts_list, .module_scope = p.module_scope.*, .symbols = js_ast.Symbol.List.init(p.symbols.items), .exports_ref = p.exports_ref, @@ -23944,6 +23588,317 @@ fn NewParser_( return false; } + const ConvertESMExportsForHmr = struct { + last_part: *js_ast.Part, + imports_seen: std.AutoArrayHashMapUnmanaged(u32, void) = .{}, + export_props: std.ArrayListUnmanaged(G.Property) = .{}, + stmts: std.ArrayListUnmanaged(Stmt) = .{}, + + fn convertStmt(ctx: *ConvertESMExportsForHmr, p: *P, stmt: Stmt) !void { + const new_stmt = switch (stmt.data) { + else => stmt, + .s_local => |st| stmt: { + if (!st.is_export) break :stmt stmt; + + st.is_export = false; + + if (st.kind.isReassignable()) { + for (st.decls.slice()) |decl| { + try ctx.visitBindingForKitModuleExports(p, decl.binding, true); + } + } else { + // TODO: remove this dupe + var dupe_decls = try std.ArrayListUnmanaged(G.Decl).initCapacity(p.allocator, st.decls.len); + + for (st.decls.slice()) |decl| { + bun.assert(decl.value != null); // const must be initialized + + switch (decl.binding.data) { + .b_missing => @panic("binding missing"), + + .b_identifier => |id| { + const symbol = p.symbols.items[id.ref.inner_index]; + + // if the symbol is not used, we don't need to preserve + // a binding in this scope. we can move it to the exports object. 
+ if (symbol.use_count_estimate != 0 or !decl.value.?.canBeMoved()) { + dupe_decls.appendAssumeCapacity(decl); + } + + try ctx.export_props.append(p.allocator, .{ + .key = Expr.init(E.String, .{ .data = symbol.original_name }, decl.binding.loc), + .value = decl.value, + }); + }, + + else => { + dupe_decls.appendAssumeCapacity(decl); + try ctx.visitBindingForKitModuleExports(p, decl.binding, false); + }, + } + } + + if (dupe_decls.items.len == 0) { + return; + } + + st.decls = G.Decl.List.fromList(dupe_decls); + } + + break :stmt stmt; + }, + .s_export_default => |st| stmt: { + // Simple case: we can move this to the default property of the exports object + if (st.canBeMoved()) { + try ctx.export_props.append(p.allocator, .{ + .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc), + .value = st.value.toExpr(), + }); + // no statement emitted + return; + } + + // Otherwise, we need a temporary + const temp_id = p.generateTempRef("default_export"); + try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = temp_id, .is_top_level = true }); + try ctx.last_part.symbol_uses.putNoClobber(p.allocator, temp_id, .{ .count_estimate = 1 }); + try p.module_scope.generated.push(p.allocator, temp_id); + + try ctx.export_props.append(p.allocator, .{ + .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc), + .value = Expr.initIdentifier(temp_id, stmt.loc), + }); + + break :stmt Stmt.alloc(S.Local, .{ + .kind = .k_const, + .decls = try G.Decl.List.fromSlice(p.allocator, &.{ + .{ + .binding = Binding.alloc(p.allocator, B.Identifier{ .ref = temp_id }, stmt.loc), + .value = st.value.toExpr(), + }, + }), + }, stmt.loc); + }, + .s_class => |st| stmt: { + // Strip the "export" keyword + if (!st.is_export) break :stmt stmt; + + // Export as CommonJS + try ctx.export_props.append(p.allocator, .{ + .key = Expr.init(E.String, .{ + .data = p.symbols.items[st.class.class_name.?.ref.?.inner_index].original_name, + }, stmt.loc), + .value = Expr.initIdentifier(st.class.class_name.?.ref.?, stmt.loc), + }); + + st.is_export = false; + + break :stmt stmt; + }, + .s_function => |st| stmt: { + // Strip the "export" keyword + if (!st.func.flags.contains(.is_export)) break :stmt stmt; + + st.func.flags.remove(.is_export); + + // Export as CommonJS + try ctx.export_props.append(p.allocator, .{ + .key = Expr.init(E.String, .{ + .data = p.symbols.items[st.func.name.?.ref.?.inner_index].original_name, + }, stmt.loc), + .value = Expr.initIdentifier(st.func.name.?.ref.?, stmt.loc), + }); + + break :stmt stmt; + }, + .s_export_clause => |st| { + for (st.items) |item| { + try ctx.export_props.append(p.allocator, .{ + .key = Expr.init(E.String, .{ + .data = item.alias, + }, stmt.loc), + .value = Expr.initIdentifier(item.name.ref.?, item.name.loc), + }); + } + + return; // do not emit a statement here + }, + + .s_export_from => |st| { + _ = st; // autofix + @panic("TODO s_export_from"); + }, + .s_export_star => |st| { + _ = st; // autofix + @panic("TODO s_export_star"); + }, + + // De-duplicate import statements. 
It is okay to disregard + // named/default imports here as we always rewrite them as + // full qualified property accesses (need to so live-bindings) + .s_import => |st| stmt: { + const gop = try ctx.imports_seen.getOrPut(p.allocator, st.import_record_index); + if (gop.found_existing) return; + break :stmt stmt; + }, + }; + + try ctx.stmts.append(p.allocator, new_stmt); + } + + fn visitBindingForKitModuleExports( + ctx: *ConvertESMExportsForHmr, + p: *P, + binding: Binding, + is_live_binding: bool, + ) !void { + switch (binding.data) { + .b_missing => @panic("missing!"), + .b_identifier => |id| { + try ctx.visitRefForKitModuleExports(p, id.ref, binding.loc, is_live_binding); + }, + .b_array => |array| { + for (array.items) |item| { + try ctx.visitBindingForKitModuleExports(p, item.binding, is_live_binding); + } + }, + .b_object => |object| { + for (object.properties) |item| { + try ctx.visitBindingForKitModuleExports(p, item.value, is_live_binding); + } + }, + } + } + + fn visitRefForKitModuleExports( + ctx: *ConvertESMExportsForHmr, + p: *P, + ref: Ref, + loc: logger.Loc, + is_live_binding: bool, + ) !void { + const symbol = p.symbols.items[ref.inner_index]; + const id = Expr.initIdentifier(ref, loc); + if (is_live_binding) { + const key = Expr.init(E.String, .{ + .data = symbol.original_name, + }, loc); + + // This is technically incorrect in that we've marked this as a + // top level symbol. but all we care about is preventing name + // collisions, not necessarily the best minificaiton (dev only) + const arg1 = p.generateTempRef(symbol.original_name); + try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = arg1, .is_top_level = true }); + try ctx.last_part.symbol_uses.putNoClobber(p.allocator, arg1, .{ .count_estimate = 1 }); + try p.module_scope.generated.push(p.allocator, arg1); + + // Live bindings need to update the value internally and externally. 
+ // 'get abc() { return abc }' + try ctx.export_props.append(p.allocator, .{ + .kind = .get, + .key = key, + .value = Expr.init(E.Function, .{ .func = .{ + .body = .{ + .stmts = try p.allocator.dupe(Stmt, &.{ + Stmt.alloc(S.Return, .{ .value = id }, loc), + }), + .loc = loc, + }, + } }, loc), + }); + // 'set abc(abc2) { abc = abc2 }' + try ctx.export_props.append(p.allocator, .{ + .kind = .set, + .key = key, + .value = Expr.init(E.Function, .{ .func = .{ + .args = try p.allocator.dupe(G.Arg, &.{.{ + .binding = Binding.alloc(p.allocator, B.Identifier{ .ref = arg1 }, loc), + }}), + .body = .{ + .stmts = try p.allocator.dupe(Stmt, &.{ + Stmt.alloc(S.SExpr, .{ + .value = Expr.assign(id, Expr.initIdentifier(arg1, loc)), + }, loc), + }), + .loc = loc, + }, + } }, loc), + }); + } else { + // 'abc,' + try ctx.export_props.append(p.allocator, .{ + .key = Expr.init(E.String, .{ + .data = symbol.original_name, + }, loc), + .value = id, + }); + } + } + + pub fn finalize(ctx: *ConvertESMExportsForHmr, p: *P, all_parts: []js_ast.Part) ![]js_ast.Part { + if (ctx.export_props.items.len > 0) { + // add a marker for the client runtime to tell that this is an ES module + try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{ + .value = Expr.assign( + Expr.init(E.Dot, .{ + .target = Expr.initIdentifier(p.module_ref, logger.Loc.Empty), + .name = "__esModule", + .name_loc = logger.Loc.Empty, + }, logger.Loc.Empty), + Expr.init(E.Boolean, .{ .value = true }, logger.Loc.Empty), + ), + }, logger.Loc.Empty)); + + try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{ + .value = Expr.assign( + Expr.init(E.Dot, .{ + .target = Expr.initIdentifier(p.module_ref, logger.Loc.Empty), + .name = "exports", + .name_loc = logger.Loc.Empty, + }, logger.Loc.Empty), + Expr.init(E.Object, .{ + .properties = G.Property.List.fromList(ctx.export_props), + }, logger.Loc.Empty), + ), + }, logger.Loc.Empty)); + + // mark a dependency on module_ref so it is renamed + try ctx.last_part.symbol_uses.put(p.allocator, p.module_ref, .{ .count_estimate = 1 }); + try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = p.module_ref, .is_top_level = true }); + } + + // TODO: this is a tiny mess. it is honestly trying to hard to merge all parts into one + for (all_parts[0 .. 
all_parts.len - 1]) |*part| { + try ctx.last_part.declared_symbols.appendList(p.allocator, part.declared_symbols); + try ctx.last_part.import_record_indices.append(p.allocator, part.import_record_indices.slice()); + for (part.symbol_uses.keys(), part.symbol_uses.values()) |k, v| { + const gop = try ctx.last_part.symbol_uses.getOrPut(p.allocator, k); + if (!gop.found_existing) { + gop.value_ptr.* = v; + } else { + gop.value_ptr.count_estimate += v.count_estimate; + } + } + part.stmts = &.{}; + part.declared_symbols.entries.len = 0; + part.tag = .dead_due_to_inlining; + part.dependencies.clearRetainingCapacity(); + try part.dependencies.push(p.allocator, .{ + .part_index = @intCast(all_parts.len - 1), + .source_index = p.source.index, + }); + } + + try ctx.last_part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items); + try ctx.last_part.declared_symbols.appendList(p.allocator, p.declared_symbols); + + ctx.last_part.stmts = ctx.stmts.items; + ctx.last_part.tag = .none; + + return all_parts; + } + }; + pub fn init( allocator: Allocator, log: *logger.Log, @@ -23956,9 +23911,9 @@ fn NewParser_( var scope_order = try ScopeOrderList.initCapacity(allocator, 1); const scope = try allocator.create(Scope); scope.* = Scope{ - .members = @TypeOf(scope.members){}, - .children = @TypeOf(scope.children){}, - .generated = @TypeOf(scope.generated){}, + .members = .{}, + .children = .{}, + .generated = .{}, .kind = .entry, .label_ref = null, .parent = null, @@ -23974,7 +23929,6 @@ fn NewParser_( .call_target = nullExprData, .delete_target = nullExprData, .stmt_expr_value = nullExprData, - .expr_list = .{}, .loop_body = nullStmtData, .define = define, .import_records = undefined, @@ -24136,10 +24090,9 @@ pub fn newLazyExportAST( return result.ast; } -const CommonJSWrapper = union(enum) { - none: void, - bun_dev: Expr, - bun_js: void, +const WrapMode = enum { + none, + bun_commonjs, }; /// Equivalent of esbuild's js_ast_helpers.ToInt32 diff --git a/src/js_printer.zig b/src/js_printer.zig index a318eb46f5..976a29ddd7 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -529,6 +529,7 @@ pub const Options = struct { target: options.Target = .browser, runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache = null, + input_files_for_kit: ?[]logger.Source = null, commonjs_named_exports: js_ast.Ast.CommonJSNamedExports = .{}, commonjs_named_exports_deoptimized: bool = false, @@ -547,7 +548,7 @@ pub const Options = struct { require_or_import_meta_for_source_callback: RequireOrImportMeta.Callback = .{}, - module_type: options.OutputFormat = .preserve, + module_type: options.Format = .esm, // /// Used for cross-module inlining of import items when bundling // const_values: Ast.ConstValuesMap = .{}, @@ -1016,10 +1017,6 @@ fn NewPrinter( } } - pub inline fn unsafePrint(p: *Printer, str: string) void { - p.print(str); - } - pub inline fn unindent(p: *Printer) void { p.options.indent.count -|= 1; } @@ -2035,7 +2032,17 @@ fn NewPrinter( p.print("("); } - if (!meta.was_unwrapped_require) { + if (p.options.input_files_for_kit) |input_files| { + bun.assert(p.options.module_type == .internal_kit_dev); + p.printSpaceBeforeIdentifier(); + p.printSymbol(p.options.commonjs_module_ref); + p.print(".require("); + { + const path = input_files[record.source_index.get()].path; + p.printInlinedEnum(.{ .number = @floatFromInt(path.hashForKit()) }, path.pretty, level); + } + p.print(")"); + } else if (!meta.was_unwrapped_require) { // Call the wrapper if (meta.wrapper_ref.isValid()) { 
p.printSpaceBeforeIdentifier(); @@ -2321,7 +2328,10 @@ fn NewPrinter( .e_import_meta => { p.printSpaceBeforeIdentifier(); p.addSourceMapping(expr.loc); - if (!p.options.import_meta_ref.isValid()) { + if (p.options.module_type == .internal_kit_dev) { + p.printSymbol(p.options.commonjs_module_ref); + p.print(".importMeta()"); + } else if (!p.options.import_meta_ref.isValid()) { // Most of the time, leave it in there p.print("import.meta"); } else { @@ -2349,6 +2359,8 @@ fn NewPrinter( } p.print("import.meta.main"); } else { + bun.assert(p.options.module_type != .internal_kit_dev); + p.printSpaceBeforeIdentifier(); p.addSourceMapping(expr.loc); @@ -3092,7 +3104,10 @@ fn NewPrinter( // Potentially use a property access instead of an identifier var didPrint = false; - const ref = p.symbols().follow(e.ref); + const ref = if (p.options.module_type != .internal_kit_dev) + p.symbols().follow(e.ref) + else + e.ref; const symbol = p.symbols().get(ref).?; if (symbol.import_item_status == .missing) { @@ -3176,6 +3191,7 @@ fn NewPrinter( // } if (!didPrint) { + // assert(p.options.module_type != .internal_kit_dev); p.printSpaceBeforeIdentifier(); p.addSourceMapping(expr.loc); p.printSymbol(e.ref); @@ -3972,9 +3988,6 @@ fn NewPrinter( } p.print("}"); }, - else => { - Output.panic("Unexpected binding of type {any}", .{binding}); - }, } } @@ -4863,7 +4876,9 @@ fn NewPrinter( p.print("{"); if (!s.is_single_line) { - p.unindent(); + p.indent(); + } else { + p.printSpace(); } for (s.items, 0..) |item, i| { @@ -4886,6 +4901,8 @@ fn NewPrinter( p.unindent(); p.printNewline(); p.printIndent(); + } else { + p.printSpace(); } p.print("}"); item_count += 1; @@ -5165,7 +5182,7 @@ fn NewPrinter( } inline fn printDisabledImport(p: *Printer) void { - p.print("(()=>({}))"); + p.printWhitespacer(ws("(() => ({}))")); } pub fn printLoadFromBundleWithoutCall(p: *Printer, import_record_index: u32) void { @@ -5758,12 +5775,6 @@ pub fn NewWriter( } pub inline fn print(writer: *Self, comptime ValueType: type, str: ValueType) void { - if (FeatureFlags.disable_printing_null) { - if (str == 0) { - Output.panic("Attempted to print null char", .{}); - } - } - switch (ValueType) { comptime_int, u16, u8 => { const written = writeByte(&writer.ctx, @as(u8, @intCast(str))) catch |err| brk: { @@ -6419,15 +6430,30 @@ pub fn printWithWriterAndPlatform( imported_module_ids_list = printer.imported_module_ids; } - for (parts) |part| { - for (part.stmts) |stmt| { - printer.printStmt(stmt) catch |err| { - return .{ .err = err }; - }; - if (printer.writer.getError()) {} else |err| { - return .{ .err = err }; + if (opts.module_type == .internal_kit_dev) { + printer.indent(); + printer.printIndent(); + printer.fmt("{d}", .{source.path.hashForKit()}) catch bun.outOfMemory(); + printer.print(": function"); + printer.printFunc(parts[0].stmts[0].data.s_expr.value.data.e_function.func); + printer.print(",\n"); + } else { + // The IIFE wrapper is done in `postProcessJSChunk`, so we just manually + // trigger an indent. + if (opts.module_type == .iife) { + printer.indent(); + } + + for (parts) |part| { + for (part.stmts) |stmt| { + printer.printStmt(stmt) catch |err| { + return .{ .err = err }; + }; + if (printer.writer.getError()) {} else |err| { + return .{ .err = err }; + } + printer.printSemicolonIfNeeded(); } - printer.printSemicolonIfNeeded(); } } diff --git a/src/kit/DevServer.zig b/src/kit/DevServer.zig new file mode 100644 index 0000000000..804a8803b4 --- /dev/null +++ b/src/kit/DevServer.zig @@ -0,0 +1,925 @@ +//! 
Instance of the development server. Controls an event loop, web server, +//! bundling threads, and JavaScript VM instance. All data is held in memory. +//! +//! Currently does not have a `deinit()`, as it is assumed to be alive for the +//! remainder of this process' lifespan. +pub const DevServer = @This(); + +pub const Options = struct { + cwd: []u8, + routes: []Route, + listen_config: uws.AppListenConfig = .{ .port = 3000 }, + dump_sources: ?[]const u8 = if (Environment.isDebug) ".kit-debug" else null, + verbose_watcher: bool = false, + // TODO: make it possible to inherit a js VM +}; + +/// Accepting a custom allocator for all of DevServer would be misleading +/// as there are many functions which will use default_allocator. +const default_allocator = bun.default_allocator; + +cwd: []const u8, +dump_dir: ?std.fs.Dir, + +// UWS App +app: *App, +routes: []Route, +address: struct { + port: u16, + hostname: [*:0]const u8, +}, +listener: ?*App.ListenSocket, + +// Server Runtime +server_global: *DevGlobalObject, +vm: *VirtualMachine, + +// Bundling +bundle_thread: BundleThread, + +// // Watch + HMR +// bun_watcher: *HotReloader.Watcher, +/// Required by `bun.JSC.NewHotReloader` +bundler: Bundler, +/// Required by `Bundler` +log_do_not_use: Log, + +pub const internal_prefix = "/_bun"; +pub const client_prefix = internal_prefix ++ "/client"; + +pub const Route = struct { + // Config + pattern: [:0]const u8, + entry_point: []const u8, + + server_bundle: BundlePromise(ServerBundle) = .unqueued, + client_bundle: BundlePromise(ClientBundle) = .unqueued, + + /// Assigned in DevServer.init + dev: *DevServer = undefined, + client_bundled_url: []u8 = undefined, + + pub fn clientPublicPath(route: *const Route) []const u8 { + return route.client_bundled_url[0 .. route.client_bundled_url.len - "/client.js".len]; + } +}; + +/// Prepared server-side bundle and loaded JavaScript module +const ServerBundle = struct { + files: []OutputFile, + server_request_callback: JSC.JSValue, +}; + +/// Preparred client-side bundle. +/// Namespaced to URL: `/_bun/client/:route_index/:file_path` +const ClientBundle = struct { + files: []OutputFile, + /// Indexes into this are indexes into `files`. + /// This is case insensitive because URL paths should be case insensitive. 
+ files_index: bun.CaseInsensitiveASCIIStringArrayHashMapUnmanaged(void), + + pub fn getFile(bundle: *ClientBundle, filename: []const u8) ?*OutputFile { + return if (bundle.files_index.getIndex(filename)) |i| + &bundle.files[i] + else + null; + } +}; + +pub fn init(options: Options) *DevServer { + if (JSC.VirtualMachine.VMHolder.vm != null) + @panic("Cannot initialize kit.DevServer on a thread with an active JSC.VirtualMachine"); + + const dump_dir = if (options.dump_sources) |dir| + std.fs.cwd().makeOpenPath(dir, .{}) catch |err| dir: { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.warn("Could not open directory for dumping sources: {}", .{err}); + break :dir null; + } + else + null; + + const app = App.create(.{}); + + const dev = bun.new(DevServer, .{ + .cwd = options.cwd, + .app = app, + .routes = options.routes, + .address = .{ + .port = @intCast(options.listen_config.port), + .hostname = options.listen_config.host orelse "localhost", + }, + .listener = null, + .bundle_thread = BundleThread.uninitialized, + .server_global = undefined, + .vm = undefined, + .dump_dir = dump_dir, + // .bun_watcher = undefined, + .bundler = undefined, + .log_do_not_use = Log.init(bun.failing_allocator), + }); + + dev.bundler = bun.Bundler.init( + default_allocator, + &dev.log_do_not_use, + std.mem.zeroes(bun.Schema.Api.TransformOptions), + null, // TODO: + ) catch bun.outOfMemory(); + + const loaders = bun.options.loadersFromTransformOptions(default_allocator, null, .bun) catch + bun.outOfMemory(); + + dev.bundler.options = .{ + .entry_points = &.{}, + .define = dev.bundler.options.define, + .loaders = loaders, + .log = &dev.log_do_not_use, + .output_dir = "", // this disables filesystem output + .output_format = .internal_kit_dev, + .out_extensions = bun.StringHashMap([]const u8).init(bun.failing_allocator), + + // unused by all code + .resolve_mode = .dev, + // technically used (in macro) but should be removed + .transform_options = std.mem.zeroes(bun.Schema.Api.TransformOptions), + }; + dev.bundler.configureLinker(); + dev.bundler.resolver.opts = dev.bundler.options; + + // const fs = bun.fs.FileSystem.init(options.cwd) catch @panic("Failed to init FileSystem"); + // dev.bun_watcher = HotReloader.init(dev, fs, options.verbose_watcher, false); + // dev.bundler.resolver.watcher = dev.bun_watcher.getResolveWatcher(); + + dev.vm = VirtualMachine.initKit(.{ + .allocator = default_allocator, + .args = std.mem.zeroes(bun.Schema.Api.TransformOptions), + }) catch |err| + Output.panic("Failed to create Global object: {}", .{err}); + dev.server_global = c.KitCreateDevGlobal(dev, dev.vm.console); + dev.vm.global = dev.server_global.js(); + dev.vm.regular_event_loop.global = dev.vm.global; + dev.vm.jsc = dev.vm.global.vm(); + dev.vm.event_loop.ensureWaker(); + + _ = JSC.WorkPool.get(); + const thread = dev.bundle_thread.spawn() catch |err| + Output.panic("Failed to spawn bundler thread: {}", .{err}); + thread.detach(); + + var has_fallback = false; + + for (options.routes, 0..) 
|*route, i| { + app.any(route.pattern, *Route, route, onServerRequestInit); + + route.dev = dev; + route.client_bundled_url = std.fmt.allocPrint( + default_allocator, + client_prefix ++ "/{d}/client.js", + .{i}, + ) catch bun.outOfMemory(); + + if (bun.strings.eqlComptime(route.pattern, "/*")) + has_fallback = true; + } + + app.get(client_prefix ++ "/:route/:asset", *DevServer, dev, onAssetRequestInit); + + app.ws( + internal_prefix ++ "/hmr", + dev, + 0, + uws.WebSocketBehavior.Wrap(DevServer, DevWebSocket, false).apply(.{}), + ); + + if (!has_fallback) + app.any("/*", void, {}, onFallbackRoute); + + app.listenWithConfig(*DevServer, dev, onListen, options.listen_config); + + return dev; +} + +pub fn runLoopForever(dev: *DevServer) noreturn { + const lock = dev.vm.jsc.getAPILock(); + defer lock.release(); + + while (true) { + dev.vm.tick(); + dev.vm.eventLoop().autoTickActive(); + } +} + +// uws handlers + +fn onListen(ctx: *DevServer, maybe_listen: ?*App.ListenSocket) void { + const listen: *App.ListenSocket = maybe_listen orelse { + @panic("TODO: handle listen failure"); + }; + + ctx.listener = listen; + ctx.address.port = @intCast(listen.getLocalPort()); + + Output.prettyErrorln("--\\> http://{s}:{d}\n", .{ + bun.span(ctx.address.hostname), + ctx.address.port, + }); + Output.flush(); +} + +fn onAssetRequestInit(dev: *DevServer, req: *Request, resp: *Response) void { + const route = route: { + const route_id = req.parameter(0); + const i = std.fmt.parseInt(u16, route_id, 10) catch + return req.setYield(true); + if (i >= dev.routes.len) + return req.setYield(true); + break :route &dev.routes[i]; + }; + const asset_name = req.parameter(1); + dev.getOrEnqueueBundle(resp, route, .client, .{ .file_name = asset_name }); +} + +fn onServerRequestInit(route: *Route, req: *Request, resp: *Response) void { + _ = req; + route.dev.getOrEnqueueBundle(resp, route, .server, .{}); +} + +// uws with bundle handlers + +fn onAssetRequestWithBundle(route: *Route, resp: *Response, ctx: BundleKind.client.Context(), bundle: *ClientBundle) void { + _ = route; + + const file = bundle.getFile(ctx.file_name) orelse + return sendBuiltInNotFound(resp); + + sendOutputFile(file, resp); +} + +fn onServerRequestWithBundle(route: *Route, resp: *Response, ctx: BundleKind.server.Context(), bundle: *ServerBundle) void { + _ = ctx; // autofix + const dev = route.dev; + const global = dev.server_global.js(); + + const context = JSValue.createEmptyObject(global, 1); + context.put( + dev.server_global.js(), + bun.String.static("clientEntryPoint"), + bun.String.init(route.client_bundled_url).toJS(global), + ); + + const result = bundle.server_request_callback.call( + global, + .undefined, + &.{context}, + ) catch |err| { + const exception = global.takeException(err); + const fail: Failure = .{ .request_handler = exception }; + fail.printToConsole(route, .server); + fail.sendAsHttpResponse(resp, route, .server); + return; + }; + + // TODO: This interface and implementation is very poor. but fine until API + // considerations become important (as of writing, there are 3 dozen todo + // items before it) + // + // It probably should use code from `server.zig`, but most importantly it should + // not have a tie to DevServer, but instead be generic with a context structure + // containing just a *uws.App, *JSC.EventLoop, and JSValue response object. 
+ // + // This would allow us to support all of the nice things `new Response` allows + + const bun_string = result.toBunString(dev.server_global.js()); + if (bun_string.tag == .Dead) @panic("TODO NOT STRING"); + defer bun_string.deref(); + + const utf8 = bun_string.toUTF8(default_allocator); + defer utf8.deinit(); + + resp.writeStatus("200 OK"); + resp.writeHeader("Content-Type", MimeType.html.value); + resp.end(utf8.slice(), true); // TODO: You should never call res.end(huge buffer) +} + +fn onFallbackRoute(_: void, _: *Request, resp: *Response) void { + sendBuiltInNotFound(resp); +} + +// http helper functions + +fn sendOutputFile(file: *const OutputFile, resp: *Response) void { + switch (file.value) { + .buffer => |buffer| { + if (buffer.bytes.len == 0) { + resp.writeStatus("202 No Content"); + resp.writeHeaderInt("Content-Length", 0); + resp.end("", true); + return; + } + + resp.writeStatus("200 OK"); + // TODO: CSS, Sourcemap + resp.writeHeader("Content-Type", MimeType.javascript.value); + resp.end(buffer.bytes, true); // TODO: You should never call res.end(huge buffer) + }, + else => |unhandled_tag| Output.panic("TODO: unhandled tag .{s}", .{@tagName(unhandled_tag)}), + } +} + +fn sendBuiltInNotFound(resp: *Response) void { + const message = "404 Not Found"; + resp.writeStatus("404 Not Found"); + resp.end(message, true); +} + +// bundling + +const BundleKind = enum { + client, + server, + + fn Bundle(kind: BundleKind) type { + return switch (kind) { + .client => ClientBundle, + .server => ServerBundle, + }; + } + + /// Routing information from uws.Request is stack allocated. + /// This union has no type tag because it can be inferred from surrounding data. + fn Context(kind: BundleKind) type { + return switch (kind) { + .client => struct { file_name: []const u8 }, + .server => struct {}, + }; + } + + inline fn completionFunction(comptime kind: BundleKind) fn (*Route, *Response, kind.Context(), *kind.Bundle()) void { + return switch (kind) { + .client => onAssetRequestWithBundle, + .server => onServerRequestWithBundle, + }; + } + + const AnyContext: type = @Type(.{ + .Union = .{ + .layout = .auto, + .tag_type = null, + .fields = &fields: { + const values = std.enums.values(BundleKind); + var fields: [values.len]std.builtin.Type.UnionField = undefined; + for (&fields, values) |*field, kind| { + field.* = .{ + .name = @tagName(kind), + .type = kind.Context(), + .alignment = @alignOf(kind.Context()), + }; + } + break :fields fields; + }, + .decls = &.{}, + }, + }); + + inline fn initAnyContext(comptime kind: BundleKind, data: kind.Context()) AnyContext { + return @unionInit(AnyContext, @tagName(kind), data); + } +}; + +/// This will either immediately call `kind.completionFunction()`, or schedule a +/// task to call it when the bundle is ready. The completion function is allowed +/// to use yield. +fn getOrEnqueueBundle( + dev: *DevServer, + resp: *Response, + route: *Route, + comptime kind: BundleKind, + ctx: kind.Context(), +) void { + // const bundler = &dev.bundler; + const bundle = switch (kind) { + .client => &route.client_bundle, + .server => &route.server_bundle, + }; + + switch (bundle.*) { + .unqueued => { + // TODO: use an object pool for this. 
`bun.ObjectPool` needs a refactor before it can be used + const cb = BundleTask.DeferredRequest.newNode(resp, kind.initAnyContext(ctx)); + + const task = bun.new(BundleTask, .{ + .owner = dev, + .route = route, + .kind = kind, + .plugins = null, + .handlers = .{ .first = cb }, + }); + bundle.* = .{ .pending = task }; + dev.bundle_thread.enqueue(task); + }, + .pending => |task| { + const cb = BundleTask.DeferredRequest.newNode(resp, kind.initAnyContext(ctx)); + // This is not a data race, since this list is drained on + // the same thread as this function is called. + task.handlers.prepend(cb); + }, + .failed => |fail| { + fail.sendAsHttpResponse(resp, route, kind); + }, + .value => |*val| { + kind.completionFunction()(route, resp, ctx, val); + }, + } +} + +const BundleThread = bun.bundle_v2.BundleThread(BundleTask); + +/// A request to bundle something for development. Has one or more pending HTTP requests. +pub const BundleTask = struct { + owner: *DevServer, + route: *Route, + kind: BundleKind, + // env: *bun.DotEnv.Loader, // TODO + plugins: ?*JSC.API.JSBundler.Plugin, + handlers: DeferredRequest.List, + + next: ?*BundleTask = null, + result: BundleV2.Result = .{ .pending = {} }, + + // initialized in the task itself: + concurrent_task: JSC.EventLoopTask = undefined, + bundler: *BundleV2 = undefined, + log: Log = undefined, + + /// There is no function pointer, route, or context on this struct as all of + /// this information is inferable from the associated BundleTask + const DeferredRequest = struct { + /// When cancelled, this is set to null + resp: ?*Response, + /// Only valid if req is non-null + ctx: BundleKind.AnyContext, + + fn newNode(resp: *Response, ctx: BundleKind.AnyContext) *DeferredRequest.List.Node { + const node = bun.new(DeferredRequest.List.Node, .{ + .data = .{ + .resp = resp, + .ctx = ctx, + }, + }); + resp.onAborted(*DeferredRequest, onCancel, &node.data); + return node; + } + + fn onCancel(node: *DeferredRequest, resp: *Response) void { + node.resp = null; + node.ctx = undefined; + _ = resp; + } + + const List = std.SinglyLinkedList(DeferredRequest); + }; + + pub fn completeOnMainThread(task: *BundleTask) void { + switch (task.kind) { + inline else => |kind| task.completeOnMainThreadWithKind(kind), + } + } + + fn completeOnMainThreadWithKind(task: *BundleTask, comptime kind: BundleKind) void { + const route = task.route; + const bundle = switch (kind) { + .client => &route.client_bundle, + .server => &route.server_bundle, + }; + + assert(bundle.* == .pending); + + if (task.result == .err) { + const fail = Failure.fromLog(&task.log); + fail.printToConsole(route, kind); + task.finishHttpRequestsFailure(&fail); + bundle.* = .{ .failed = fail }; + return; + } + + if (task.log.hasAny()) { + Output.warn("Warnings {s} for {s}", .{ + @tagName(task.kind), + route.pattern, + }); + task.log.printForLogLevel(Output.errorWriter()) catch {}; + } + + const files = task.result.value.output_files.items; + bun.assert(files.len > 0); + + const dev = route.dev; + if (dev.dump_dir) |dump_dir| { + dumpBundle(dump_dir, route, kind, files) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.warn("Could not dump bundle: {}", .{err}); + }; + } + + switch (kind) { + .client => { + // Set the capacity to the exact size required to avoid over-allocation + var files_index: bun.CaseInsensitiveASCIIStringArrayHashMapUnmanaged(void) = .{}; + files_index.entries.setCapacity(default_allocator, files.len) catch bun.outOfMemory(); + files_index.entries.len = files.len; + 
                for (files_index.keys(), files) |*index_key, file| {
+                    var dest_path = file.dest_path;
+                    if (bun.strings.hasPrefixComptime(dest_path, "./")) {
+                        dest_path = dest_path[2..];
+                    }
+                    index_key.* = dest_path;
+                }
+                files_index.reIndex(default_allocator) catch bun.outOfMemory();
+
+                bundle.* = .{ .value = .{
+                    .files = files,
+                    .files_index = files_index,
+                } };
+            },
+            .server => {
+                const entry_point = files[0];
+                const code = entry_point.value.buffer.bytes;
+
+                const server_code = c.KitLoadServerCode(dev.server_global, bun.String.createLatin1(code));
+                dev.vm.waitForPromise(.{ .internal = server_code.promise });
+
+                switch (server_code.promise.unwrap(dev.vm.jsc, .mark_handled)) {
+                    .pending => unreachable, // promise is settled
+                    .rejected => |err| {
+                        const fail = Failure.fromJSServerLoad(err, dev.server_global.js());
+                        fail.printToConsole(task.route, .server);
+                        task.finishHttpRequestsFailure(&fail);
+                        bundle.* = .{ .failed = fail };
+                        return;
+                    },
+                    .fulfilled => |v| bun.assert(v == .undefined),
+                }
+
+                const handler = c.KitGetRequestHandlerFromModule(dev.server_global, server_code.key);
+
+                if (!handler.isCallable(dev.vm.jsc)) {
+                    @panic("TODO: handle not callable");
+                }
+
+                bundle.* = .{ .value = .{
+                    .files = files,
+                    .server_request_callback = handler,
+                } };
+            },
+        }
+
+        task.finishHttpRequestsSuccess(kind, &bundle.value);
+    }
+
+    fn finishHttpRequestsSuccess(task: *BundleTask, comptime kind: BundleKind, bundle: *kind.Bundle()) void {
+        const func = comptime kind.completionFunction();
+
+        while (task.handlers.popFirst()) |node| {
+            defer bun.destroy(node);
+            if (node.data.resp) |resp| {
+                func(task.route, resp, @field(node.data.ctx, @tagName(kind)), bundle);
+            }
+        }
+    }
+
+    fn finishHttpRequestsFailure(task: *BundleTask, failure: *const Failure) void {
+        while (task.handlers.popFirst()) |node| {
+            defer bun.destroy(node);
+            if (node.data.resp) |resp| {
+                failure.sendAsHttpResponse(resp, task.route, task.kind);
+            }
+        }
+    }
+
+    pub fn configureBundler(task: *BundleTask, bundler: *Bundler, allocator: Allocator) !void {
+        const dev = task.route.dev;
+
+        bundler.* = try bun.Bundler.init(
+            allocator,
+            &task.log,
+            std.mem.zeroes(bun.Schema.Api.TransformOptions),
+            null, // TODO:
+        );
+
+        const define = bundler.options.define;
+        bundler.options = dev.bundler.options;
+
+        bundler.options.define = define;
+        bundler.options.entry_points = (&task.route.entry_point)[0..1];
+        bundler.options.log = &task.log;
+        bundler.options.output_dir = ""; // this disables filesystem output
+        bundler.options.output_format = .internal_kit_dev;
+        bundler.options.out_extensions = bun.StringHashMap([]const u8).init(bundler.allocator);
+        bundler.options.react_fast_refresh = task.kind == .client;
+
+        bundler.options.public_path = switch (task.kind) {
+            .client => task.route.clientPublicPath(),
+            .server => task.route.dev.cwd,
+        };
+        bundler.options.target = switch (task.kind) {
+            .client => .browser,
+            .server => .bun,
+        };
+        bundler.options.entry_naming = switch (task.kind) {
+            // Always name it "client.{js/css}" so that the server can know
+            // the entry-point script without waiting on a client bundle.
+ .client => "client.[ext]", + // For uniformity + .server => "server.[ext]", + }; + bundler.options.tree_shaking = false; + bundler.options.minify_syntax = true; + + bundler.configureLinker(); + try bundler.configureDefines(); + + // The following are from Vite: https://vitejs.dev/guide/env-and-mode + // TODO: MODE, BASE_URL + try bundler.options.define.insert( + allocator, + "import.meta.env.DEV", + Define.Data.initBoolean(true), + ); + try bundler.options.define.insert( + allocator, + "import.meta.env.PROD", + Define.Data.initBoolean(false), + ); + try bundler.options.define.insert( + allocator, + "import.meta.env.SSR", + Define.Data.initBoolean(task.kind == .server), + ); + + bundler.resolver.opts = bundler.options; + bundler.resolver.watcher = dev.bundler.resolver.watcher; + } + + pub fn completeMini(task: *BundleTask, _: *void) void { + task.completeOnMainThread(); + } + + pub fn completeOnBundleThread(task: *BundleTask) void { + task.route.dev.vm.event_loop.enqueueTaskConcurrent(task.concurrent_task.js.from(task, .manual_deinit)); + } +}; + +/// Bundling should be concurrent, deduplicated, and cached. +/// This acts as a sort of "native promise" +fn BundlePromise(T: type) type { + return union(enum) { + unqueued, + pending: *BundleTask, + failed: Failure, + value: T, + }; +} + +/// Represents an error from loading or server sided runtime. Information on +/// what this error is from, such as the associated Route, is inferred from +/// surrounding context. +/// +/// In the case a route was not able to fully compile, the `Failure` is stored +/// so that a browser refreshing the page can display this failure. +const Failure = union(enum) { + /// Bundler and module resolution use `bun.logger` to report multiple errors at once. + bundler: std.ArrayList(bun.logger.Msg), + /// Thrown JavaScript exception while loading server code. + server_load: JSC.Strong, + /// Never stored; the current request handler threw an error. + request_handler: JSValue, + + /// Consumes the Log data, resetting it. + pub fn fromLog(log: *Log) Failure { + const fail: Failure = .{ .bundler = log.msgs }; + log.* = .{ + .msgs = std.ArrayList(bun.logger.Msg).init(log.msgs.allocator), + .level = log.level, + }; + return fail; + } + + pub fn fromJSServerLoad(js: JSValue, global: *JSC.JSGlobalObject) Failure { + return .{ .server_load = JSC.Strong.create(js, global) }; + } + + // TODO: deduplicate the two methods here. that isnt trivial because one has to + // style with ansi codes, and the other has to style with HTML. 
+ + fn printToConsole(fail: *const Failure, route: *const Route, kind: BundleKind) void { + defer Output.flush(); + + Output.prettyErrorln("", .{}); + + switch (fail.*) { + .bundler => |msgs| { + Output.prettyErrorln("Errors while bundling {s}-side for '{s}'", .{ + @tagName(kind), + route.pattern, + }); + Output.flush(); + + var log: Log = .{ .msgs = msgs, .errors = 1, .level = .err }; + log.printForLogLevelColorsRuntime( + Output.errorWriter(), + Output.enable_ansi_colors_stderr, + ) catch {}; + }, + .server_load => |strong| { + Output.prettyErrorln("Server route handler for '{s}' threw while loading", .{ + route.pattern, + }); + Output.flush(); + + const err = strong.get() orelse unreachable; + route.dev.vm.printErrorLikeObjectToConsole(err); + }, + .request_handler => |err| { + Output.prettyErrorln("Request to handler '{s}' failed SSR", .{ + route.pattern, + }); + Output.flush(); + + route.dev.vm.printErrorLikeObjectToConsole(err); + }, + } + } + + fn sendAsHttpResponse(fail: *const Failure, resp: *Response, route: *const Route, kind: BundleKind) void { + resp.writeStatus("500 Internal Server Error"); + var buffer: [32768]u8 = undefined; + + const message = message: { + var fbs = std.io.fixedBufferStream(&buffer); + const writer = fbs.writer(); + + switch (fail.*) { + .bundler => |msgs| { + writer.print("Errors while bundling {s}-side for '{s}'\n\n", .{ + @tagName(kind), + route.pattern, + }) catch break :message null; + + var log: Log = .{ .msgs = msgs, .errors = 1, .level = .err }; + log.printForLogLevelWithEnableAnsiColors(writer, false) catch + break :message null; + }, + .server_load => |strong| { + writer.print("Server route handler for '{s}' threw while loading\n\n", .{ + route.pattern, + }) catch break :message null; + const err = strong.get() orelse unreachable; + route.dev.vm.printErrorLikeObjectSimple(err, writer, false); + }, + .request_handler => |err| { + writer.print("Server route handler for '{s}' threw while loading\n\n", .{ + route.pattern, + }) catch break :message null; + route.dev.vm.printErrorLikeObjectSimple(err, writer, false); + }, + } + + break :message fbs.getWritten(); + } orelse message: { + const suffix = "...truncated"; + @memcpy(buffer[buffer.len - suffix.len ..], suffix); + break :message &buffer; + }; + resp.end(message, true); // TODO: "You should never call res.end(huge buffer)" + } +}; + +// For debugging, it is helpful to be able to see bundles. 
+fn dumpBundle(dump_dir: std.fs.Dir, route: *Route, kind: BundleKind, files: []OutputFile) !void { + for (files) |file| { + const name = bun.path.joinAbsString("/", &.{ + route.pattern, + @tagName(kind), + file.dest_path, + }, .auto)[1..]; + var inner_dir = try dump_dir.makeOpenPath(bun.Dirname.dirname(u8, name).?, .{}); + defer inner_dir.close(); + + switch (file.value) { + .buffer => |buf| { + try inner_dir.writeFile(.{ .data = buf.bytes, .sub_path = bun.path.basename(name) }); + }, + else => |t| Output.panic("TODO: implement dumping .{s}", .{@tagName(t)}), + } + } +} + +/// This function is required by `HotReloader` +pub fn eventLoop(dev: *DevServer) *JSC.EventLoop { + return dev.vm.eventLoop(); +} + +pub fn onWebSocketUpgrade( + dev: *DevServer, + res: *Response, + req: *Request, + upgrade_ctx: *uws.uws_socket_context_t, + id: usize, +) void { + assert(id == 0); + + const dw = bun.new(DevWebSocket, .{ .dev = dev }); + res.upgrade( + *DevWebSocket, + dw, + req.header("sec-websocket-key") orelse "", + req.header("sec-websocket-protocol") orelse "", + req.header("sec-websocket-extension") orelse "", + upgrade_ctx, + ); +} + +const DevWebSocket = struct { + dev: *DevServer, + + pub fn onOpen(dw: *DevWebSocket, ws: AnyWebSocket) void { + _ = ws.send("bun!", .binary, false, false); + std.debug.print("open {*} {}\n", .{ dw, ws }); + } + + pub fn onMessage(dw: *DevWebSocket, ws: AnyWebSocket, msg: []const u8, opcode: uws.Opcode) void { + std.debug.print("message {*} {} {} '{s}'\n", .{ dw, ws, opcode, msg }); + } + + pub fn onClose(dw: *DevWebSocket, ws: AnyWebSocket, exit_code: i32, message: []const u8) void { + defer bun.destroy(dw); + + std.debug.print("close {*} {} {} '{s}'\n", .{ dw, ws, exit_code, message }); + } +}; + +/// Kit uses a special global object extending Zig::GlobalObject +pub const DevGlobalObject = opaque { + /// Safe downcast to use other Bun APIs + pub fn js(ptr: *DevGlobalObject) *JSC.JSGlobalObject { + return @ptrCast(ptr); + } + + pub fn vm(ptr: *DevGlobalObject) *JSC.VM { + return ptr.js().vm(); + } +}; + +pub const KitSourceProvider = opaque {}; + +pub const c = struct { + // KitDevGlobalObject.cpp + extern fn KitCreateDevGlobal(owner: *DevServer, console: *JSC.ConsoleObject) *DevGlobalObject; + + // KitSourceProvider.cpp + const LoadServerCodeResult = extern struct { promise: *JSInternalPromise, key: *JSC.JSString }; + extern fn KitLoadServerCode(global: *DevGlobalObject, code: bun.String) LoadServerCodeResult; + extern fn KitGetRequestHandlerFromModule(global: *DevGlobalObject, module: *JSC.JSString) JSValue; +}; + +pub fn reload(dev: *DevServer) void { + // TODO: given no arguments, this method is absolutely useless. The watcher + // must be augmented with more information. 
+ _ = dev; + Output.warn("TODO: initiate hot reload", .{}); +} + +const std = @import("std"); +const Allocator = std.mem.Allocator; + +const bun = @import("root").bun; +const Environment = bun.Environment; +const assert = bun.assert; + +const Log = bun.logger.Log; + +const Bundler = bun.bundler.Bundler; +const BundleV2 = bun.bundle_v2.BundleV2; +const Define = bun.options.Define; +const OutputFile = bun.options.OutputFile; + +// TODO: consider if using system output is not fit +const Output = bun.Output; + +const uws = bun.uws; +const App = uws.NewApp(false); +const AnyWebSocket = uws.AnyWebSocket; +const Request = uws.Request; +const Response = App.Response; + +const MimeType = bun.http.MimeType; + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; +const VirtualMachine = JSC.VirtualMachine; +const JSModuleLoader = JSC.JSModuleLoader; +const EventLoopHandle = JSC.EventLoopHandle; +const JSInternalPromise = JSC.JSInternalPromise; + +pub const HotReloader = JSC.NewHotReloader(DevServer, JSC.EventLoop, false); +pub const HotReloadTask = HotReloader.HotReloadTask; diff --git a/src/kit/KitDevGlobalObject.cpp b/src/kit/KitDevGlobalObject.cpp new file mode 100644 index 0000000000..e797f3f03b --- /dev/null +++ b/src/kit/KitDevGlobalObject.cpp @@ -0,0 +1,82 @@ +#include "KitDevGlobalObject.h" +#include "JavaScriptCore/GlobalObjectMethodTable.h" +#include "JSNextTickQueue.h" +#include "headers-handwritten.h" + +namespace Kit { + +#define INHERIT_HOOK_METHOD(name) Zig::GlobalObject::s_globalObjectMethodTable. name + +const JSC::GlobalObjectMethodTable DevGlobalObject::s_globalObjectMethodTable = { + INHERIT_HOOK_METHOD(supportsRichSourceInfo), + INHERIT_HOOK_METHOD(shouldInterruptScript), + INHERIT_HOOK_METHOD(javaScriptRuntimeFlags), + INHERIT_HOOK_METHOD(queueMicrotaskToEventLoop), + INHERIT_HOOK_METHOD(shouldInterruptScriptBeforeTimeout), + INHERIT_HOOK_METHOD(moduleLoaderImportModule), + INHERIT_HOOK_METHOD(moduleLoaderResolve), + INHERIT_HOOK_METHOD(moduleLoaderFetch), + INHERIT_HOOK_METHOD(moduleLoaderCreateImportMetaProperties), + INHERIT_HOOK_METHOD(moduleLoaderEvaluate), + INHERIT_HOOK_METHOD(promiseRejectionTracker), + INHERIT_HOOK_METHOD(reportUncaughtExceptionAtEventLoop), + INHERIT_HOOK_METHOD(currentScriptExecutionOwner), + INHERIT_HOOK_METHOD(scriptExecutionStatus), + INHERIT_HOOK_METHOD(reportViolationForUnsafeEval), + INHERIT_HOOK_METHOD(defaultLanguage), + INHERIT_HOOK_METHOD(compileStreaming), + INHERIT_HOOK_METHOD(instantiateStreaming), + INHERIT_HOOK_METHOD(deriveShadowRealmGlobalObject), + INHERIT_HOOK_METHOD(codeForEval), + INHERIT_HOOK_METHOD(canCompileStrings), +}; + +DevGlobalObject* DevGlobalObject::create(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable) +{ + DevGlobalObject* ptr = new (NotNull, JSC::allocateCell(vm)) DevGlobalObject(vm, structure, methodTable); + ptr->finishCreation(vm); + return ptr; +} + +void DevGlobalObject::finishCreation(JSC::VM &vm) { + Base::finishCreation(vm); + ASSERT(inherits(info())); +} + +extern "C" BunVirtualMachine* Bun__getVM(); + +// A lot of this function is taken from 'Zig__GlobalObject__create' +extern "C" DevGlobalObject* KitCreateDevGlobal(DevServer* owner, void* console) { + JSC::VM& vm = JSC::VM::create(JSC::HeapType::Large).leakRef(); + vm.heap.acquireAccess(); + JSC::JSLockHolder locker(vm); + BunVirtualMachine* bunVM = Bun__getVM(); + WebCore::JSVMClientData::create(&vm, bunVM); + + JSC::Structure* structure = DevGlobalObject::createStructure(vm); + DevGlobalObject* global = 
DevGlobalObject::create(vm, structure, &DevGlobalObject::s_globalObjectMethodTable);
+    if (!global)
+        BUN_PANIC("Failed to create DevGlobalObject");
+
+    global->m_devServer = owner;
+    global->m_bunVM = bunVM;
+
+    JSC::gcProtect(global);
+
+    global->setConsole(console);
+    global->setStackTraceLimit(10); // Node.js defaults to 10
+
+    // vm.setOnComputeErrorInfo(computeErrorInfoWrapper);
+    vm.setOnEachMicrotaskTick([global](JSC::VM& vm) -> void {
+        if (auto nextTickQueue = global->m_nextTickQueue.get()) {
+            global->resetOnEachMicrotaskTick();
+            Bun::JSNextTickQueue* queue = jsCast<Bun::JSNextTickQueue*>(nextTickQueue);
+            queue->drain(vm, global);
+            return;
+        }
+    });
+
+    return global;
+}
+
+}; // namespace Kit
diff --git a/src/kit/KitDevGlobalObject.h b/src/kit/KitDevGlobalObject.h
new file mode 100644
index 0000000000..07126aca3a
--- /dev/null
+++ b/src/kit/KitDevGlobalObject.h
@@ -0,0 +1,42 @@
+#pragma once
+#include "root.h"
+#include "ZigGlobalObject.h"
+
+namespace Kit {
+
+struct DevServer; // DevServer.zig
+struct Route; // DevServer.zig
+struct BunVirtualMachine;
+
+class DevGlobalObject : public Zig::GlobalObject {
+public:
+    using Base = Zig::GlobalObject;
+
+    template<typename, JSC::SubspaceAccess mode> static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
+    {
+        if constexpr (mode == JSC::SubspaceAccess::Concurrently)
+            return nullptr;
+        return WebCore::subspaceForImpl<DevGlobalObject, WebCore::UseCustomHeapCellType::Yes>(
+            vm,
+            [](auto& spaces) { return spaces.m_clientSubspaceForKitGlobalScope.get(); },
+            [](auto& spaces, auto&& space) { spaces.m_clientSubspaceForKitGlobalScope = std::forward<decltype(space)>(space); },
+            [](auto& spaces) { return spaces.m_subspaceForKitGlobalScope.get(); },
+            [](auto& spaces, auto&& space) { spaces.m_subspaceForKitGlobalScope = std::forward<decltype(space)>(space); },
+            [](auto& server) -> JSC::HeapCellType& { return server.m_heapCellTypeForJSWorkerGlobalScope; });
+    }
+
+    static const JSC::GlobalObjectMethodTable s_globalObjectMethodTable;
+    static DevGlobalObject* create(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable);
+
+    DevServer* m_devServer;
+
+    void finishCreation(JSC::VM& vm);
+
+    DevGlobalObject(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable)
+        : Zig::GlobalObject(vm, structure, methodTable) { }
+};
+
+// Zig API
+extern "C" DevGlobalObject* KitCreateDevGlobal(DevServer* owner, void* console);
+
+}; // namespace Kit
diff --git a/src/kit/KitSourceProvider.cpp b/src/kit/KitSourceProvider.cpp
new file mode 100644
index 0000000000..e28e1c4fd1
--- /dev/null
+++ b/src/kit/KitSourceProvider.cpp
@@ -0,0 +1,55 @@
+// clang-format off
+#include "KitSourceProvider.h"
+#include "JavaScriptCore/Completion.h"
+#include "JavaScriptCore/Identifier.h"
+#include "JavaScriptCore/JSCJSValue.h"
+#include "JavaScriptCore/JSCast.h"
+#include "JavaScriptCore/JSLock.h"
+#include "JavaScriptCore/JSMap.h"
+#include "JavaScriptCore/JSModuleLoader.h"
+#include "JavaScriptCore/JSString.h"
+#include "JavaScriptCore/JSModuleNamespaceObject.h"
+#include "KitDevGlobalObject.h"
+
+namespace Kit {
+
+
+extern "C" LoadServerCodeResult KitLoadServerCode(DevGlobalObject* global, BunString source) {
+    String string = "kit://server/0/index.js"_s;
+    JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
+    JSC::SourceCode sourceCode = JSC::SourceCode(KitSourceProvider::create(
+        source.toWTFString(),
+        origin,
+        WTFMove(string),
+        WTF::TextPosition(),
+        JSC::SourceProviderSourceType::Module
+    ));
+
+    JSC::JSString* key = JSC::jsString(global->vm(), string);
+    global->moduleLoader()->provideFetch(global, key, sourceCode);
+
+    return {
+        global->moduleLoader()->loadAndEvaluateModule(global, key, JSC::jsUndefined(), JSC::jsUndefined()),
+        key
+    };
+}
+
+extern "C" JSC::EncodedJSValue KitGetRequestHandlerFromModule(
+    DevGlobalObject* global,
+    JSC::JSString* key
+) {
+    JSC::VM& vm = global->vm();
+    JSC::JSMap* map = JSC::jsCast<JSC::JSMap*>(
+        global->moduleLoader()->getDirect(
+            vm, JSC::Identifier::fromString(global->vm(), "registry"_s)
+        ));
+    JSC::JSValue entry = map->get(global, key);
+    ASSERT(entry.isObject()); // should have called KitLoadServerCode and waited for that promise
+    JSC::JSValue module = entry.getObject()->get(global, JSC::Identifier::fromString(global->vm(), "module"_s));
+    ASSERT(module.isCell());
+    JSC::JSModuleNamespaceObject* namespaceObject = global->moduleLoader()->getModuleNamespaceObject(global, module);
+    ASSERT(namespaceObject);
+    return JSC::JSValue::encode(namespaceObject->get(global, vm.propertyNames->defaultKeyword));
+}
+
+} // namespace Kit
diff --git a/src/kit/KitSourceProvider.h b/src/kit/KitSourceProvider.h
new file mode 100644
index 0000000000..1aacb369c7
--- /dev/null
+++ b/src/kit/KitSourceProvider.h
@@ -0,0 +1,47 @@
+#pragma once
+#include "root.h"
+#include "headers-handwritten.h"
+#include "KitDevGlobalObject.h"
+#include "JavaScriptCore/SourceOrigin.h"
+
+namespace Kit {
+
+struct LoadServerCodeResult {
+    JSC::JSInternalPromise* promise;
+    JSC::JSString* key;
+};
+
+class KitSourceProvider final : public JSC::StringSourceProvider {
+public:
+    static Ref<KitSourceProvider> create(
+        const String& source,
+        const JSC::SourceOrigin& sourceOrigin,
+        String&& sourceURL,
+        const TextPosition& startPosition,
+        JSC::SourceProviderSourceType sourceType
+    ) {
+        return adoptRef(*new KitSourceProvider(source, sourceOrigin, WTFMove(sourceURL), startPosition, sourceType));
+    }
+
+private:
+    KitSourceProvider(
+        const String& source,
+        const JSC::SourceOrigin& sourceOrigin,
+        String&& sourceURL,
+        const TextPosition& startPosition,
+        JSC::SourceProviderSourceType sourceType
+    ) : StringSourceProvider(
+        source,
+        sourceOrigin,
+        JSC::SourceTaintedOrigin::Untainted,
+        WTFMove(sourceURL),
+        startPosition,
+        sourceType
+    ) {}
+};
+
+// Zig API
+extern "C" LoadServerCodeResult KitLoadServerCode(DevGlobalObject* global, BunString source);
+extern "C" JSC::EncodedJSValue KitGetRequestHandlerFromModule(DevGlobalObject* global, JSC::JSString* encodedModule);
+
+} // namespace Kit
diff --git a/src/kit/client/overlay.css b/src/kit/client/overlay.css
new file mode 100644
index 0000000000..6f4cc360ab
--- /dev/null
+++ b/src/kit/client/overlay.css
@@ -0,0 +1,18 @@
+/*
+ * This file is mounted within Shadow DOM so interference with
+ * the user's application causes no issue. This sheet is used to
+ * style error popups and other elements provided by DevServer.
+ */
+
+* {
+  box-sizing: border-box;
+}
+
+main {
+  font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif;
+}
+
+.error {
+  padding: 1rem;
+  background-color: rgba(255, 0, 0, 0.2);
+}
\ No newline at end of file
diff --git a/src/kit/client/overlay.ts b/src/kit/client/overlay.ts
new file mode 100644
index 0000000000..9ada1d768d
--- /dev/null
+++ b/src/kit/client/overlay.ts
@@ -0,0 +1,27 @@
+import { css } from '../macros' with { type: 'macro' };
+
+// Create a root element to contain all of our DOM nodes.
+var root!: HTMLElement; +var mount; + +if (mode === 'client') { + mount = function mount() { + const wrap = document.createElement('bun-hmr'); + wrap.setAttribute('style', 'position:absolute;display:block;top:0;left:0;width:100%;height:100%;background:transparent'); + const shadow = wrap.attachShadow({ mode: 'open' }); + + const sheet = new CSSStyleSheet(); + sheet.replace(css('client/overlay.css', IS_BUN_DEVELOPMENT)); + shadow.adoptedStyleSheets = [ sheet ]; + + root = document.createElement('main'); + shadow.appendChild(root); + document.body.appendChild(wrap); + } +} + +export function showErrorOverlay(e) { + mount(); + console.error(e); + root.innerHTML = `

oh no, a client side error happened:

${e?.message ? `${e?.name ?? (e?.constructor?.name) ?? 'Error'}: ${e.message}\n` : JSON.stringify(e)}${e?.message ? e?.stack : ''}
`; +} diff --git a/src/kit/hmr-module.ts b/src/kit/hmr-module.ts new file mode 100644 index 0000000000..440e1998cf --- /dev/null +++ b/src/kit/hmr-module.ts @@ -0,0 +1,65 @@ +import * as runtimeHelpers from '../runtime.bun.js'; + +const registry = new Map() + +export type ModuleLoadFunction = (module: HotModule) => void; +export type ExportsCallbackFunction = (new_exports: any) => void; + +/** + * This object is passed as the CommonJS "module", but has a bunch of + * non-standard properties that are used for implementing hot-module + * reloading. It is unacceptable to depend + */ +export class HotModule { + exports: any = {}; + + _ext_exports = undefined; + __esModule = false; + _import_meta?: ImportMeta; + + constructor(public id: Id) {} + + require(id: Id, onReload: null | ExportsCallbackFunction) { + return loadModule(id).exports; + } + + importSync(id: Id, onReload: null | ExportsCallbackFunction) { + const module = loadModule(id); + const { exports, __esModule } = module; + return __esModule + ? exports + : module._ext_exports ??= { ...exports, default: exports }; + } + + importMeta() { + return this._import_meta ??= initImportMeta(this); + } +} + +function initImportMeta(m: HotModule): ImportMeta { + throw new Error("TODO: import meta object"); +} + +// { +// const runtime = new HotModule(0); +// runtime.exports = runtimeHelpers; +// runtime.__esModule = true; +// registry.set(0, runtime); +// } + +export function loadModule(key: Id): HotModule { + let module = registry.get(key); + if (module) return module; + module = new HotModule(key); + registry.set(key, module); + const load = input_graph[key]; + if (!load) { + throw new Error(`Failed to load bundled module '${key}'. This is not a dynamic import, and therefore is a bug in Bun`); + } + load(module); + return module; +} + +runtimeHelpers.__name(HotModule.prototype.importSync, ' importSync') +runtimeHelpers.__name(HotModule.prototype.require, ' require') +runtimeHelpers.__name(loadModule, ' loadModule') diff --git a/src/kit/hmr-runtime-types.d.ts b/src/kit/hmr-runtime-types.d.ts new file mode 100644 index 0000000000..1a830ed220 --- /dev/null +++ b/src/kit/hmr-runtime-types.d.ts @@ -0,0 +1,35 @@ +/* + * A module id is an unsigned 52-bit numeric hash of the filepath. + * + * TODO: how resistant to hash collision is this? if it is not, an alternate approach must be taken. + */ +type Id = number; + +interface Config { + main: Id; + /** If available, this is the Id of `react-refresh/runtime` */ + refresh: Id; +} + +/** + * All modules for the initial bundle. + */ +declare const input_graph: Record; + +declare const config: Config; + +/** + * The runtime is bundled for server and client, which influences + * how hmr connection should be established, as well if there is + * a window to visually display errors with. +*/ +declare const mode: 'client' | 'server'; + +/* What should be `export default`'d */ +declare var server_fetch_function: any; + +/* + * If you are running a debug build of Bun. These debug builds should provide + * helpful information to someone working on the bundler itself. + */ +declare const IS_BUN_DEVELOPMENT: any; diff --git a/src/kit/hmr-runtime.ts b/src/kit/hmr-runtime.ts new file mode 100644 index 0000000000..fe7ba48d26 --- /dev/null +++ b/src/kit/hmr-runtime.ts @@ -0,0 +1,46 @@ +// This file is the entrypoint to the hot-module-reloading runtime +// In the browser, this uses a WebSocket to communicate with the bundler. +// On the server, communication is facilitated using a secret global. 
+import { loadModule } from './hmr-module'; +import { showErrorOverlay } from './client/overlay'; + +if (typeof IS_BUN_DEVELOPMENT !== 'boolean') { throw new Error('DCE is configured incorrectly') } + +// Initialize client-side features. +if (mode === 'client') { + const { refresh } = config; + if(refresh) { + const runtime = loadModule(refresh).exports; + runtime.injectIntoGlobalHook(window); + } +} + +// Load the entry point module +try { + const main = loadModule(config.main); + + // export it on the server side + if (mode === 'server') + server_fetch_function = main.exports.default; + + if (mode === 'client') { + const ws = new WebSocket('/_bun/hmr'); + ws.onopen = (ev) => { + console.log(ev); + } + ws.onmessage = (ev) => { + console.log(ev); + } + ws.onclose = (ev) => { + console.log(ev); + } + ws.onerror = (ev) => { + console.log(ev); + } + } +} catch (e) { + if (mode !== 'client') throw e; + showErrorOverlay(e); +} + +export {} diff --git a/src/kit/kit.zig b/src/kit/kit.zig new file mode 100644 index 0000000000..1ab5e7b4f6 --- /dev/null +++ b/src/kit/kit.zig @@ -0,0 +1,109 @@ +//! Kit is the code name for the work-in-progress "Framework API [SOON]" for Bun. + +/// Temporary function to invoke dev server via JavaScript. Will be +/// replaced with a user-facing API. Refs the event loop forever. +/// +/// Requires one argument object for configuration. Very little is +/// exposed over the JS api as it is not intended to be used for +/// real applications yet. +/// ```ts +/// interface WipDevServerOptions { +/// routes: WipDevServerRoute[] +/// } +/// interface WipDevServerRoute { +/// pattern: string; +/// entrypoint: string; +/// } +/// ``` +pub fn jsWipDevServer(global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JSValue { + if (!bun.FeatureFlags.kit) return .undefined; + + bun.Output.warn( + \\Be advised that Kit is highly experimental, and its API is subject to change + , .{}); + bun.Output.flush(); + + const options = devServerOptionsFromJs(global, callframe.argument(0)) catch { + if (!global.hasException()) + global.throwInvalidArguments("invalid arguments", .{}); + return .zero; + }; + + const t = std.Thread.spawn(.{}, wipDevServer, .{options}) catch @panic("Failed to start"); + t.detach(); + + { + var futex = std.atomic.Value(u32).init(0); + while (true) std.Thread.Futex.wait(&futex, 0); + } +} + +// TODO: this function leaks memory and bad error handling, but that is OK since +// this API is not finalized. 
+fn devServerOptionsFromJs(global: *JSC.JSGlobalObject, options: JSValue) !DevServer.Options { + if (!options.isObject()) return error.Invalid; + const routes_js = try options.getArray(global, "routes") orelse return error.Invalid; + + const len = routes_js.getLength(global); + const routes = try bun.default_allocator.alloc(DevServer.Route, len); + + var it = routes_js.arrayIterator(global); + var i: usize = 0; + while (it.next()) |route| : (i += 1) { + if (!route.isObject()) return error.Invalid; + + const pattern_js = route.get(global, "pattern") orelse return error.Invalid; + if (!pattern_js.isString()) return error.Invalid; + const entry_point_js = route.get(global, "entrypoint") orelse return error.Invalid; + if (!entry_point_js.isString()) return error.Invalid; + + const pattern = pattern_js.toBunString(global).toUTF8(bun.default_allocator); + defer pattern.deinit(); + // this dupe is stupid + const pattern_z = try bun.default_allocator.dupeZ(u8, pattern.slice()); + const entry_point = entry_point_js.toBunString(global).toUTF8(bun.default_allocator).slice(); // leak + + routes[i] = .{ + .pattern = pattern_z, + .entry_point = entry_point, + }; + } + + return .{ + .cwd = bun.getcwdAlloc(bun.default_allocator) catch bun.outOfMemory(), + .routes = routes, + }; +} + +export fn Bun__getTemporaryDevServer(global: *JSC.JSGlobalObject) JSValue { + if (!bun.FeatureFlags.kit) return .undefined; + return JSC.JSFunction.create(global, "wipDevServer", bun.JSC.toJSHostFunction(jsWipDevServer), 0, .{}); +} + +pub fn wipDevServer(options: DevServer.Options) noreturn { + bun.Output.Source.configureNamedThread("Dev Server"); + + const dev = DevServer.init(options); + dev.runLoopForever(); +} + +pub fn getHmrRuntime(mode: enum { server, client }) []const u8 { + return if (Environment.embed_code) + switch (mode) { + .client => @embedFile("kit-codegen/kit.client.js"), + .server => @embedFile("kit-codegen/kit.server.js"), + } + else switch (mode) { + inline else => |m| bun.runtimeEmbedFile(.codegen, "kit." 
++ @tagName(m) ++ ".js"), + }; +} + +pub const DevServer = @import("./DevServer.zig"); + +const std = @import("std"); + +const bun = @import("root").bun; +const Environment = bun.Environment; + +const JSC = bun.JSC; +const JSValue = JSC.JSValue; diff --git a/src/kit/macros.ts b/src/kit/macros.ts new file mode 100644 index 0000000000..dd3f2aa8de --- /dev/null +++ b/src/kit/macros.ts @@ -0,0 +1,11 @@ +import { readFileSync } from 'node:fs'; +import { resolve } from 'node:path'; + +export function css(file: string, is_development: boolean): string { + const contents = readFileSync(resolve(import.meta.dir, file), 'utf-8'); + if (!is_development) { + // TODO: minify + return contents; + } + return contents; +} diff --git a/src/kit/tsconfig.json b/src/kit/tsconfig.json new file mode 100644 index 0000000000..36e3ffb7e8 --- /dev/null +++ b/src/kit/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "lib": ["DOM", "ESNext"], + "module": "esnext", + "target": "esnext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "strict": true, + "noImplicitAny": false, + "allowJs": true, + "downlevelIteration": true, + "esModuleInterop": true, + "skipLibCheck": true, + "jsx": "react-jsx", + }, + "include": ["./hmr-runtime-types.d.ts", "*.ts"] +} \ No newline at end of file diff --git a/src/logger.zig b/src/logger.zig index 70a301c166..0824f0e6a1 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -713,7 +713,7 @@ pub const Log = struct { pub fn toJS(this: Log, global: *JSC.JSGlobalObject, allocator: std.mem.Allocator, fmt: string) JSC.JSValue { const msgs: []const Msg = this.msgs.items; - var errors_stack: [256]*anyopaque = undefined; + var errors_stack: [256]JSC.JSValue = undefined; const count = @as(u16, @intCast(@min(msgs.len, errors_stack.len))); switch (count) { @@ -728,12 +728,12 @@ pub const Log = struct { else => { for (msgs[0..count], 0..) 
|msg, i| { errors_stack[i] = switch (msg.metadata) { - .build => JSC.BuildMessage.create(global, allocator, msg).asVoid(), - .resolve => JSC.ResolveMessage.create(global, allocator, msg, "").asVoid(), + .build => JSC.BuildMessage.create(global, allocator, msg), + .resolve => JSC.ResolveMessage.create(global, allocator, msg, ""), }; } const out = JSC.ZigString.init(fmt); - const agg = global.createAggregateError(errors_stack[0..count].ptr, count, &out); + const agg = global.createAggregateError(errors_stack[0..count], &out); return agg; }, } @@ -1221,7 +1221,7 @@ pub const Log = struct { }; } - pub fn printForLogLevelWithEnableAnsiColors(self: *Log, to: anytype, comptime enable_ansi_colors: bool) !void { + pub fn printForLogLevelWithEnableAnsiColors(self: *const Log, to: anytype, comptime enable_ansi_colors: bool) !void { var needs_newline = false; if (self.warnings > 0 and self.errors > 0) { // Print warnings at the top diff --git a/src/main.zig b/src/main.zig index 8aff3b147f..9c4df6072f 100644 --- a/src/main.zig +++ b/src/main.zig @@ -1,7 +1,5 @@ const std = @import("std"); const builtin = @import("builtin"); -pub const build_options = @import("build_options"); - const bun = @import("root").bun; const Output = bun.Output; const Environment = bun.Environment; diff --git a/src/napi/napi.zig b/src/napi/napi.zig index d48b85087b..51033f2243 100644 --- a/src/napi/napi.zig +++ b/src/napi/napi.zig @@ -793,7 +793,8 @@ pub export fn napi_make_callback(env: napi_env, _: *anyopaque, recv_: napi_value @as([*]const JSC.JSValue, @ptrCast(args.?))[0..arg_count] else &.{}, - ); + ) catch |err| // TODO: handle errors correctly + env.takeException(err); if (maybe_result) |result| { result.set(env, res); @@ -1561,7 +1562,6 @@ pub const ThreadSafeFunction = struct { pub fn call(this: *ThreadSafeFunction) void { const task = this.channel.tryReadItem() catch null orelse return; - const vm = this.event_loop.virtual_machine; const globalObject = this.env; this.tracker.willDispatch(globalObject); @@ -1572,10 +1572,9 @@ pub const ThreadSafeFunction = struct { if (js_function.isEmptyOrUndefinedOrNull()) { return; } - const err = js_function.call(globalObject, .undefined, &.{}); - if (err.isAnyError()) { - _ = vm.uncaughtException(globalObject, err, false); - } + + _ = js_function.call(globalObject, .undefined, &.{}) catch |err| + globalObject.reportActiveExceptionAsUnhandled(err); }, .c => |cb| { if (comptime bun.Environment.isDebug) { diff --git a/src/options.zig b/src/options.zig index 0339e43a9a..d3cf1bfde7 100644 --- a/src/options.zig +++ b/src/options.zig @@ -29,6 +29,8 @@ const Analytics = @import("./analytics/analytics_thread.zig"); const MacroRemap = @import("./resolver/package_json.zig").MacroMap; const DotEnv = @import("./env_loader.zig"); +pub const Define = defines.Define; + const assert = bun.assert; pub const WriteDestination = enum { @@ -589,14 +591,45 @@ pub const Target = enum { }; pub const Format = enum { + /// ES module format + /// This is the default format esm, - cjs, + + /// Immediately-invoked function expression + /// (function(){ + /// ... + /// })(); iife, + /// CommonJS + cjs, + + /// Kit's uses a special module format for Hot-module-reloading. It includes a + /// runtime payload, sourced from src/kit/hmr-runtime.ts. + /// + /// ((input_graph, entry_point_key) => { + /// ... runtime code ... + /// })([ + /// "module1.ts"(require, module) { ... }, + /// "module2.ts"(require, module) { ... 
}, + /// ], "module1.ts"); + internal_kit_dev, + + pub fn keepES6ImportExportSyntax(this: Format) bool { + return this == .esm; + } + + pub inline fn isESM(this: Format) bool { + return this == .esm; + } + pub const Map = bun.ComptimeStringMap(Format, .{ .{ "esm", .esm }, .{ "cjs", .cjs }, .{ "iife", .iife }, + + // TODO: Disable this outside of debug builds + .{ "internal_kit_dev", .internal_kit_dev }, }); pub fn fromJS(global: *JSC.JSGlobalObject, format: JSC.JSValue, exception: JSC.C.ExceptionRef) ?Format { @@ -1308,7 +1341,7 @@ pub const ResolveFileExtensions = struct { }; }; -pub fn loadersFromTransformOptions(allocator: std.mem.Allocator, _loaders: ?Api.LoaderMap, target: Target) !bun.StringArrayHashMap(Loader) { +pub fn loadersFromTransformOptions(allocator: std.mem.Allocator, _loaders: ?Api.LoaderMap, target: Target) std.mem.Allocator.Error!bun.StringArrayHashMap(Loader) { const input_loaders = _loaders orelse std.mem.zeroes(Api.LoaderMap); const loader_values = try allocator.alloc(Loader, input_loaders.loaders.len); @@ -1402,29 +1435,6 @@ pub const PackagesOption = enum { }); }; -pub const OutputFormat = enum { - preserve, - - /// ES module format - /// This is the default format - esm, - /// Immediately-invoked function expression - /// ( - /// function(){} - /// )(); - iife, - /// CommonJS - cjs, - - pub fn keepES6ImportExportSyntax(this: OutputFormat) bool { - return this == .esm; - } - - pub inline fn isESM(this: OutputFormat) bool { - return this == .esm; - } -}; - /// BundleOptions is used when ResolveMode is not set to "disable". /// BundleOptions is effectively webpack + babel pub const BundleOptions = struct { @@ -1443,6 +1453,7 @@ pub const BundleOptions = struct { react_server_components: bool = false, react_server_components_boundary: string = "", hot_module_reloading: bool = false, + react_fast_refresh: bool = false, inject: ?[]string = null, origin: URL = URL{}, output_dir_handle: ?Dir = null, @@ -1460,7 +1471,7 @@ pub const BundleOptions = struct { serve: bool = false, // only used by bundle_v2 - output_format: OutputFormat = .esm, + output_format: Format = .esm, append_package_version_in_query_string: bool = false, diff --git a/src/react-refresh.js b/src/react-refresh.js deleted file mode 100644 index d3453d5916..0000000000 --- a/src/react-refresh.js +++ /dev/null @@ -1,348 +0,0 @@ -// This is based on v0.11.0 of react-refresh -// The following changes: -// - Removed __DEV__ checks -// - inlined REACT_MEMO_TYPE & REACT_FORWARD_REF_TYPE -// - minified - -const F = "for" in Symbol ? Symbol.for("react.forward_ref") : 60112, - C = "for" in Symbol ? Symbol.for("react.memo") : 60115, - O = typeof WeakMap == "function" ? WeakMap : Map, - T = new Map(), - k = new O(), - m = new O(), - M = new O(); -let g = []; -const b = new Map(), - w = new Map(), - c = new Set(), - p = new Set(), - R = typeof WeakMap == "function" ? new WeakMap() : null; -let S = !1; -function _(e) { - if (e.fullKey !== null) return e.fullKey; - let t = e.ownKey, - n; - try { - n = e.getCustomHooks(); - } catch { - return (e.forceReset = !0), (e.fullKey = t), t; - } - for (let o = 0; o < n.length; o++) { - const l = n[o]; - if (typeof l != "function") return (e.forceReset = !0), (e.fullKey = t), t; - const s = m.get(l); - if (s === void 0) continue; - const r = _(s); - s.forceReset && (e.forceReset = !0), - (t += - ` ---- -` + r); - } - return (e.fullKey = t), t; -} -function D(e, t) { - const n = m.get(e), - o = m.get(t); - return n === void 0 && o === void 0 - ? 
!0 - : !(n === void 0 || o === void 0 || _(n) !== _(o) || o.forceReset); -} -function B(e) { - return e.prototype && e.prototype.isReactComponent; -} -function v(e, t) { - return B(e) || B(t) ? !1 : !!D(e, t); -} -function I(e) { - return M.get(e); -} -function P(e) { - const t = new Map(); - return ( - e.forEach((n, o) => { - t.set(o, n); - }), - t - ); -} -function L(e) { - const t = new Set(); - return ( - e.forEach((n) => { - t.add(n); - }), - t - ); -} -function H(e, t) { - try { - return e[t]; - } catch { - return; - } -} -function j() { - if (g.length === 0 || S) return null; - S = !0; - try { - const e = new Set(), - t = new Set(), - n = g; - (g = []), - n.forEach((f) => { - let [i, u] = f; - const a = i.current; - M.set(a, i), - M.set(u, i), - (i.current = u), - v(a, u) ? t.add(i) : e.add(i); - }); - const o = { updatedFamilies: t, staleFamilies: e }; - b.forEach((f) => { - f.setRefreshHandler(I); - }); - let l = !1, - s = null; - const r = L(p), - h = L(c), - d = P(w); - if ( - (r.forEach((f) => { - const i = d.get(f); - if (i === void 0) - throw new Error( - "Could not find helpers for a root. This is a bug in React Refresh.", - ); - if ((!p.has(f), R === null || !R.has(f))) return; - const u = R.get(f); - try { - i.scheduleRoot(f, u); - } catch (a) { - l || ((l = !0), (s = a)); - } - }), - h.forEach((f) => { - const i = d.get(f); - if (i === void 0) - throw new Error( - "Could not find helpers for a root. This is a bug in React Refresh.", - ); - !c.has(f); - try { - i.scheduleRefresh(f, o); - } catch (u) { - l || ((l = !0), (s = u)); - } - }), - l) - ) - throw s; - return o; - } finally { - S = !1; - } -} -function K(e, t) { - if ( - e === null || - (typeof e != "function" && typeof e != "object") || - k.has(e) - ) - return; - let n = T.get(t); - if ( - (n === void 0 ? ((n = { current: e }), T.set(t, n)) : g.push([n, e]), - k.set(e, n), - typeof e == "object" && e !== null) - ) - switch (H(e, "$$typeof")) { - case F: - K(e.render, t + "$render"); - break; - case C: - K(e.type, t + "$type"); - break; - } -} -function E(e, t) { - let n = arguments.length > 2 && arguments[2] !== void 0 ? arguments[2] : !1, - o = arguments.length > 3 ? arguments[3] : void 0; - if ( - (m.has(e) || - m.set(e, { - forceReset: n, - ownKey: t, - fullKey: null, - getCustomHooks: o || (() => []), - }), - typeof e == "object" && e !== null) - ) - switch (H(e, "$$typeof")) { - case F: - E(e.render, t, n, o); - break; - case C: - E(e.type, t, n, o); - break; - } -} -function A(e) { - const t = m.get(e); - t !== void 0 && _(t); -} -function $(e) { - return T.get(e); -} -function W(e) { - return k.get(e); -} -function x(e) { - const t = new Set(); - return ( - c.forEach((n) => { - const o = w.get(n); - if (o === void 0) - throw new Error( - "Could not find helpers for a root. This is a bug in React Refresh.", - ); - o.findHostInstancesForRefresh(n, e).forEach((s) => { - t.add(s); - }); - }), - t - ); -} -function z(e) { - let t = e.__REACT_DEVTOOLS_GLOBAL_HOOK__; - if (t === void 0) { - let s = 0; - e.__REACT_DEVTOOLS_GLOBAL_HOOK__ = t = { - renderers: new Map(), - supportsFiber: !0, - inject(r) { - return s++; - }, - onScheduleFiberRoot(r, h, d) {}, - onCommitFiberRoot(r, h, d, f) {}, - onCommitFiberUnmount() {}, - }; - } - if (t.isDisabled) { - console.warn( - "Something has shimmed the React DevTools global hook (__REACT_DEVTOOLS_GLOBAL_HOOK__). 
Fast Refresh is not compatible with this shim and will be disabled.", - ); - return; - } - const n = t.inject; - (t.inject = function (s) { - const r = n.apply(this, arguments); - return ( - typeof s.scheduleRefresh == "function" && - typeof s.setRefreshHandler == "function" && - b.set(r, s), - r - ); - }), - t.renderers.forEach((s, r) => { - typeof s.scheduleRefresh == "function" && - typeof s.setRefreshHandler == "function" && - b.set(r, s); - }); - const o = t.onCommitFiberRoot, - l = t.onScheduleFiberRoot || (() => {}); - (t.onScheduleFiberRoot = function (s, r, h) { - return ( - S || (p.delete(r), R !== null && R.set(r, h)), l.apply(this, arguments) - ); - }), - (t.onCommitFiberRoot = function (s, r, h, d) { - const f = b.get(s); - if (f !== void 0) { - w.set(r, f); - const i = r.current, - u = i.alternate; - if (u !== null) { - const a = - u.memoizedState != null && - u.memoizedState.element != null && - c.has(r), - y = i.memoizedState != null && i.memoizedState.element != null; - !a && y - ? (c.add(r), p.delete(r)) - : (a && y) || - (a && !y - ? (c.delete(r), d ? p.add(r) : w.delete(r)) - : !a && !y && d && p.add(r)); - } else c.add(r); - } - return o.apply(this, arguments); - }); -} -function G() { - return !1; -} -function N() { - return c.size; -} -function U() { - let e, - t, - n = !1; - return function (o, l, s, r) { - if (typeof l == "string") - return ( - e || ((e = o), (t = typeof r == "function")), - o != null && - (typeof o == "function" || typeof o == "object") && - E(o, l, s, r), - o - ); - !n && t && ((n = !0), A(e)); - }; -} -function V(e) { - switch (typeof e) { - case "function": { - if (e.prototype != null) { - if (e.prototype.isReactComponent) return !0; - const n = Object.getOwnPropertyNames(e.prototype); - if ( - n.length > 1 || - n[0] !== "constructor" || - e.prototype.__proto__ !== Object.prototype - ) - return !1; - } - const t = e.name || e.displayName; - return typeof t == "string" && /^[A-Z]/.test(t); - } - case "object": { - if (e != null) - switch (H(e, "$$typeof")) { - case F: - case C: - return !0; - default: - return !1; - } - return !1; - } - default: - return !1; - } -} -export { - N as _getMountedRootCount, - A as collectCustomHooksForSignature, - U as createSignatureFunctionForTransform, - x as findAffectedHostInstances, - $ as getFamilyByID, - W as getFamilyByType, - G as hasUnrecoverableErrors, - z as injectIntoGlobalHook, - V as isLikelyComponentType, - j as performReactRefresh, - K as register, - E as setSignature, -}; diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index e87873c37f..11052982e7 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -662,9 +662,8 @@ pub const Resolver = struct { comptime preference: PackageJSON.LoadFramework, comptime load_defines: bool, ) !void { - // We want to enable developers to integrate frameworks without waiting on official support. - // But, we still want the command to do the actual framework integration to be succint + // But, we still want the command to do the actual framework integration to be succinct // This lets users type "--use next" instead of "--use bun-framework-next" // If they're using a local file path, we skip this. if (isPackagePath(package)) { diff --git a/src/runtime.js b/src/runtime.js index f1adb2e6c8..b311e9f15f 100644 --- a/src/runtime.js +++ b/src/runtime.js @@ -2,10 +2,8 @@ // value. And since it isnt exported, it will always be tree-shaken away. 
var __INVALID__REF__; -var tagSymbol; -var cjsRequireSymbol; -// This ordering is deliberate so that the printer does optimizes these into a -// single destructuring assignment. +// This ordering is deliberate so that the printer optimizes +// them into a single destructuring assignment. var __create = Object.create; var __descs = Object.getOwnPropertyDescriptors; var __getProtoOf = Object.getPrototypeOf; @@ -14,8 +12,6 @@ var __getOwnPropNames = Object.getOwnPropertyNames; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __hasOwnProp = Object.prototype.hasOwnProperty; -export var __markAsModule = target => __defProp(target, "__esModule", { value: true, configurable: true }); - // This is used to implement "export * from" statements. It copies properties // from the imported module to the current module's ESM export object. If the // current module is an entry point and the target format is CommonJS, we @@ -86,104 +82,9 @@ export var __toCommonJS = /* @__PURE__ */ from => { return entry; }; -// lazy require to prevent loading one icon from a design system -export var $$lzy = (target, mod, props) => { - for (let key in props) { - if (!__hasOwnProp.call(target, key)) - __defProp(target, key, { - get: () => mod()[props[key]], - enumerable: true, - configurable: true, - }); - } - return target; -}; - // When you do know the module is CJS export var __commonJS = (cb, mod) => () => (mod || cb((mod = { exports: {} }).exports, mod), mod.exports); -// When you don't know if the module is going to be ESM or CJS -export var __cJS2eSM = (cb, name) => { - var mod; - var origExports; - var has_run = false; - tagSymbol ??= Symbol.for("CommonJSTransformed"); - cjsRequireSymbol ??= Symbol.for("CommonJS"); - - const requireFunction = function load() { - if (has_run) { - return mod.exports; - } - - has_run = true; - cb(((mod = { exports: {} }), mod), mod.exports); - - var mod_exports = (origExports = mod.exports); - - const kind = typeof mod_exports; - - if ((kind === "object" || kind === "function") && !mod_exports[tagSymbol]) { - const extensible = Object.isExtensible(mod_exports); - if (!extensible) { - // slow path: it's a function we need to wrap - // example: webpack - if (kind === "function") { - mod_exports = function () { - return origExports.apply(this, arguments); - }; - Object.setPrototypeOf(mod_exports, __getProtoOf(origExports)); - Object.defineProperties(mod_exports, Object.getOwnPropertyDescriptors(origExports)); - } else { - mod_exports = __create(__getProtoOf(mod_exports), Object.getOwnPropertyDescriptors(mod_exports)); - } - } - - Object.defineProperty(mod_exports, tagSymbol, { - value: true, - enumerable: false, - configurable: false, - }); - - if (!("default" in mod_exports)) { - Object.defineProperty(mod_exports, "default", { - get() { - return origExports; - }, - set(v) { - if (v === mod.exports) return; - origExports = v; - return true; - }, - // enumerable: false is important here - enumerable: false, - configurable: true, - }); - } - - if (!extensible) { - // can only be frozen if it's not extensible - if (Object.isFrozen(origExports)) { - Object.freeze(mod_exports); - } else { - Object.preventExtensions(mod_exports); - } - } - } - - return mod_exports; - }; - - requireFunction[cjsRequireSymbol] = 1; - return requireFunction; -}; - -export var __internalIsCommonJSNamespace = /* @__PURE__ */ namespace => - namespace != null && - typeof namespace === "object" && - ((namespace.default && namespace.default[cjsRequireSymbol]) || namespace[cjsRequireSymbol]); - -export var $$m 
= __commonJS; - export var __name = (target, name) => { Object.defineProperty(target, "name", { value: name, @@ -227,12 +128,12 @@ export var __exportDefault = (target, value) => { }); }; -function hasAnyProps(obj) { +function __hasAnyProps(obj) { for (let key in obj) return true; return false; } -function mergeDefaultProps(props, defaultProps) { +function __mergeDefaultProps(props, defaultProps) { var result = __create(defaultProps, __descs(props)); for (let key in defaultProps) { @@ -243,11 +144,11 @@ function mergeDefaultProps(props, defaultProps) { return result; } export var __merge = (props, defaultProps) => { - return !hasAnyProps(defaultProps) + return !__hasAnyProps(defaultProps) ? props - : !hasAnyProps(props) + : !__hasAnyProps(props) ? defaultProps - : mergeDefaultProps(props, defaultProps); + : __mergeDefaultProps(props, defaultProps); }; export var __legacyDecorateClassTS = function (decorators, target, key, desc) { diff --git a/src/runtime.zig b/src/runtime.zig index d722b37cff..d42d657c7d 100644 --- a/src/runtime.zig +++ b/src/runtime.zig @@ -52,8 +52,6 @@ pub const ErrorCSS = struct { } }; -pub const ReactRefresh = @embedFile("./react-refresh.js"); - pub const ErrorJS = struct { pub inline fn sourceContent() string { if (comptime Environment.isDebug) { @@ -117,7 +115,7 @@ pub const Fallback = struct { pub inline fn scriptContent() string { if (comptime Environment.isDebug) { - const dirpath = comptime bun.Environment.base_path ++ (bun.Dirname.dirname(u8, @src().file) orelse ""); + const dirpath = comptime bun.Environment.base_path ++ "/" ++ (bun.Dirname.dirname(u8, @src().file) orelse ""); var buf: bun.PathBuffer = undefined; const user = bun.getUserName(&buf) orelse ""; const dir = std.mem.replaceOwned( @@ -208,8 +206,17 @@ pub const Runtime = struct { } pub const Features = struct { + /// Enable the React Fast Refresh transform. What this does exactly + /// is documented in js_parser, search for `const ReactRefresh` react_fast_refresh: bool = false, + + /// `hot_module_reloading` is specific to if we are using bun.kit.DevServer. + /// It can be enabled on the command line with --format=internal_kit_dev + /// + /// Standalone usage of this flag / usage of this flag + /// without '--format' set is an unsupported use case. hot_module_reloading: bool = false, + is_macro_runtime: bool = false, top_level_await: bool = false, auto_import_jsx: bool = false, @@ -231,11 +238,16 @@ pub const Runtime = struct { trim_unused_imports: bool = false, /// Use `import.meta.require()` instead of require()? - /// This is only supported in Bun. + /// This is only supported with --target=bun use_import_meta_require: bool = false, replace_exports: ReplaceableExport.Map = .{}, + /// Scan for '// @bun' at the top of this file, halting a parse if it is + /// seen. This is used in `bun run` after a `bun build --target=bun`, + /// and you know the contents is already correct. + /// + /// This comment must never be used manually. 
dont_bundle_twice: bool = false, /// This is a list of packages which even when require() is used, we will @@ -310,29 +322,25 @@ pub const Runtime = struct { pub const ActivateFunction = "activate"; }; + /// See js_parser.StaticSymbolName pub const GeneratedSymbol = struct { primary: Ref, backup: Ref, ref: Ref, + + pub const empty: GeneratedSymbol = .{ .ref = Ref.None, .primary = Ref.None, .backup = Ref.None }; }; - // If you change this, remember to update "runtime.footer.js" and rebuild the runtime.js + // If you change this, remember to update "runtime.js" pub const Imports = struct { __name: ?GeneratedSymbol = null, - __toModule: ?GeneratedSymbol = null, - __cJS2eSM: ?GeneratedSymbol = null, __require: ?GeneratedSymbol = null, __export: ?GeneratedSymbol = null, __reExport: ?GeneratedSymbol = null, - __load: ?GeneratedSymbol = null, - @"$$m": ?GeneratedSymbol = null, - @"$$lzy": ?GeneratedSymbol = null, - __HMRModule: ?GeneratedSymbol = null, - __HMRClient: ?GeneratedSymbol = null, - __FastRefreshModule: ?GeneratedSymbol = null, __exportValue: ?GeneratedSymbol = null, __exportDefault: ?GeneratedSymbol = null, - __FastRefreshRuntime: ?GeneratedSymbol = null, + // __refreshRuntime: ?GeneratedSymbol = null, + // __refreshSig: ?GeneratedSymbol = null, // $RefreshSig$ __merge: ?GeneratedSymbol = null, __legacyDecorateClassTS: ?GeneratedSymbol = null, __legacyDecorateParamTS: ?GeneratedSymbol = null, @@ -342,23 +350,12 @@ pub const Runtime = struct { __callDispose: ?GeneratedSymbol = null, pub const all = [_][]const u8{ - // __HMRClient goes first - // This is so we can call Bun.activate(true) as soon as possible - "__HMRClient", "__name", - "__toModule", "__require", - "__cJS2eSM", "__export", "__reExport", - "__load", - "$$m", - "$$lzy", - "__HMRModule", - "__FastRefreshModule", "__exportValue", "__exportDefault", - "__FastRefreshRuntime", "__merge", "__legacyDecorateClassTS", "__legacyDecorateParamTS", @@ -413,7 +410,7 @@ pub const Runtime = struct { defer this.i += 1; switch (this.i) { - inline 0...21 => |t| { + inline 0...all.len - 1 => |t| { if (@field(this.runtime_imports, all[t])) |val| { return Entry{ .key = t, .value = val.ref }; } @@ -429,7 +426,7 @@ pub const Runtime = struct { }; pub fn iter(imports: *Imports) Iterator { - return Iterator{ .runtime_imports = imports }; + return .{ .runtime_imports = imports }; } pub fn contains(imports: *const Imports, comptime key: string) bool { @@ -462,7 +459,7 @@ pub const Runtime = struct { key: anytype, ) ?Ref { return switch (key) { - inline 0...21 => |t| (@field(imports, all[t]) orelse return null).ref, + inline 0...all.len - 1 => |t| (@field(imports, all[t]) orelse return null).ref, else => null, }; } diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index 5c79f18655..dcf6ccb95f 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -1705,7 +1705,11 @@ pub const Interpreter = struct { const loop = this.event_loop.js; this.keep_alive.disable(); loop.enter(); - _ = resolve.call(globalThis, .undefined, &.{ JSValue.jsNumberFromU16(exit_code), this.getBufferedStdout(globalThis), this.getBufferedStderr(globalThis) }); + _ = resolve.call(globalThis, .undefined, &.{ + JSValue.jsNumberFromU16(exit_code), + this.getBufferedStdout(globalThis), + this.getBufferedStderr(globalThis), + }) catch |err| globalThis.reportActiveExceptionAsUnhandled(err); JSC.Codegen.JSShellInterpreter.resolveSetCached(this_jsvalue, globalThis, .undefined); JSC.Codegen.JSShellInterpreter.rejectSetCached(this_jsvalue, globalThis, 
.undefined); loop.exit(); @@ -1731,7 +1735,11 @@ pub const Interpreter = struct { this.keep_alive.disable(); loop.enter(); - reject.call(globalThis, &[_]JSValue{ JSValue.jsNumberFromChar(1), this.getBufferedStdout(globalThis), this.getBufferedStderr(globalThis) }); + _ = reject.call(globalThis, &[_]JSValue{ + JSValue.jsNumberFromChar(1), + this.getBufferedStdout(globalThis), + this.getBufferedStderr(globalThis), + }) catch |err| globalThis.reportActiveExceptionAsUnhandled(err); JSC.Codegen.JSShellInterpreter.resolveSetCached(this_jsvalue, globalThis, .undefined); JSC.Codegen.JSShellInterpreter.rejectSetCached(this_jsvalue, globalThis, .undefined); diff --git a/src/sql/postgres.zig b/src/sql/postgres.zig index 35195afecd..009146cdf9 100644 --- a/src/sql/postgres.zig +++ b/src/sql/postgres.zig @@ -2888,7 +2888,7 @@ pub const PostgresSQLConnection = struct { &[_]JSValue{ instance, }, - ); + ) catch |e| this.globalObject.reportActiveExceptionAsUnhandled(e); } pub fn onClose(this: *PostgresSQLConnection) void { diff --git a/src/watcher.zig b/src/watcher.zig index aad3b5c4f9..b8bfbc2574 100644 --- a/src/watcher.zig +++ b/src/watcher.zig @@ -535,6 +535,8 @@ pub fn getHash(filepath: string) HashType { return @as(HashType, @truncate(bun.hash(filepath))); } +// TODO: this should not be a function with a generic context. every function +// besides `watchLoop` does not refer to context. pub fn NewWatcher(comptime ContextType: type) type { return struct { const Watcher = @This(); @@ -595,7 +597,7 @@ pub fn NewWatcher(comptime ContextType: type) type { this.running = false; } else { // if the mutex is locked, then that's now a UAF. - this.mutex.releaseAssertUnlocked("Internal consistency error: watcher mutex is locked when it should not be."); + this.mutex.releaseAssertUnlocked("Watcher mutex is locked when it should not be."); if (close_descriptors and this.running) { const fds = this.watchlist.items(.fd); @@ -700,7 +702,7 @@ pub fn NewWatcher(comptime ContextType: type) type { null, ); - // Give the events more time to coallesce + // Give the events more time to coalesce if (count_ < 128 / 2) { const remain = 128 - count_; var timespec = std.posix.timespec{ .tv_sec = 0, .tv_nsec = 100_000 }; @@ -1276,5 +1278,9 @@ pub fn NewWatcher(comptime ContextType: type) type { } } } + + pub fn getResolveWatcher(watcher: *Watcher) bun.resolver.AnyResolveWatcher { + return bun.resolver.ResolveWatcher(*@This(), @typeInfo(ContextType).Pointer.child.onMaybeWatchDirectory).init(watcher); + } }; } diff --git a/test/bundler/__snapshots__/bun-build-api.test.ts.snap b/test/bundler/__snapshots__/bun-build-api.test.ts.snap index e17a2204bc..ee8156b900 100644 --- a/test/bundler/__snapshots__/bun-build-api.test.ts.snap +++ b/test/bundler/__snapshots__/bun-build-api.test.ts.snap @@ -94,3 +94,478 @@ NS.then(({ fn: fn2 }) => { }); " `; + +exports[`Bun.build Bun.write(BuildArtifact) 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build outdir + reading out blobs works 1`] = ` +"var __defProp = 
Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build BuildArtifact properties: hash 1`] = `"r6c8x1cc"`; + +exports[`Bun.build BuildArtifact properties + entry.naming: hash 1`] = `"vanwb97w"`; + +exports[`Bun.build BuildArtifact properties sourcemap: hash index.js 1`] = `"r6c8x1cc"`; + +exports[`Bun.build BuildArtifact properties sourcemap: hash index.js.map 1`] = `"00000000"`; + +exports[`Bun.build new Response(BuildArtifact) sets content type: response text 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build Bun.write(BuildArtifact) 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build outdir + reading out blobs works 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build BuildArtifact properties: hash 1`] = `"5909xc4p"`; + +exports[`Bun.build BuildArtifact properties + entry.naming: hash 1`] = `"e1cnkf2m"`; + +exports[`Bun.build BuildArtifact properties sourcemap: hash index.js 1`] = `"5909xc4p"`; + +exports[`Bun.build BuildArtifact properties sourcemap: hash index.js.map 1`] = `"00000000"`; + +exports[`Bun.build new Response(BuildArtifact) sets content type: response text 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); 
+function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build Bun.write(BuildArtifact) 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build outdir + reading out blobs works 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build BuildArtifact properties: hash 1`] = `"5909xc4p"`; + +exports[`Bun.build BuildArtifact properties + entry.naming: hash 1`] = `"e1cnkf2m"`; + +exports[`Bun.build BuildArtifact properties sourcemap: hash index.js 1`] = `"5909xc4p"`; + +exports[`Bun.build BuildArtifact properties sourcemap: hash index.js.map 1`] = `"00000000"`; + +exports[`Bun.build new Response(BuildArtifact) sets content type: response text 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build Bun.write(BuildArtifact) 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build outdir + reading out blobs works 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = 
Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build BuildArtifact properties: hash 1`] = `"5909xc4p"`; + +exports[`Bun.build BuildArtifact properties + entry.naming: hash 1`] = `"e1cnkf2m"`; + +exports[`Bun.build BuildArtifact properties sourcemap: hash index.js 1`] = `"5909xc4p"`; + +exports[`Bun.build BuildArtifact properties sourcemap: hash index.js.map 1`] = `"00000000"`; + +exports[`Bun.build new Response(BuildArtifact) sets content type: response text 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build Bun.write(BuildArtifact) 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build outdir + reading out blobs works 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; + +exports[`Bun.build BuildArtifact properties: hash 1`] = `"5909xc4p"`; + +exports[`Bun.build BuildArtifact properties + entry.naming: hash 1`] = `"e1cnkf2m"`; + +exports[`Bun.build BuildArtifact properties sourcemap: hash index.js 1`] = `"5909xc4p"`; + +exports[`Bun.build BuildArtifact properties sourcemap: hash index.js.map 1`] = `"00000000"`; + +exports[`Bun.build new Response(BuildArtifact) sets content type: response text 1`] = ` +"var __defProp = Object.defineProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { + get: all[name], + enumerable: true, + configurable: true, + set: (newValue) => all[name] = () => newValue + }); +}; + +// test/bundler/fixtures/trivial/fn.js +var exports_fn = {}; +__export(exports_fn, { + fn: () => fn +}); +function fn(a) { + return a + 42; +} + +// test/bundler/fixtures/trivial/index.js +var NS = Promise.resolve().then(() => exports_fn); +NS.then(({ fn: fn2 }) => { + console.log(fn2(42)); +}); +" +`; diff --git a/test/bundler/bundler_kit_dev.test.ts b/test/bundler/bundler_kit_dev.test.ts new file mode 100644 index 0000000000..753a33c671 --- /dev/null +++ b/test/bundler/bundler_kit_dev.test.ts @@ -0,0 +1,39 @@ +import { itBundled } 
from "./expectBundled"; +import { describe, expect } from "bun:test"; + +describe("bundler", async () => { + itBundled('kit_dev/HelloWorld', { + files: { + '/a.js': `console.log("Hello, world!")`, + }, + format: 'internal_kit_dev', + target: 'bun', + run: { stdout: 'Hello, world!' }, + onAfterBundle(api) { + // `importSync` is one of the functions the runtime includes. + // it is on a property access so it will not be mangled + api.expectFile('out.js').toContain('importSync'); + }, + }); + itBundled('kit_dev/SimpleCommonJS', { + files: { + '/a.js': `console.log(require('./b').message)`, + '/b.js': `module.exports = { message: "Hello, world!" }`, + }, + format: 'internal_kit_dev', + target: 'bun', + run: { stdout: 'Hello, world!' }, + }); + itBundled('kit_dev/SimpleESM', { + files: { + '/a.js': ` + import message from './b'; + console.log(message); + `, + '/b.js': `export default "Hello, world!"`, + }, + format: 'internal_kit_dev', + target: 'bun', + run: { stdout: 'Hello, world!' }, + }); +}); diff --git a/test/bundler/expectBundled.ts b/test/bundler/expectBundled.ts index 6107cbb9f2..558e211237 100644 --- a/test/bundler/expectBundled.ts +++ b/test/bundler/expectBundled.ts @@ -161,7 +161,7 @@ export interface BundlerTestInput { /** Defaults to "bundle" */ packages?: "bundle" | "external"; /** Defaults to "esm" */ - format?: "esm" | "cjs" | "iife"; + format?: "esm" | "cjs" | "iife" | "internal_kit_dev"; globalName?: string; ignoreDCEAnnotations?: boolean; emitDCEAnnotations?: boolean; @@ -337,6 +337,8 @@ export interface BundlerTestRunOptions { */ errorLineMatch?: RegExp; + env?: Record; + runtime?: "bun" | "node"; setCwd?: boolean; @@ -477,8 +479,8 @@ function expectBundled( if (bundling === false && entryPoints.length > 1) { throw new Error("bundling:false only supports a single entry point"); } - if (!ESBUILD && format !== "esm") { - throw new Error("formats besides esm not implemented in bun build"); + if (!ESBUILD && (format === "cjs" || format === 'iife')) { + throw new Error(`format ${format} not implemented in bun build`); } if (!ESBUILD && metafile) { throw new Error("metafile not implemented in bun build"); @@ -628,7 +630,7 @@ function expectBundled( outfile ? `--outfile=${outfile}` : `--outdir=${outdir}`, define && Object.entries(define).map(([k, v]) => ["--define", `${k}=${v}`]), `--target=${target}`, - // `--format=${format}`, + `--format=${format}`, external && external.map(x => ["--external", x]), packages && ["--packages", packages], conditions && conditions.map(x => ["--conditions", x]), @@ -879,10 +881,11 @@ function expectBundled( if (!ESBUILD) { const warningText = stderr!.toUnixString(); const allWarnings = warnParser(warningText).map(([error, source]) => { + if(!source) return; const [_str2, fullFilename, line, col] = source.match(/bun-build-tests[\/\\](.*):(\d+):(\d+)/)!; const file = fullFilename.slice(id.length + path.basename(tempDirectory).length + 1).replaceAll("\\", "/"); return { error, file, line, col }; - }); + }).filter(Boolean); const expectedWarnings = bundleWarnings ? 
Object.entries(bundleWarnings).flatMap(([file, v]) => v.map(error => ({ file, error }))) : null; diff --git a/test/bundler/transpiler/__snapshots__/transpiler.test.js.snap b/test/bundler/transpiler/__snapshots__/transpiler.test.js.snap index 5f19f866fe..50a51491f4 100644 --- a/test/bundler/transpiler/__snapshots__/transpiler.test.js.snap +++ b/test/bundler/transpiler/__snapshots__/transpiler.test.js.snap @@ -181,3 +181,555 @@ export { }; " `; + +exports[`Bun.Transpiler using statements work right 1`] = ` +"let __bun_temp_ref_1$ = []; +try { +const x = __using(__bun_temp_ref_1$, a, 0); +} catch (__bun_temp_ref_2$) { +var __bun_temp_ref_3$ = __bun_temp_ref_2$, __bun_temp_ref_4$ = 1; +} finally { +__callDispose(__bun_temp_ref_1$, __bun_temp_ref_3$, __bun_temp_ref_4$); +}" +`; + +exports[`Bun.Transpiler using statements work right 2`] = ` +"let __bun_temp_ref_1$ = []; +try { +const x = __using(__bun_temp_ref_1$, a, 1); +} catch (__bun_temp_ref_2$) { +var __bun_temp_ref_3$ = __bun_temp_ref_2$, __bun_temp_ref_4$ = 1; +} finally { +var __bun_temp_ref_5$ = __callDispose(__bun_temp_ref_1$, __bun_temp_ref_3$, __bun_temp_ref_4$); +__bun_temp_ref_5$ && await __bun_temp_ref_5$; +}" +`; + +exports[`Bun.Transpiler using statements work right 3`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 4`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 5`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using statements work right 6`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using statements work right 7`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 8`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, 
__bun_temp_ref_1$, 0); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 9`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using statements work right 10`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using top level 1`] = ` +"import { +__callDispose as __callDispose, +__using as __using +} from "bun:wrap"; +export function c(e) { + let __bun_temp_ref_1$ = []; + try { + const f = __using(__bun_temp_ref_1$, g(a), 0); + return f.h; + } catch (__bun_temp_ref_2$) { + var __bun_temp_ref_3$ = __bun_temp_ref_2$, __bun_temp_ref_4$ = 1; + } finally { + __callDispose(__bun_temp_ref_1$, __bun_temp_ref_3$, __bun_temp_ref_4$); + } +} +import {using} from "n"; +let __bun_temp_ref_5$ = []; +try { + var a = __using(__bun_temp_ref_5$, b, 0); + var j = __using(__bun_temp_ref_5$, c(i), 1); + var k = __using(__bun_temp_ref_5$, l(m), 0); + var o = __using(__bun_temp_ref_5$, using, 0); + var p = __using(__bun_temp_ref_5$, await using, 1); + var q = r; +} catch (__bun_temp_ref_6$) { + var __bun_temp_ref_7$ = __bun_temp_ref_6$, __bun_temp_ref_8$ = 1; +} finally { + var __bun_temp_ref_9$ = __callDispose(__bun_temp_ref_5$, __bun_temp_ref_7$, __bun_temp_ref_8$); + __bun_temp_ref_9$ && await __bun_temp_ref_9$; +} + +export { + k, + q +}; +" +`; + +exports[`Bun.Transpiler using statements work right 1`] = ` +"let __bun_temp_ref_1$ = []; +try { +const x = __using(__bun_temp_ref_1$, a, 0); +} catch (__bun_temp_ref_2$) { +var __bun_temp_ref_3$ = __bun_temp_ref_2$, +__bun_temp_ref_4$ = 1; +} finally { +__callDispose(__bun_temp_ref_1$, __bun_temp_ref_3$, __bun_temp_ref_4$); +}" +`; + +exports[`Bun.Transpiler using statements work right 2`] = ` +"let __bun_temp_ref_1$ = []; +try { +const x = __using(__bun_temp_ref_1$, a, 1); +} catch (__bun_temp_ref_2$) { +var __bun_temp_ref_3$ = __bun_temp_ref_2$, +__bun_temp_ref_4$ = 1; +} finally { +var __bun_temp_ref_5$ = __callDispose(__bun_temp_ref_1$, __bun_temp_ref_3$, __bun_temp_ref_4$); +__bun_temp_ref_5$ && await __bun_temp_ref_5$; +}" +`; + +exports[`Bun.Transpiler using statements work right 3`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, +__bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 4`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { 
+const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, +__bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 5`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, +__bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using statements work right 6`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, +__bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using statements work right 7`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, +__bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 8`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, +__bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 9`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, +__bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using statements work right 10`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, +__bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using top level 1`] = ` +"import { __callDispose as __callDispose, __using as __using } from "bun:wrap"; +export function c(e) { + let __bun_temp_ref_1$ = []; + try { + const f = __using(__bun_temp_ref_1$, g(a), 0); + return f.h; + } catch (__bun_temp_ref_2$) { + var __bun_temp_ref_3$ = __bun_temp_ref_2$, + __bun_temp_ref_4$ = 1; + } finally { + __callDispose(__bun_temp_ref_1$, __bun_temp_ref_3$, __bun_temp_ref_4$); + } +} +import { using } from "n"; +let __bun_temp_ref_5$ = []; +try { + var a = 
__using(__bun_temp_ref_5$, b, 0); + var j = __using(__bun_temp_ref_5$, c(i), 1); + var k = __using(__bun_temp_ref_5$, l(m), 0); + var o = __using(__bun_temp_ref_5$, using, 0); + var p = __using(__bun_temp_ref_5$, await using, 1); + var q = r; +} catch (__bun_temp_ref_6$) { + var __bun_temp_ref_7$ = __bun_temp_ref_6$, + __bun_temp_ref_8$ = 1; +} finally { + var __bun_temp_ref_9$ = __callDispose(__bun_temp_ref_5$, __bun_temp_ref_7$, __bun_temp_ref_8$); + __bun_temp_ref_9$ && await __bun_temp_ref_9$; +} + +export { + k, + q +}; +" +`; + +exports[`Bun.Transpiler using statements work right 1`] = ` +"let __bun_temp_ref_1$ = []; +try { +const x = __using(__bun_temp_ref_1$, a, 0); +} catch (__bun_temp_ref_2$) { +var __bun_temp_ref_3$ = __bun_temp_ref_2$, __bun_temp_ref_4$ = 1; +} finally { +__callDispose(__bun_temp_ref_1$, __bun_temp_ref_3$, __bun_temp_ref_4$); +}" +`; + +exports[`Bun.Transpiler using statements work right 2`] = ` +"let __bun_temp_ref_1$ = []; +try { +const x = __using(__bun_temp_ref_1$, a, 1); +} catch (__bun_temp_ref_2$) { +var __bun_temp_ref_3$ = __bun_temp_ref_2$, __bun_temp_ref_4$ = 1; +} finally { +var __bun_temp_ref_5$ = __callDispose(__bun_temp_ref_1$, __bun_temp_ref_3$, __bun_temp_ref_4$); +__bun_temp_ref_5$ && await __bun_temp_ref_5$; +}" +`; + +exports[`Bun.Transpiler using statements work right 3`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 4`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 5`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using statements work right 6`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using statements work right 7`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 8`] = ` +"for await (const __bun_temp_ref_1$ of b) 
{ +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 0); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +__callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +} +}" +`; + +exports[`Bun.Transpiler using statements work right 9`] = ` +"for (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using statements work right 10`] = ` +"for await (const __bun_temp_ref_1$ of b) { +let __bun_temp_ref_2$ = []; +try { +const a = __using(__bun_temp_ref_2$, __bun_temp_ref_1$, 1); +c(a); +a(c); +} catch (__bun_temp_ref_3$) { +var __bun_temp_ref_4$ = __bun_temp_ref_3$, __bun_temp_ref_5$ = 1; +} finally { +var __bun_temp_ref_6$ = __callDispose(__bun_temp_ref_2$, __bun_temp_ref_4$, __bun_temp_ref_5$); +__bun_temp_ref_6$ && await __bun_temp_ref_6$; +} +}" +`; + +exports[`Bun.Transpiler using top level 1`] = ` +"import { __callDispose as __callDispose, __using as __using } from "bun:wrap"; +export function c(e) { + let __bun_temp_ref_1$ = []; + try { + const f = __using(__bun_temp_ref_1$, g(a), 0); + return f.h; + } catch (__bun_temp_ref_2$) { + var __bun_temp_ref_3$ = __bun_temp_ref_2$, __bun_temp_ref_4$ = 1; + } finally { + __callDispose(__bun_temp_ref_1$, __bun_temp_ref_3$, __bun_temp_ref_4$); + } +} +import { using } from "n"; +let __bun_temp_ref_5$ = []; +try { + var a = __using(__bun_temp_ref_5$, b, 0); + var j = __using(__bun_temp_ref_5$, c(i), 1); + var k = __using(__bun_temp_ref_5$, l(m), 0); + var o = __using(__bun_temp_ref_5$, using, 0); + var p = __using(__bun_temp_ref_5$, await using, 1); + var q = r; +} catch (__bun_temp_ref_6$) { + var __bun_temp_ref_7$ = __bun_temp_ref_6$, __bun_temp_ref_8$ = 1; +} finally { + var __bun_temp_ref_9$ = __callDispose(__bun_temp_ref_5$, __bun_temp_ref_7$, __bun_temp_ref_8$); + __bun_temp_ref_9$ && await __bun_temp_ref_9$; +} + +export { + k, + q +}; +" +`; diff --git a/test/bundler/transpiler/transpiler.test.js b/test/bundler/transpiler/transpiler.test.js index 965716f9aa..8e8fd6afe4 100644 --- a/test/bundler/transpiler/transpiler.test.js +++ b/test/bundler/transpiler/transpiler.test.js @@ -102,14 +102,16 @@ describe("Bun.Transpiler", () => { it("doesn't hang indefinitely #2746", () => { // this test passes by not hanging - expect(() => - transpiler.transformSync(` - class Test { - test() { - - } - `), - ).toThrow(); + expect(() => { + console.log('1'); + const y = transpiler.transformSync(` + class Test { + test() { + + } + `); + console.error(y); + }).toThrow(); }); describe("property access inlining", () => { @@ -1667,7 +1669,7 @@ console.log(
);`), }); it("import with unicode escape", () => { - expectPrinted_(`import { name } from 'mod\\u1011';`, `import {name} from "mod\\u1011"`); + expectPrinted_(`import { name } from 'mod\\u1011';`, `import { name } from "mod\\u1011"`); }); it("fold string addition", () => { @@ -3137,7 +3139,7 @@ console.log(foo, array); import {ɵtest} from 'foo' `); - expect(out).toBe('import {ɵtest} from "foo";\n'); + expect(out).toBe('import { ɵtest } from "foo";\n'); }); const importLines = ["import {createElement, bacon} from 'react';", "import {bacon, createElement} from 'react';"]; diff --git a/test/js/bun/test/__snapshots__/test-interop.js.snap b/test/js/bun/test/__snapshots__/test-interop.js.snap index c626a5ab56..eb56d73084 100644 --- a/test/js/bun/test/__snapshots__/test-interop.js.snap +++ b/test/js/bun/test/__snapshots__/test-interop.js.snap @@ -1,3 +1,11 @@ // Bun Snapshot v1, https://goo.gl/fbAQLP exports[`expect() toMatchSnapshot to return undefined 1`] = `"abc"`; + +exports[`expect() toMatchSnapshot to return undefined 1`] = `"abc"`; + +exports[`expect() toMatchSnapshot to return undefined 1`] = `"abc"`; + +exports[`expect() toMatchSnapshot to return undefined 1`] = `"abc"`; + +exports[`expect() toMatchSnapshot to return undefined 1`] = `"abc"`; diff --git a/test/js/bun/test/expect.test.js b/test/js/bun/test/expect.test.js index 0055ce0701..33206f2f02 100644 --- a/test/js/bun/test/expect.test.js +++ b/test/js/bun/test/expect.test.js @@ -4733,6 +4733,7 @@ describe("expect()", () => { expect(() => { throw "!"; }).not.toThrow(/ball/); + throw undefined; } catch (e) { expect(e).toBeUndefined(); } @@ -4740,6 +4741,7 @@ describe("expect()", () => { expect(() => { throw "ball"; }).not.toThrow(/ball/); + expect.unreachable(); } catch (e) { expect(e).toBeDefined(); expect(e.message).toContain("Received message: "); diff --git a/test/js/bun/test/jest-extended.test.js b/test/js/bun/test/jest-extended.test.js index be0b6465d7..815a645045 100644 --- a/test/js/bun/test/jest-extended.test.js +++ b/test/js/bun/test/jest-extended.test.js @@ -177,9 +177,9 @@ describe("jest-extended", () => { // Test errors // @ts-expect-error - expect(() => expect(1).toSatisfy(() => new Error("Bun!"))).toThrow("predicate threw an exception"); + expect(() => expect(1).toSatisfy(() => { throw new Error("Bun!") })).toThrow("predicate threw an exception"); // @ts-expect-error - expect(() => expect(1).not.toSatisfy(() => new Error("Bun!"))).toThrow("predicate threw an exception"); + expect(() => expect(1).not.toSatisfy(() => { throw new Error("Bun!") })).toThrow("predicate threw an exception"); }); // Array diff --git a/test/js/node/http/node-http.test.ts b/test/js/node/http/node-http.test.ts index 97b965c5a9..c79b392fe0 100644 --- a/test/js/node/http/node-http.test.ts +++ b/test/js/node/http/node-http.test.ts @@ -25,7 +25,7 @@ import * as stream from "node:stream"; import { PassThrough } from "node:stream"; import url from "node:url"; import * as zlib from "node:zlib"; -const { describe, expect, it, beforeAll, afterAll, createDoneDotAll, mock } = createTest(import.meta.path); +const { describe, expect, it, beforeAll, afterAll, createDoneDotAll, mock, test } = createTest(import.meta.path); function listen(server: Server, protocol: string = "http"): Promise { return new Promise((resolve, reject) => { @@ -162,6 +162,7 @@ describe("node:http", () => { const server = http.createServer(() => {}); const random_port = randomPort(); server.listen(random_port); + await once(server, 'listening'); const { port } = server.address(); 
expect(port).toEqual(random_port); server.close(); diff --git a/test/js/node/missing-module.test.js b/test/js/node/missing-module.test.js index fd772441a8..96540f19f7 100644 --- a/test/js/node/missing-module.test.js +++ b/test/js/node/missing-module.test.js @@ -1,6 +1,6 @@ import { expect, test } from "bun:test"; -test("not implemented yet module masquerades as undefined and throws an error", () => { +test("not implemented yet module throws an error", () => { const missingModule = "node:missing" + ""; expect(() => require(missingModule)).toThrow(/^Cannot find package "node:missing" from "/); expect(() => import(missingModule)).toThrow(/^Cannot find package "node:missing" from "/); diff --git a/test/regression/issue/03844/03844.test.ts b/test/regression/issue/03844/03844.test.ts index da02ccb7d1..9b2ab81a6d 100644 --- a/test/regression/issue/03844/03844.test.ts +++ b/test/regression/issue/03844/03844.test.ts @@ -12,8 +12,8 @@ test("test bun target", async () => { const content = await blob.text(); // use bun's ws - expect(content).toContain('import {WebSocket} from "ws"'); - expect(content).not.toContain("var websocket = __toESM(require_websocket(), 1);"); + expect(content).toContain('import { WebSocket } from "ws"'); + expect(content).not.toContain("var import_websocket = __toESM(require_websocket(), 1);"); }); test("test node target", async () => { @@ -28,5 +28,5 @@ test("test node target", async () => { // use node's ws expect(content).not.toContain('import {WebSocket} from "ws"'); - expect(content).toContain("var websocket = __toESM(require_websocket(), 1);"); + expect(content).toContain("var import_websocket = __toESM(require_websocket(), 1);"); });
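
Note on the new transpiler snapshots: they capture Bun's lowering of explicit resource management (`using` / `await using`) declarations into the `__using` / `__callDispose` helpers imported from "bun:wrap", where the snapshots appear to use a 0 flag for synchronous disposal and a 1 flag for async disposal. A minimal, self-contained sketch of the kind of source that produces those shapes, assuming a TypeScript/Bun setup where Symbol.dispose and Symbol.asyncDispose are available; the names below are illustrative and not taken from the patch:

function makeResource(label: string) {
  return {
    label,
    [Symbol.dispose]() {
      // Runs when the enclosing block exits, even on throw.
      console.log(`disposed ${label}`);
    },
  };
}

async function main() {
  {
    // Plain `using`: lowered with the 0 flag in the snapshots above.
    using res = makeResource("sync");
    console.log(res.label);
  } // "disposed sync" is logged here

  {
    // `await using`: lowered with the 1 flag; the generated finally block
    // awaits whatever __callDispose returns.
    await using res = {
      label: "async",
      async [Symbol.asyncDispose]() {
        console.log("disposed async");
      },
    };
    console.log(res.label);
  }
}

main();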
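Note on the node-http.test.ts hunk: it waits for the server's "listening" event before reading server.address(), since the bound port is only guaranteed to be populated once the socket is actually listening. A minimal sketch of the same pattern; the helper below is hypothetical and not part of the patch:

import http from "node:http";
import { once } from "node:events";

async function startServer(port: number): Promise<number> {
  const server = http.createServer((_req, res) => res.end("ok"));
  server.listen(port);
  // Without this, server.address() can still be null / unpopulated.
  await once(server, "listening");
  const address = server.address();
  if (address === null || typeof address === "string") {
    throw new Error("expected a TCP address");
  }
  server.close();
  return address.port;
}

startServer(0).then(port => console.log(`bound to port ${port}`));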
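Note on the expect.test.js and jest-extended.test.js hunks: they harden their negative-path assertions. Returning `new Error("Bun!")` from a toSatisfy predicate only yields a truthy value, so the fixed tests make the predicate actually throw, and expect.test.js now throws or calls expect.unreachable() so its catch-block assertions are really exercised. A small sketch of the distinction, illustrative only:

// Returning an Error is not the same as throwing one: the arrow body
// `new Error("Bun!")` evaluates to a truthy object, so a predicate written
// that way would be treated as satisfied rather than as having thrown.
const returnsError = () => new Error("Bun!");            // truthy return, no throw
const throwsError = () => { throw new Error("Bun!"); };  // actually throws

console.log(Boolean(returnsError())); // true
try {
  throwsError();
} catch (e) {
  console.log((e as Error).message);  // "Bun!" — the path the fixed tests hit
}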