diff --git a/CMakeLists.txt b/CMakeLists.txt index 73813a0d43..2bdaee4f91 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -3,7 +3,7 @@ cmake_policy(SET CMP0091 NEW) cmake_policy(SET CMP0067 NEW) set(Bun_VERSION "1.0.23") -set(WEBKIT_TAG b4de09f41b83e9e5c0e43ef414f1aee5968b6f7c) +set(WEBKIT_TAG 776c74d4ee334d4abee953471a40117cd2bbc77d) set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}") message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}") diff --git a/docs/api/sqlite.md b/docs/api/sqlite.md index 3749fb8b4c..ddc405fb02 100644 --- a/docs/api/sqlite.md +++ b/docs/api/sqlite.md @@ -57,6 +57,23 @@ import { Database } from "bun:sqlite"; const db = new Database("mydb.sqlite", { create: true }); ``` +### Load via ES module import + +You can also use an import attribute to load a database. + +```ts +import db from "./mydb.sqlite" with {"type": "sqlite"}; + +console.log(db.query("select * from users LIMIT 1").get()); +``` + +This is equivalent to the following: + +```ts +import { Database } from "bun:sqlite"; +const db = new Database("./mydb.sqlite"); +``` + ### `.close()` To close a database: diff --git a/docs/bundler/executables.md b/docs/bundler/executables.md index b654ec8102..0b909123ea 100644 --- a/docs/bundler/executables.md +++ b/docs/bundler/executables.md @@ -32,6 +32,25 @@ All imported files and packages are bundled into the executable, along with a co {% /callout %} +## SQLite + +You can use `bun:sqlite` imports with `bun build --compile`. + +By default, the database is resolved relative to the current working directory of the process. + +```js +import db from './my.db' with {type: "sqlite"}; + +console.log(db.query("select * from users LIMIT 1").get()); +``` + +That means if the executable is located at `/usr/bin/hello` and the user's terminal is located at `/home/me/Desktop`, it will look for `/home/me/Desktop/my.db`. + +``` +$ cd /home/me/Desktop +$ ./hello +``` + ## Embedding files Standalone executables support embedding files. 
@@ -55,6 +74,30 @@ You may need to specify a `--loader` for it to be treated as a `"file"` loader ( Embedded files can be read using `Bun.file`'s functions or the Node.js `fs.readFile` function (in `"node:fs"`). +### Embedding SQLite databases + +If your application wants to embed a SQLite database, set the `type` import attribute to `"sqlite"` and the `embed` attribute to `"true"`. + +```js +import myEmbeddedDb from "./my.db" with {type: "sqlite", embed: "true"}; + +console.log(myEmbeddedDb.query("select * from users LIMIT 1").get()); +``` + +This database is read-write, but all changes are lost when the executable exits (since it's stored in memory). + +### Embedding N-API Addons + +As of Bun v1.0.23, you can embed `.node` files into executables. + +```js +const addon = require("./addon.node"); + +console.log(addon.hello()); +``` + +Unfortunately, if you're using `@mapbox/node-pre-gyp` or other similar tools, you'll need to make sure the `.node` file is directly required or it won't bundle correctly. + ## Minification To trim down the size of the executable a little, pass `--minify` to `bun build --compile`. This uses Bun's minifier to reduce the code size. Overall though, Bun's binary is still way too big and we need to make it smaller. diff --git a/docs/bundler/loaders.md b/docs/bundler/loaders.md index d013913d65..227093d7db 100644 --- a/docs/bundler/loaders.md +++ b/docs/bundler/loaders.md @@ -171,6 +171,31 @@ console.log(addon); In the bundler, `.node` files are handled using the [`file`](#file) loader. +### `sqlite` + +**SQLite loader**. `with { "type": "sqlite" }` import attribute + +In the runtime and bundler, SQLite databases can be directly imported. This will load the database using [`bun:sqlite`](/docs/api/sqlite.md). + +```ts +import db from "./my.db" with {type: "sqlite"}; +``` + +This is only supported when the `target` is `bun`. 
+ +By default, the database is external to the bundle (so that you can potentially use a database loaded elsewhere), so the database file on-disk won't be bundled into the final output. + +You can change this behavior with the `"embed"` attribute: + +```ts +// embed the database into the bundle +import db from "./my.db" with {type: "sqlite", embed: "true"}; +``` + +When using a [standalone executable](/docs/bundler/executables), the database is embedded into the single-file executable. + +Otherwise, the database to embed is copied into the `outdir` with a hashed filename. + ### `file` **File loader**. Default for all unrecognized file types. diff --git a/docs/runtime/loaders.md b/docs/runtime/loaders.md index 891ed0bffe..6b226823d0 100644 --- a/docs/runtime/loaders.md +++ b/docs/runtime/loaders.md @@ -80,6 +80,17 @@ $ bun run ./my-wasm-app.whatever **Note** — WASI support is based on [wasi-js](https://github.com/sagemathinc/cowasm/tree/main/core/wasi-js). Currently, it only supports WASI binaries that use the `wasi_snapshot_preview1` or `wasi_unstable` APIs. Bun's implementation is not fully optimized for performance; this will become more of a priority as WASM grows in popularity. {% /callout %} +## SQLite + +You can import sqlite databases directly into your code. Bun will automatically load the database and return a `Database` object. + +```ts +import db from "./my.db" with {type: "sqlite"}; +console.log(db.query("select * from users LIMIT 1").get()); +``` + +This uses [`bun:sqlite`](/docs/api/sqlite). + ## Custom loaders Support for additional file types can be implemented with plugins. Refer to [Runtime > Plugins](/docs/bundler/plugins) for full documentation. 
diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts index c97dd3423a..587e06a80d 100644 --- a/src/api/schema.d.ts +++ b/src/api/schema.d.ts @@ -27,6 +27,7 @@ export const enum Loader { base64 = 11, dataurl = 12, text = 13, + sqlite = 14, } export const LoaderKeys: { 1: "jsx"; @@ -55,6 +56,8 @@ export const LoaderKeys: { dataurl: "dataurl"; 13: "text"; text: "text"; + 14: "sqlite"; + sqlite: "sqlite"; }; export const enum FrameworkEntryPointType { client = 1, diff --git a/src/api/schema.js b/src/api/schema.js index 44d38c36fc..0b99bf24a4 100644 --- a/src/api/schema.js +++ b/src/api/schema.js @@ -12,6 +12,7 @@ const Loader = { "11": 11, "12": 12, "13": 13, + "14": 14, "jsx": 1, "js": 2, "ts": 3, @@ -25,6 +26,7 @@ const Loader = { "base64": 11, "dataurl": 12, "text": 13, + "sqlite": 14, }; const LoaderKeys = { "1": "jsx", @@ -40,6 +42,7 @@ const LoaderKeys = { "11": "base64", "12": "dataurl", "13": "text", + "14": "sqlite", "jsx": "jsx", "js": "js", "ts": "ts", @@ -53,6 +56,7 @@ const LoaderKeys = { "base64": "base64", "dataurl": "dataurl", "text": "text", + "sqlite": "sqlite", }; const FrameworkEntryPointType = { "1": 1, diff --git a/src/api/schema.peechy b/src/api/schema.peechy index 32f1503a14..bc048e5078 100644 --- a/src/api/schema.peechy +++ b/src/api/schema.peechy @@ -14,6 +14,7 @@ smol Loader { base64 = 11; dataurl = 12; text = 13; + sqlite = 14; } smol FrameworkEntryPointType { diff --git a/src/api/schema.zig b/src/api/schema.zig index d618522fe7..998b2963da 100644 --- a/src/api/schema.zig +++ b/src/api/schema.zig @@ -364,6 +364,9 @@ pub const Api = struct { /// text text, + /// sqlite + sqlite, + _, pub fn jsonStringify(self: @This(), writer: anytype) !void { diff --git a/src/bun.js/api/JSBundler.zig b/src/bun.js/api/JSBundler.zig index fb25e060db..f4a5dc2474 100644 --- a/src/bun.js/api/JSBundler.zig +++ b/src/bun.js/api/JSBundler.zig @@ -549,6 +549,10 @@ pub const JSBundler = struct { import_record_index: u32 = 0, range: logger.Range = logger.Range.None, 
original_target: Target, + + pub inline fn loader(_: *const MiniImportRecord) ?options.Loader { + return null; + } }; pub fn create( diff --git a/src/bun.js/bindings/BunProcess.cpp b/src/bun.js/bindings/BunProcess.cpp index 6a5c2492ca..61fc060be9 100644 --- a/src/bun.js/bindings/BunProcess.cpp +++ b/src/bun.js/bindings/BunProcess.cpp @@ -3,6 +3,7 @@ #include #include #include +#include "headers-handwritten.h" #include "node_api.h" #include "ZigGlobalObject.h" #include "headers.h" @@ -232,6 +233,8 @@ JSC_DEFINE_CUSTOM_SETTER(Process_defaultSetter, return true; } +extern "C" bool Bun__resolveEmbeddedNodeFile(void*, BunString*); + JSC_DECLARE_HOST_FUNCTION(Process_functionDlopen); JSC_DEFINE_HOST_FUNCTION(Process_functionDlopen, (JSC::JSGlobalObject * globalObject_, JSC::CallFrame* callFrame)) @@ -268,6 +271,14 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionDlopen, } WTF::String filename = callFrame->uncheckedArgument(1).toWTFString(globalObject); + // Support embedded .node files + if (filename.startsWith("/$bunfs/"_s)) { + BunString bunStr = Bun::toString(filename); + if (Bun__resolveEmbeddedNodeFile(globalObject->bunVM(), &bunStr)) { + filename = bunStr.toWTFString(BunString::ZeroCopy); + } + } + RETURN_IF_EXCEPTION(scope, {}); #if OS(WINDOWS) CString utf8 = filename.utf8(); diff --git a/src/bun.js/bindings/CommonJSModuleRecord.cpp b/src/bun.js/bindings/CommonJSModuleRecord.cpp index 7f5201c269..cdcb36451d 100644 --- a/src/bun.js/bindings/CommonJSModuleRecord.cpp +++ b/src/bun.js/bindings/CommonJSModuleRecord.cpp @@ -29,6 +29,7 @@ * different value. In that case, it will have a stale value. 
*/ +#include "headers.h" #include "root.h" #include "headers-handwritten.h" #include "ZigGlobalObject.h" @@ -914,13 +915,39 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionRequireCommonJS, (JSGlobalObject * lexicalGlo BunString specifierStr = Bun::toString(specifier); BunString referrerStr = Bun::toString(referrer); + BunString typeAttributeStr = { BunStringTag::Dead }; + String typeAttribute = String(); + + + // We need to be able to wire in the "type" import attribute from bundled code.. + // so we do it via CommonJS require(). + int32_t previousArgumentCount = callframe->argument(2).asInt32(); + // If they called require(id), skip the check for the type attribute + if (UNLIKELY(previousArgumentCount == 2)) { + JSValue val = callframe->argument(3); + if (val.isObject()) { + JSObject* obj = val.getObject(); + // This getter is expensive and rare. + if (auto typeValue = obj->getIfPropertyExists(globalObject, vm.propertyNames->type)) { + if (typeValue.isString()) { + typeAttribute = typeValue.toWTFString(globalObject); + RETURN_IF_EXCEPTION(throwScope, {}); + typeAttributeStr = Bun::toString(typeAttribute); + } + } + RETURN_IF_EXCEPTION(throwScope, {}); + } + } JSValue fetchResult = Bun::fetchCommonJSModule( globalObject, jsCast(callframe->argument(1)), specifierValue, &specifierStr, - &referrerStr); + &referrerStr, + LIKELY(typeAttribute.isEmpty()) + ? 
nullptr + : &typeAttributeStr); RELEASE_AND_RETURN(throwScope, JSValue::encode(fetchResult)); } diff --git a/src/bun.js/bindings/ModuleLoader.cpp b/src/bun.js/bindings/ModuleLoader.cpp index c641d8e65d..8f61ecf7b6 100644 --- a/src/bun.js/bindings/ModuleLoader.cpp +++ b/src/bun.js/bindings/ModuleLoader.cpp @@ -423,7 +423,8 @@ JSValue fetchCommonJSModule( JSCommonJSModule* target, JSValue specifierValue, BunString* specifier, - BunString* referrer) + BunString* referrer, + BunString* typeAttribute) { void* bunVM = globalObject->bunVM(); auto& vm = globalObject->vm(); @@ -539,8 +540,7 @@ JSValue fetchCommonJSModule( } } - auto* loader = globalObject->moduleLoader(); - JSMap* registry = jsCast(loader->getDirect(vm, Identifier::fromString(vm, "registry"_s))); + JSMap* registry = globalObject->esmRegistryMap(); auto hasAlreadyLoadedESMVersionSoWeShouldntTranspileItTwice = [&]() -> bool { JSValue entry = registry->get(globalObject, specifierValue); @@ -557,7 +557,7 @@ JSValue fetchCommonJSModule( RELEASE_AND_RETURN(scope, jsNumber(-1)); } - Bun__transpileFile(bunVM, globalObject, specifier, referrer, res, false); + Bun__transpileFile(bunVM, globalObject, specifier, referrer, typeAttribute, res, false); if (res->success && res->result.value.commonJSExportsLen) { target->evaluate(globalObject, specifier->toWTFString(BunString::ZeroCopy), res->result.value); @@ -606,7 +606,8 @@ static JSValue fetchESMSourceCode( Zig::GlobalObject* globalObject, ErrorableResolvedSource* res, BunString* specifier, - BunString* referrer) + BunString* referrer, + BunString* typeAttribute) { void* bunVM = globalObject->bunVM(); auto& vm = globalObject->vm(); @@ -708,12 +709,12 @@ static JSValue fetchESMSourceCode( } if constexpr (allowPromise) { - void* pendingCtx = Bun__transpileFile(bunVM, globalObject, specifier, referrer, res, true); + void* pendingCtx = Bun__transpileFile(bunVM, globalObject, specifier, referrer, typeAttribute, res, true); if (pendingCtx) { return 
reinterpret_cast(pendingCtx); } } else { - Bun__transpileFile(bunVM, globalObject, specifier, referrer, res, false); + Bun__transpileFile(bunVM, globalObject, specifier, referrer, typeAttribute, res, false); } if (res->success && res->result.value.commonJSExportsLen) { @@ -833,17 +834,19 @@ JSValue fetchESMSourceCodeSync( Zig::GlobalObject* globalObject, ErrorableResolvedSource* res, BunString* specifier, - BunString* referrer) + BunString* referrer, + BunString* typeAttribute) { - return fetchESMSourceCode(globalObject, res, specifier, referrer); + return fetchESMSourceCode(globalObject, res, specifier, referrer, typeAttribute); } JSValue fetchESMSourceCodeAsync( Zig::GlobalObject* globalObject, ErrorableResolvedSource* res, BunString* specifier, - BunString* referrer) + BunString* referrer, + BunString* typeAttribute) { - return fetchESMSourceCode(globalObject, res, specifier, referrer); + return fetchESMSourceCode(globalObject, res, specifier, referrer, typeAttribute); } } diff --git a/src/bun.js/bindings/ModuleLoader.h b/src/bun.js/bindings/ModuleLoader.h index d3bfec337c..2e1bb6e0bf 100644 --- a/src/bun.js/bindings/ModuleLoader.h +++ b/src/bun.js/bindings/ModuleLoader.h @@ -90,19 +90,22 @@ JSValue fetchESMSourceCodeSync( Zig::GlobalObject* globalObject, ErrorableResolvedSource* res, BunString* specifier, - BunString* referrer); + BunString* referrer, + BunString* typeAttribute); JSValue fetchESMSourceCodeAsync( Zig::GlobalObject* globalObject, ErrorableResolvedSource* res, BunString* specifier, - BunString* referrer); + BunString* referrer, + BunString* typeAttribute); JSValue fetchCommonJSModule( Zig::GlobalObject* globalObject, JSCommonJSModule* moduleObject, JSValue specifierValue, BunString* specifier, - BunString* referrer); + BunString* referrer, + BunString* typeAttribute); } // namespace Bun diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp index 8439848f47..0868cdb81e 100644 --- 
a/src/bun.js/bindings/ZigGlobalObject.cpp +++ b/src/bun.js/bindings/ZigGlobalObject.cpp @@ -66,6 +66,8 @@ #include "wtf/text/StringImpl.h" #include "wtf/text/StringView.h" #include "wtf/text/WTFString.h" +#include "JavaScriptCore/JSScriptFetchParameters.h" +#include "JavaScriptCore/ScriptFetchParameters.h" #include "wtf/text/Base64.h" // #include "JavaScriptCore/CachedType.h" @@ -723,7 +725,8 @@ JSC_DEFINE_HOST_FUNCTION(functionFulfillModuleSync, reinterpret_cast(globalObject), &res, &specifier, - &specifier); + &specifier, + nullptr); if (scope.exception() || !result) { RELEASE_AND_RETURN(scope, JSValue::encode(JSC::jsUndefined())); @@ -4182,7 +4185,7 @@ static JSC::JSInternalPromise* rejectedInternalPromise(JSC::JSGlobalObject* glob JSC::JSInternalPromise* GlobalObject::moduleLoaderFetch(JSGlobalObject* globalObject, JSModuleLoader* loader, JSValue key, - JSValue sourceValue, JSValue value2) + JSValue parameters, JSValue script) { JSC::VM& vm = globalObject->vm(); @@ -4197,11 +4200,23 @@ JSC::JSInternalPromise* GlobalObject::moduleLoaderFetch(JSGlobalObject* globalOb } auto moduleKeyBun = Bun::toString(moduleKey); - auto sourceString = sourceValue.isString() - ? sourceValue.toWTFString(globalObject) - : String("undefined"_s); // WASM entry point expet "undefined" as the referrer. 
+ auto sourceString = String("undefined"_s); + auto typeAttributeString = String(); + + if (parameters && parameters.isCell()) { + JSCell* parametersCell = parameters.asCell(); + if (parametersCell->type() == JSScriptFetchParametersType) { + auto* obj = jsCast(parametersCell); + const auto& params = obj->parameters(); + + if (params.type() == ScriptFetchParameters::Type::HostDefined) { + typeAttributeString = params.hostDefinedImportType(); + } + } + } auto source = Bun::toString(sourceString); + auto typeAttribute = Bun::toString(typeAttributeString); ErrorableResolvedSource res; res.success = false; res.result.err.code = 0; @@ -4211,7 +4226,8 @@ JSC::JSInternalPromise* GlobalObject::moduleLoaderFetch(JSGlobalObject* globalOb reinterpret_cast(globalObject), &res, &moduleKeyBun, - &source); + &source, + typeAttributeString.isEmpty() ? nullptr : &typeAttribute); if (auto* internalPromise = JSC::jsDynamicCast(result)) { return internalPromise; diff --git a/src/bun.js/bindings/headers-handwritten.h b/src/bun.js/bindings/headers-handwritten.h index 681805ee00..aef02528ea 100644 --- a/src/bun.js/bindings/headers-handwritten.h +++ b/src/bun.js/bindings/headers-handwritten.h @@ -316,6 +316,7 @@ extern "C" void* Bun__transpileFile( JSC::JSGlobalObject* global, const BunString* specifier, const BunString* referrer, + const BunString* typeAttribute, ErrorableResolvedSource* result, bool allowPromise); extern "C" JSC::EncodedJSValue CallbackJob__onResolve(JSC::JSGlobalObject*, JSC::CallFrame*); diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index 05c88116be..33a2987128 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -1934,6 +1934,49 @@ pub const ModuleLoader = struct { ); }, + .sqlite_embedded, .sqlite => { + const sqlite_module_source_code_string = brk: { + if (jsc_vm.hot_reload == .hot) { + break :brk + \\// Generated code + \\import {Database} from 'bun:sqlite'; + \\const {path} = import.meta; + \\ + \\// Don't 
reload the database if it's already loaded + \\const registry = (globalThis[Symbol.for("bun:sqlite:hot")] ??= new Map()); + \\ + \\export let db = registry.get(path); + \\export const __esModule = true; + \\if (!db) { + \\ // Load the database + \\ db = new Database(path); + \\ registry.set(path, db); + \\} + \\ + \\export default db; + ; + } + + break :brk + \\// Generated code + \\import {Database} from 'bun:sqlite'; + \\export const db = new Database(import.meta.path); + \\ + \\export const __esModule = true; + \\export default db; + ; + }; + + return ResolvedSource{ + .allocator = null, + .source_code = bun.String.create(sqlite_module_source_code_string), + .specifier = input_specifier, + .source_url = if (input_specifier.eqlUTF8(path.text)) input_specifier.dupeRef() else String.init(path.text), + .tag = .esm, + .hash = 0, + }; + }, + else => { var stack_buf = std.heap.stackFallback(4096, jsc_vm.allocator); const allocator = stack_buf.get(); @@ -2041,6 +2084,7 @@ pub const ModuleLoader = struct { globalObject: *JSC.JSGlobalObject, specifier_ptr: *const bun.String, referrer: *const bun.String, + type_attribute: ?*const bun.String, ret: *ErrorableResolvedSource, allow_promise: bool, ) ?*anyopaque { @@ -2074,6 +2118,12 @@ pub const ModuleLoader = struct { } } + if (type_attribute) |attribute| { + if (attribute.eqlComptime("sqlite")) { + loader = .sqlite; + } + } + // We only run the transpiler concurrently when we can. 
// Today, that's: // @@ -2277,6 +2327,26 @@ pub const ModuleLoader = struct { const specifier_utf8 = specifier.toUTF8(bun.default_allocator); defer specifier_utf8.deinit(); if (graph.files.get(specifier_utf8.slice())) |file| { + if (file.loader == .sqlite or file.loader == .sqlite_embedded) { + const code = + \\/* Generated code */ + \\import {Database} from 'bun:sqlite'; + \\import {readFileSync} from 'node:fs'; + \\export const db = new Database(readFileSync(import.meta.path)); + \\ + \\export const __esModule = true; + \\export default db; + ; + return ResolvedSource{ + .allocator = null, + .source_code = bun.String.init(code), + .specifier = specifier, + .source_url = specifier.dupeRef(), + .hash = 0, + .needs_deref = false, + }; + } + return ResolvedSource{ .allocator = null, .source_code = bun.String.static(file.contents), @@ -2723,3 +2793,50 @@ pub const HardcodedModule = enum { } }; }; + +/// Support embedded .node files +export fn Bun__resolveEmbeddedNodeFile(vm: *JSC.VirtualMachine, in_out_str: *bun.String) bool { + var graph = vm.standalone_module_graph orelse return false; + const utf8 = in_out_str.toUTF8(bun.default_allocator); + defer utf8.deinit(); + const file = graph.find(utf8.slice()) orelse return false; + + if (comptime Environment.isLinux) { + // TODO: use /proc/fd/12346 instead! Avoid the copy! + } + + // atomically write to a tmpfile and then move it to the final destination + var tmpname_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + const tmpfilename = bun.sliceTo(bun.fs.FileSystem.instance.tmpname("node", &tmpname_buf, bun.hash(file.name)) catch return false, 0); + + const tmpdir = bun.fs.FileSystem.instance.tmpdir(); + + // First we open the tmpfile, to avoid any other work in the event of failure. 
+ const tmpfile = bun.Tmpfile.create(bun.toFD(tmpdir.fd), tmpfilename).unwrap() catch return false; + defer { + _ = bun.sys.close(tmpfile.fd); + } + + switch (JSC.Node.NodeFS.writeFileWithPathBuffer( + &tmpname_buf, // not used + + .{ + .data = .{ + .encoded_slice = JSC.ZigString.Slice.fromUTF8NeverFree(file.contents), + }, + .dirfd = bun.toFD(tmpdir.fd), + .file = .{ + .fd = tmpfile.fd, + }, + .encoding = .buffer, + }, + )) { + .err => { + return false; + }, + else => {}, + } + + in_out_str.* = bun.String.create(bun.path.joinAbs(bun.fs.FileSystem.instance.fs.tmpdirPath(), .auto, tmpfilename)); + return true; +} diff --git a/src/bundler.zig b/src/bundler.zig index 4ae07ace9a..8a0b86ab99 100644 --- a/src/bundler.zig +++ b/src/bundler.zig @@ -1043,7 +1043,7 @@ pub const Bundler = struct { output_file.value = .{ .move = file_op }; }, - .wasm, .file, .napi => { + .sqlite_embedded, .sqlite, .wasm, .file, .napi => { const hashed_name = try bundler.linker.getHashedFilename(file_path, null); var pathname = try bundler.allocator.alloc(u8, hashed_name.len + file_path.name.ext.len); bun.copy(u8, pathname, hashed_name); diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 2d7d9664a2..19953f9835 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -595,7 +595,16 @@ pub const BundleV2 = struct { entry.value_ptr.* = source_index.get(); out_source_index = source_index; this.graph.ast.append(bun.default_allocator, JSAst.empty) catch unreachable; - const loader = path.loader(&this.bundler.options.loaders) orelse options.Loader.file; + const loader = brk: { + if (import_record.importer_source_index) |importer| { + var record: *ImportRecord = &this.graph.ast.items(.import_records)[importer].slice()[import_record.import_record_index]; + if (record.loader()) |out_loader| { + break :brk out_loader; + } + } + + break :brk path.loader(&this.bundler.options.loaders) orelse options.Loader.file; + }; this.graph.input_files.append(bun.default_allocator, 
.{ .source = .{ @@ -1067,7 +1076,9 @@ pub const BundleV2 = struct { const sources = this.graph.input_files.items(.source); var additional_output_files = std.ArrayList(options.OutputFile).init(this.bundler.allocator); - var additional_files: []BabyList(AdditionalFile) = this.graph.input_files.items(.additional_files); + const additional_files: []BabyList(AdditionalFile) = this.graph.input_files.items(.additional_files); + const loaders = this.graph.input_files.items(.loader); + for (reachable_files) |reachable_source| { const index = reachable_source.get(); const key = unique_key_for_additional_files[index]; @@ -1092,7 +1103,7 @@ pub const BundleV2 = struct { template.placeholder.hash = content_hashes_for_additional_files[index]; } - const loader = source.path.loader(&this.bundler.options.loaders) orelse options.Loader.file; + const loader = loaders[index]; additional_output_files.append( options.OutputFile.init( @@ -1896,6 +1907,16 @@ pub const BundleV2 = struct { } } + // By default, we treat .sqlite files as external. 
+ if (import_record.tag == .with_type_sqlite) { + import_record.is_external_without_side_effects = true; + continue; + } + + if (import_record.tag == .with_type_sqlite_embedded) { + import_record.is_external_without_side_effects = true; + } + if (this.enqueueOnResolvePluginIfNeeded(source.index.get(), import_record, source.path.text, @as(u32, @truncate(i)), ast.target)) { continue; } @@ -2024,6 +2045,10 @@ pub const BundleV2 = struct { resolve_task.jsx.development = resolve_result.jsx.development; + if (import_record.tag.loader()) |loader| { + resolve_task.loader = loader; + } + if (resolve_task.loader == null) { resolve_task.loader = path.loader(&this.bundler.options.loaders); resolve_task.tree_shaking = this.bundler.options.tree_shaking; @@ -2501,6 +2526,116 @@ pub const ParseTask = struct { }, Logger.Loc{ .start = 0 }); return JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?); }, + + .sqlite_embedded, .sqlite => { + if (!bundler.options.target.isBun()) { + log.addError( + null, + Logger.Loc.Empty, + "To use the \"sqlite\" loader, set target to \"bun\"", + ) catch bun.outOfMemory(); + return error.ParserError; + } + + const path_to_use = brk: { + // Implements embedded sqlite + if (loader == .sqlite_embedded) { + const embedded_path = std.fmt.allocPrint(allocator, "{any}A{d:0>8}", .{ bun.fmt.hexIntLower(unique_key_prefix), source.index.get() }) catch unreachable; + unique_key_for_additional_file.* = embedded_path; + break :brk embedded_path; + } + + break :brk source.path.text; + }; + + // This injects the following code: + // + // import.meta.require(unique_key).db + // + const import_path = Expr.init(E.String, E.String{ + .data = path_to_use, + }, Logger.Loc{ .start = 0 }); + + const import_meta = Expr.init(E.ImportMeta, E.ImportMeta{}, Logger.Loc{ .start = 0 }); + const require_property = Expr.init(E.Dot, E.Dot{ + .target = import_meta, + .name_loc = Logger.Loc.Empty, + .name = "require", + }, 
Logger.Loc{ .start = 0 }); + const require_args = allocator.alloc(Expr, 2) catch unreachable; + require_args[0] = import_path; + const object_properties = allocator.alloc(G.Property, 1) catch unreachable; + object_properties[0] = G.Property{ + .key = Expr.init(E.String, E.String{ + .data = "type", + }, Logger.Loc{ .start = 0 }), + .value = Expr.init(E.String, E.String{ + .data = "sqlite", + }, Logger.Loc{ .start = 0 }), + }; + require_args[1] = Expr.init(E.Object, E.Object{ + .properties = G.Property.List.init(object_properties), + .is_single_line = true, + }, Logger.Loc{ .start = 0 }); + const require_call = Expr.init(E.Call, E.Call{ + .target = require_property, + .args = BabyList(Expr).init(require_args), + }, Logger.Loc{ .start = 0 }); + + const root = Expr.init(E.Dot, E.Dot{ + .target = require_call, + .name_loc = Logger.Loc.Empty, + .name = "db", + }, Logger.Loc{ .start = 0 }); + + return JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?); + }, + .napi => { + if (bundler.options.target == .node) { + log.addError( + null, + Logger.Loc.Empty, + "TODO: implement .node loader for Node.js target", + ) catch bun.outOfMemory(); + return error.ParserError; + } + + if (bundler.options.target != .bun) { + log.addError( + null, + Logger.Loc.Empty, + "To load .node files, set target to \"bun\"", + ) catch bun.outOfMemory(); + return error.ParserError; + } + + const unique_key = std.fmt.allocPrint(allocator, "{any}A{d:0>8}", .{ bun.fmt.hexIntLower(unique_key_prefix), source.index.get() }) catch unreachable; + // This injects the following code: + // + // import.meta.require(unique_key) + // + const import_path = Expr.init(E.String, E.String{ + .data = unique_key, + }, Logger.Loc{ .start = 0 }); + + // TODO: e_require_string + const import_meta = Expr.init(E.ImportMeta, E.ImportMeta{}, Logger.Loc{ .start = 0 }); + const require_property = Expr.init(E.Dot, E.Dot{ + .target = import_meta, + .name_loc = 
Logger.Loc.Empty, + .name = "require", + }, Logger.Loc{ .start = 0 }); + const require_args = allocator.alloc(Expr, 1) catch unreachable; + require_args[0] = import_path; + const require_call = Expr.init(E.Call, E.Call{ + .target = require_property, + .args = BabyList(Expr).init(require_args), + }, Logger.Loc{ .start = 0 }); + + const root = require_call; + unique_key_for_additional_file.* = unique_key; + return JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?); + }, // TODO: css else => { const unique_key = std.fmt.allocPrint(allocator, "{any}A{d:0>8}", .{ bun.fmt.hexIntLower(unique_key_prefix), source.index.get() }) catch unreachable; diff --git a/src/bundler/entry_points.zig b/src/bundler/entry_points.zig index d87edea0ab..86bc2369cb 100644 --- a/src/bundler/entry_points.zig +++ b/src/bundler/entry_points.zig @@ -157,25 +157,6 @@ pub const ClientEntryPoint = struct { } }; -const QuoteEscapeFormat = struct { - data: []const u8, - - pub fn format(self: QuoteEscapeFormat, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - var i: usize = 0; - while (std.mem.indexOfAnyPos(u8, self.data, i, "\"\n\\")) |j| : (i = j + 1) { - try writer.writeAll(self.data[i..j]); - try writer.writeAll(switch (self.data[j]) { - '"' => "\\\"", - '\n' => "\\n", - '\\' => "\\\\", - else => unreachable, - }); - } - if (i == self.data.len) return; - try writer.writeAll(self.data[i..]); - } -}; - pub const ServerEntryPoint = struct { source: logger.Source = undefined, @@ -218,7 +199,7 @@ pub const ServerEntryPoint = struct { \\ , .{ - QuoteEscapeFormat{ .data = path_to_use }, + strings.QuoteEscapeFormat{ .data = path_to_use }, }, ); } @@ -239,7 +220,7 @@ pub const ServerEntryPoint = struct { \\ , .{ - QuoteEscapeFormat{ .data = path_to_use }, + strings.QuoteEscapeFormat{ .data = path_to_use }, }, ); }; diff --git a/src/import_record.zig b/src/import_record.zig index d48cfb1b0c..c21019b693 100644 --- 
a/src/import_record.zig +++ b/src/import_record.zig @@ -181,6 +181,10 @@ pub const ImportRecord = struct { pub const List = bun.BabyList(ImportRecord); + pub fn loader(this: *const ImportRecord) ?bun.options.Loader { + return this.tag.loader(); + } + pub const Tag = enum { none, /// JSX auto-import for React Fast Refresh @@ -208,6 +212,17 @@ pub const ImportRecord = struct { /// We don't actually support this right now. react_server_component, + with_type_sqlite, + with_type_sqlite_embedded, + + pub fn loader(this: Tag) ?bun.options.Loader { + return switch (this) { + .with_type_sqlite => .sqlite, + .with_type_sqlite_embedded => .sqlite_embedded, + else => null, + }; + } + pub fn isReactReference(this: Tag) bool { return switch (this) { .react_client_component, .react_server_component => true, diff --git a/src/js/builtins.d.ts b/src/js/builtins.d.ts index e6488bb440..5afd759b59 100644 --- a/src/js/builtins.d.ts +++ b/src/js/builtins.d.ts @@ -107,7 +107,7 @@ declare function $isArrayIterator(obj: unknown): obj is Iterator; declare function $isMapIterator(obj: unknown): obj is Iterator; declare function $isSetIterator(obj: unknown): obj is Iterator; declare function $isUndefinedOrNull(obj: unknown): obj is null | undefined; -declare function $tailCallForwardArguments(): TODO; +declare function $tailCallForwardArguments(fn: CallableFunction, thisValue: ThisType): any; /** * **NOTE** - use `throw new TypeError()` instead. it compiles to the same builtin * @deprecated diff --git a/src/js/builtins/JSBufferPrototype.ts b/src/js/builtins/JSBufferPrototype.ts index 53dbc8b57e..cf5b3fa3a7 100644 --- a/src/js/builtins/JSBufferPrototype.ts +++ b/src/js/builtins/JSBufferPrototype.ts @@ -462,7 +462,7 @@ export function slice(this: BufferExt, start, end) { function adjustOffset(offset, length) { // Use Math.trunc() to convert offset to an integer value that can be larger // than an Int32. Hence, don't use offset | 0 or similar techniques. 
- offset = $trunc(offset); + offset = Math.trunc(offset); if (offset === 0 || offset !== offset) { return 0; } else if (offset < 0) { diff --git a/src/js/builtins/Module.ts b/src/js/builtins/Module.ts index 201890ecd0..01ca8db21c 100644 --- a/src/js/builtins/Module.ts +++ b/src/js/builtins/Module.ts @@ -5,7 +5,7 @@ export function main() { $visibility = "Private"; export function require(this: CommonJSModuleRecord, id: string) { - return $overridableRequire.$call(this, id); + return $tailCallForwardArguments($overridableRequire, this); } // overridableRequire can be overridden by setting `Module.prototype.require` @@ -50,7 +50,15 @@ export function overridableRequire(this: CommonJSModuleRecord, id: string) { // Note: we do not need to wrap this in a try/catch, if it throws the C++ code will // clear the module from the map. // - var out = this.$require(id, mod); + var out = this.$require( + id, + mod, + // did they pass a { type } object? + $argumentCount(), + // the object containing a "type" attribute, if they passed one + // maybe this will be "paths" in the future too. + arguments[1], + ); // -1 means we need to lookup the module from the ESM registry. 
if (out === -1) { diff --git a/src/js/private.d.ts b/src/js/private.d.ts index 5f3c5bab99..e7564e6b72 100644 --- a/src/js/private.d.ts +++ b/src/js/private.d.ts @@ -211,7 +211,7 @@ interface BunLazyModules { declare var $exports: any; interface CommonJSModuleRecord { - $require(id: string, mod: any): any; + $require(id: string, mod: any, args_count: number, args: Array): any; children: CommonJSModuleRecord[]; exports: any; id: string; diff --git a/src/js_parser.zig b/src/js_parser.zig index 01447cb4e0..515f760876 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -2502,6 +2502,7 @@ const ParsedPath = struct { loc: logger.Loc, text: string, is_macro: bool, + import_tag: ImportRecord.Tag = .none, }; const StrictModeFeature = enum { @@ -8779,11 +8780,34 @@ fn NewParser_( item_refs.shrinkAndFree(stmt.items.len + @as(usize, @intFromBool(stmt.default_name != null))); } + if (path.import_tag != .none) { + try p.validateSQLiteImportType(path.import_tag, &stmt); + } + // Track the items for this namespace try p.import_items_for_namespace.put(p.allocator, stmt.namespace_ref, item_refs); return p.s(stmt, loc); } + fn validateSQLiteImportType(p: *P, import_tag: ImportRecord.Tag, stmt: *S.Import) !void { + @setCold(true); + + if (import_tag == .with_type_sqlite or import_tag == .with_type_sqlite_embedded) { + p.import_records.items[stmt.import_record_index].tag = import_tag; + + for (stmt.items) |*item| { + if (!(strings.eqlComptime(item.alias, "default") or strings.eqlComptime(item.alias, "db"))) { + try p.log.addError( + p.source, + item.name.loc, + "sqlite imports only support the \"default\" or \"db\" imports", + ); + break; + } + } + } + } + // This is the type parameter declarations that go with other symbol // declarations (class, function, type, etc.) 
fn skipTypeScriptTypeParameters(p: *P, flags: TypeParameterFlag) anyerror!SkipTypeParameterResult { @@ -9409,6 +9433,8 @@ fn NewParser_( if (path.is_macro) { try p.log.addError(p.source, path.loc, "cannot use macro in export statement"); + } else if (path.import_tag != .none) { + try p.log.addError(p.source, loc, "cannot use export statement with \"type\" attribute"); } if (comptime track_symbol_usage_during_parse_pass) { @@ -9449,6 +9475,8 @@ fn NewParser_( if (parsedPath.is_macro) { try p.log.addError(p.source, loc, "export from cannot be used with \"type\": \"macro\""); + } else if (parsedPath.import_tag != .none) { + try p.log.addError(p.source, loc, "export from cannot be used with \"type\" attribute"); } const import_record_index = p.addImportRecord(.stmt, parsedPath.loc, parsedPath.text); @@ -11463,6 +11491,7 @@ fn NewParser_( .loc = p.lexer.loc(), .text = p.lexer.string_literal_slice, .is_macro = false, + .import_tag = .none, }; if (p.lexer.token == .t_no_substitution_template_literal) { @@ -11483,28 +11512,68 @@ fn NewParser_( try p.lexer.next(); try p.lexer.expect(.t_open_brace); + const SupportedAttribute = enum { + type, + embed, + }; + + var has_seen_embed_true = false; + while (p.lexer.token != .t_close_brace) { - const is_type_flag = brk: { + const supported_attribute: ?SupportedAttribute = brk: { // Parse the key if (p.lexer.isIdentifierOrKeyword()) { - break :brk strings.eqlComptime(p.lexer.identifier, "type"); + if (strings.eqlComptime(p.lexer.identifier, "type")) { + break :brk .type; + } + + if (strings.eqlComptime(p.lexer.identifier, "embed")) { + break :brk .embed; + } } else if (p.lexer.token == .t_string_literal) { - break :brk p.lexer.string_literal_is_ascii and strings.eqlComptime(p.lexer.string_literal_slice, "type"); + if (p.lexer.string_literal_is_ascii) { + if (strings.eqlComptime(p.lexer.string_literal_slice, "type")) { + break :brk .type; + } + + if (strings.eqlComptime(p.lexer.string_literal_slice, "embed")) { + break :brk .embed; + 
} + } } else { try p.lexer.expect(.t_identifier); } - break :brk false; + break :brk null; }; try p.lexer.next(); try p.lexer.expect(.t_colon); try p.lexer.expect(.t_string_literal); - if (is_type_flag and - p.lexer.string_literal_is_ascii and strings.eqlComptime(p.lexer.string_literal_slice, "macro")) - { - path.is_macro = true; + if (p.lexer.string_literal_is_ascii) { + if (supported_attribute) |attr| { + switch (attr) { + .type => { + if (strings.eqlComptime(p.lexer.string_literal_slice, "macro")) { + path.is_macro = true; + } else if (strings.eqlComptime(p.lexer.string_literal_slice, "sqlite")) { + path.import_tag = .with_type_sqlite; + if (has_seen_embed_true) { + path.import_tag = .with_type_sqlite_embedded; + } + } + }, + .embed => { + if (strings.eqlComptime(p.lexer.string_literal_slice, "true")) { + has_seen_embed_true = true; + if (path.import_tag == .with_type_sqlite) { + path.import_tag = .with_type_sqlite_embedded; + } + } + }, + } + } } if (p.lexer.token != .t_comma) { diff --git a/src/js_printer.zig b/src/js_printer.zig index b9e99c9afb..4d6b9fb6eb 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -4708,6 +4708,11 @@ fn NewPrinter( } p.printImportRecordPath(record); + + if ((record.tag.loader() orelse options.Loader.file).isSQLite()) { + // we do not preserve "embed": "true" since it is not necessary + p.printWhitespacer(ws(" with { type: \"sqlite\" }")); + } p.printSemicolonAfterStatement(); }, .s_block => |s| { diff --git a/src/options.zig b/src/options.zig index 89ee5b9966..cc08f0dfcb 100644 --- a/src/options.zig +++ b/src/options.zig @@ -650,12 +650,24 @@ pub const Loader = enum(u8) { base64, dataurl, text, + sqlite, + sqlite_embedded, + + pub inline fn isSQLite(this: Loader) bool { + return switch (this) { + .sqlite, .sqlite_embedded => true, + else => false, + }; + } pub fn shouldCopyForBundling(this: Loader) bool { return switch (this) { .file, // TODO: CSS .css, + .napi, + .sqlite, + .sqlite_embedded, => true, else => false, }; @@ 
-745,6 +757,8 @@ pub const Loader = enum(u8) { .{ "base64", Loader.base64 }, .{ "txt", Loader.text }, .{ "text", Loader.text }, + .{ "sqlite", Loader.sqlite }, + .{ "sqlite_embedded", Loader.sqlite_embedded }, }); pub const api_names = bun.ComptimeStringMap(Api.Loader, .{ @@ -766,6 +780,7 @@ pub const Loader = enum(u8) { .{ "base64", Api.Loader.base64 }, .{ "txt", Api.Loader.text }, .{ "text", Api.Loader.text }, + .{ "sqlite", Api.Loader.sqlite }, }); pub fn fromString(slice_: string) ?Loader { @@ -799,6 +814,7 @@ pub const Loader = enum(u8) { .base64 => .base64, .dataurl => .dataurl, .text => .text, + .sqlite_embedded, .sqlite => .sqlite, }; } @@ -817,6 +833,7 @@ pub const Loader = enum(u8) { .base64 => .base64, .dataurl => .dataurl, .text => .text, + .sqlite => .sqlite, else => .file, }; } diff --git a/src/string_immutable.zig b/src/string_immutable.zig index 15ec586f0f..0169c7b1cf 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -5637,3 +5637,22 @@ pub fn visibleUTF16Width(input: []const u16) usize { pub fn visibleLatin1Width(input: []const u8) usize { return visibleASCIIWidth(input); } + +pub const QuoteEscapeFormat = struct { + data: []const u8, + + pub fn format(self: QuoteEscapeFormat, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + var i: usize = 0; + while (std.mem.indexOfAnyPos(u8, self.data, i, "\"\n\\")) |j| : (i = j + 1) { + try writer.writeAll(self.data[i..j]); + try writer.writeAll(switch (self.data[j]) { + '"' => "\\\"", + '\n' => "\\n", + '\\' => "\\\\", + else => unreachable, + }); + } + if (i == self.data.len) return; + try writer.writeAll(self.data[i..]); + } +}; diff --git a/test/bundler/bundler_bun.test.ts b/test/bundler/bundler_bun.test.ts new file mode 100644 index 0000000000..4c81618bb4 --- /dev/null +++ b/test/bundler/bundler_bun.test.ts @@ -0,0 +1,46 @@ +// @known-failing-on-windows: panic "TODO on Windows" +import assert from "assert"; +import dedent from "dedent"; +import { ESBUILD, 
itBundled, testForFile } from "./expectBundled"; +import { Database } from "bun:sqlite"; +var { describe, test, expect } = testForFile(import.meta.path); + +describe("bundler", () => { + itBundled("bun/embedded-sqlite-file", { + target: "bun", + outfile: "", + outdir: "/out", + + files: { + "/entry.ts": /* js */ ` + import db from './db.sqlite' with {type: "sqlite", embed: "true"}; + console.log(db.query("select message from messages LIMIT 1").get().message); + `, + "/db.sqlite": (() => { + const db = new Database(":memory:"); + db.exec("create table messages (message text)"); + db.exec("insert into messages values ('Hello, world!')"); + return db.serialize(); + })(), + }, + run: { stdout: "Hello, world!" }, + }); + itBundled("bun/sqlite-file", { + target: "bun", + files: { + "/entry.ts": /* js */ ` + import db from './db.sqlite' with {type: "sqlite"}; + console.log(db.query("select message from messages LIMIT 1").get().message); + `, + }, + runtimeFiles: { + "/db.sqlite": (() => { + const db = new Database(":memory:"); + db.exec("create table messages (message text)"); + db.exec("insert into messages values ('Hello, world!')"); + return db.serialize(); + })(), + }, + run: { stdout: "Hello, world!", setCwd: true }, + }); +}); diff --git a/test/bundler/bundler_compile.test.ts b/test/bundler/bundler_compile.test.ts index 85fb8b0c76..11370f213e 100644 --- a/test/bundler/bundler_compile.test.ts +++ b/test/bundler/bundler_compile.test.ts @@ -2,6 +2,7 @@ import assert from "assert"; import dedent from "dedent"; import { ESBUILD, itBundled, testForFile } from "./expectBundled"; +import { Database } from "bun:sqlite"; var { describe, test, expect } = testForFile(import.meta.path); describe("bundler", () => { @@ -179,4 +180,38 @@ describe("bundler", () => { }, compile: true, }); + itBundled("compile/embedded-sqlite-file", { + compile: true, + files: { + "/entry.ts": /* js */ ` + import db from './db.sqlite' with {type: "sqlite", embed: "true"}; + 
console.log(db.query("select message from messages LIMIT 1").get().message); + `, + "/db.sqlite": (() => { + const db = new Database(":memory:"); + db.exec("create table messages (message text)"); + db.exec("insert into messages values ('Hello, world!')"); + return db.serialize(); + })(), + }, + run: { stdout: "Hello, world!" }, + }); + itBundled("compile/sqlite-file", { + compile: true, + files: { + "/entry.ts": /* js */ ` + import db from './db.sqlite' with {type: "sqlite"}; + console.log(db.query("select message from messages LIMIT 1").get().message); + `, + }, + runtimeFiles: { + "/db.sqlite": (() => { + const db = new Database(":memory:"); + db.exec("create table messages (message text)"); + db.exec("insert into messages values ('Hello, world!')"); + return db.serialize(); + })(), + }, + run: { stdout: "Hello, world!", setCwd: true }, + }); }); diff --git a/test/bundler/expectBundled.ts b/test/bundler/expectBundled.ts index 6ccb28ea81..d81078d8bc 100644 --- a/test/bundler/expectBundled.ts +++ b/test/bundler/expectBundled.ts @@ -89,9 +89,9 @@ export interface BundlerTestInput { todo?: boolean; // file options - files: Record; + files: Record; /** Files to be written only after the bundle is done. */ - runtimeFiles?: Record; + runtimeFiles?: Record; /** Defaults to the first item in `files` */ entryPoints?: string[]; /** ??? */ @@ -498,7 +498,9 @@ function expectBundled( for (const [file, contents] of Object.entries(files)) { const filename = path.join(root, file); mkdirSync(path.dirname(filename), { recursive: true }); - writeFileSync(filename, dedent(contents).replace(/\{\{root\}\}/g, root)); + const formattedContents = + typeof contents === "string" ? dedent(contents).replace(/\{\{root\}\}/g, root) : contents; + writeFileSync(filename, formattedContents); } if (useDefineForClassFields !== undefined) { @@ -1159,7 +1161,9 @@ for (const [key, blob] of build.outputs) { // Write runtime files to disk as well as run the post bundle hook. 
for (const [file, contents] of Object.entries(runtimeFiles ?? {})) { mkdirSync(path.dirname(path.join(root, file)), { recursive: true }); - writeFileSync(path.join(root, file), dedent(contents).replace(/\{\{root\}\}/g, root)); + const formattedContents = + typeof contents === "string" ? dedent(contents).replace(/\{\{root\}\}/g, root) : contents; + writeFileSync(path.join(root, file), formattedContents); } if (onAfterBundle) {