diff --git a/docs/bundler/index.mdx b/docs/bundler/index.mdx index e5e2ed5268..88a301f550 100644 --- a/docs/bundler/index.mdx +++ b/docs/bundler/index.mdx @@ -220,6 +220,78 @@ An array of paths corresponding to the entrypoints of our application. One bundl +### files + +A map of file paths to their contents for in-memory bundling. This allows you to bundle virtual files that don't exist on disk, or override the contents of files that do exist. This option is only available in the JavaScript API. + +File contents can be provided as a `string`, `Blob`, `TypedArray`, or `ArrayBuffer`. + +#### Bundle entirely from memory + +You can bundle code without any files on disk by providing all sources via `files`: + +```ts title="build.ts" icon="/icons/typescript.svg" +const result = await Bun.build({ + entrypoints: ["/app/index.ts"], + files: { + "/app/index.ts": ` + import { greet } from "./greet.ts"; + console.log(greet("World")); + `, + "/app/greet.ts": ` + export function greet(name: string) { + return "Hello, " + name + "!"; + } + `, + }, +}); + +const output = await result.outputs[0].text(); +console.log(output); +``` + +When all entrypoints are in the `files` map, the current working directory is used as the root. + +#### Override files on disk + +In-memory files take priority over files on disk. This lets you override specific files while keeping the rest of your codebase unchanged: + +```ts title="build.ts" icon="/icons/typescript.svg" +// Assume ./src/config.ts exists on disk with development settings +await Bun.build({ + entrypoints: ["./src/index.ts"], + files: { + // Override config.ts with production values + "./src/config.ts": ` + export const API_URL = "https://api.production.com"; + export const DEBUG = false; + `, + }, + outdir: "./dist", +}); +``` + +#### Mix disk and virtual files + +Real files on disk can import virtual files, and virtual files can import real files: + +```ts title="build.ts" icon="/icons/typescript.svg" +// ./src/index.ts exists on disk and imports "./generated.ts" +await Bun.build({ + entrypoints: ["./src/index.ts"], + files: { + // Provide a virtual file that index.ts imports + "./src/generated.ts": ` + export const BUILD_ID = "${crypto.randomUUID()}"; + export const BUILD_TIME = ${Date.now()}; + `, + }, + outdir: "./dist", +}); +``` + +This is useful for code generation, injecting build-time constants, or testing with mock modules. + ### outdir The directory where output files will be written. diff --git a/packages/bun-types/bun.d.ts b/packages/bun-types/bun.d.ts index 6ab778c97f..bf6e80c9de 100644 --- a/packages/bun-types/bun.d.ts +++ b/packages/bun-types/bun.d.ts @@ -1979,6 +1979,65 @@ declare module "bun" { */ reactFastRefresh?: boolean; + /** + * A map of file paths to their contents for in-memory bundling. + * + * This allows you to bundle virtual files that don't exist on disk, or override + * the contents of files that do exist on disk. The keys are file paths (which should + * match how they're imported) and the values are the file contents. 
+ * + * File contents can be provided as: + * - `string` - The source code as a string + * - `Blob` - A Blob containing the source code + * - `NodeJS.TypedArray` - A typed array (e.g., `Uint8Array`) containing the source code + * - `ArrayBufferLike` - An ArrayBuffer containing the source code + * + * @example + * ```ts + * // Bundle entirely from memory (no files on disk needed) + * await Bun.build({ + * entrypoints: ["/app/index.ts"], + * files: { + * "/app/index.ts": ` + * import { helper } from "./helper.ts"; + * console.log(helper()); + * `, + * "/app/helper.ts": ` + * export function helper() { + * return "Hello from memory!"; + * } + * `, + * }, + * }); + * ``` + * + * @example + * ```ts + * // Override a file on disk with in-memory contents + * await Bun.build({ + * entrypoints: ["./src/index.ts"], + * files: { + * // This will be used instead of the actual ./src/config.ts file + * "./src/config.ts": `export const API_URL = "https://production.api.com";`, + * }, + * }); + * ``` + * + * @example + * ```ts + * // Mix disk files with in-memory files + * // Entry point is on disk, but imports a virtual file + * await Bun.build({ + * entrypoints: ["./src/index.ts"], // Real file on disk + * files: { + * // Virtual file that ./src/index.ts can import via "./generated.ts" + * "./src/generated.ts": `export const BUILD_TIME = ${Date.now()};`, + * }, + * }); + * ``` + */ + files?: Record; + /** * Generate a JSON file containing metadata about the build. * diff --git a/src/bun.js/api/JSBundler.zig b/src/bun.js/api/JSBundler.zig index 645c0038f5..57a8fe763a 100644 --- a/src/bun.js/api/JSBundler.zig +++ b/src/bun.js/api/JSBundler.zig @@ -3,6 +3,211 @@ const debug = bun.Output.scoped(.Transpiler, .visible); pub const JSBundler = struct { const OwnedString = bun.MutableString; + /// A map of file paths to their in-memory contents. + /// This allows bundling with virtual files that may not exist on disk. + pub const FileMap = struct { + map: bun.StringHashMapUnmanaged(jsc.Node.BlobOrStringOrBuffer) = .empty, + + pub fn deinitAndUnprotect(self: *FileMap) void { + var iter = self.map.iterator(); + while (iter.next()) |entry| { + entry.value_ptr.deinitAndUnprotect(); + bun.default_allocator.free(entry.key_ptr.*); + } + self.map.deinit(bun.default_allocator); + } + + /// Resolve a specifier against the file map. + /// Returns the contents if the specifier exactly matches a key in the map, + /// or if the specifier is a relative path that, when joined with a source + /// directory, matches a key in the map. + pub fn get(self: *const FileMap, specifier: []const u8) ?[]const u8 { + if (self.map.count() == 0) return null; + + if (comptime !bun.Environment.isWindows) { + const entry = self.map.get(specifier) orelse return null; + return entry.slice(); + } + + // Normalize backslashes to forward slashes for consistent lookup + // Map keys are stored with forward slashes (normalized in fromJS) + const buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); + const normalized = bun.path.pathToPosixBuf(u8, specifier, buf); + const entry = self.map.get(normalized) orelse return null; + return entry.slice(); + } + + /// Check if the file map contains a given specifier. 
+ pub fn contains(self: *const FileMap, specifier: []const u8) bool { + if (self.map.count() == 0) return false; + + if (comptime !bun.Environment.isWindows) { + return self.map.contains(specifier); + } + + // Normalize backslashes to forward slashes for consistent lookup + const buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); + const normalized = bun.path.pathToPosixBuf(u8, specifier, buf); + return self.map.contains(normalized); + } + + /// Returns a resolver Result for a file in the map, or null if not found. + /// This creates a minimal Result that can be used by the bundler. + /// + /// source_file: The path of the importing file (may be relative or absolute) + /// specifier: The import specifier (e.g., "./utils.js" or "/lib.js") + pub fn resolve(self: *const FileMap, source_file: []const u8, specifier: []const u8) ?_resolver.Result { + // Fast path: if the map is empty, return immediately + if (self.map.count() == 0) return null; + + // Check if the specifier is directly in the map + // Must use getKey to return the map's owned key, not the parameter + if (comptime !bun.Environment.isWindows) { + if (self.map.getKey(specifier)) |key| { + return _resolver.Result{ + .path_pair = .{ + .primary = Fs.Path.initWithNamespace(key, "file"), + }, + .module_type = .unknown, + }; + } + } else { + const buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); + const normalized_specifier = bun.path.pathToPosixBuf(u8, specifier, buf); + + if (self.map.getKey(normalized_specifier)) |key| { + return _resolver.Result{ + .path_pair = .{ + .primary = Fs.Path.initWithNamespace(key, "file"), + }, + .module_type = .unknown, + }; + } + } + + // Also try with source directory joined for relative specifiers + // Check for relative specifiers (not starting with / and not Windows absolute like C:/) + if (specifier.len > 0 and specifier[0] != '/' and + !(specifier.len >= 3 and specifier[1] == ':' and (specifier[2] == '/' or specifier[2] == '\\'))) + { + // First, ensure source_file is absolute. It may be relative (e.g., "../../Windows/Temp/...") + // on Windows when the bundler stores paths relative to cwd. 
+ const abs_source_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(abs_source_buf); + const abs_source_file = if (isAbsolutePath(source_file)) + source_file + else + Fs.FileSystem.instance.absBuf(&.{source_file}, abs_source_buf); + + // Normalize source_file to use forward slashes (for Windows compatibility) + // On Windows, source_file may have backslashes from the real filesystem + // Use pathToPosixBuf which always converts \ to / regardless of platform + const source_file_buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(source_file_buf); + const normalized_source_file = bun.path.pathToPosixBuf(u8, abs_source_file, source_file_buf); + + // Extract directory from source_file using posix path handling + // For "/entry.js", we want "/"; for "/src/index.js", we want "/src/" + // For "C:/foo/bar.js", we want "C:/foo" + const buf = bun.path_buffer_pool.get(); + defer bun.path_buffer_pool.put(buf); + const source_dir = bun.path.dirname(normalized_source_file, .posix); + // If dirname returns empty but path starts with drive letter, extract the drive + root + const effective_source_dir = if (source_dir.len == 0) + (if (normalized_source_file.len >= 3 and normalized_source_file[1] == ':' and normalized_source_file[2] == '/') + normalized_source_file[0..3] // "C:/" + else if (normalized_source_file.len > 0 and normalized_source_file[0] == '/') + "/" + else + Fs.FileSystem.instance.top_level_dir) + else + source_dir; + // Use .loose to preserve Windows drive letters, then normalize in-place on Windows + const joined_len = bun.path.joinAbsStringBuf(effective_source_dir, buf, &.{specifier}, .loose).len; + if (bun.Environment.isWindows) { + bun.path.platformToPosixInPlace(u8, buf[0..joined_len]); + } + const joined = buf[0..joined_len]; + // Must use getKey to return the map's owned key, not the temporary buffer + if (self.map.getKey(joined)) |key| { + return _resolver.Result{ + .path_pair = .{ + .primary = Fs.Path.initWithNamespace(key, "file"), + }, + .module_type = .unknown, + }; + } + } + + return null; + } + + /// Check if a path is absolute (works for both posix and Windows paths) + fn isAbsolutePath(path: []const u8) bool { + if (path.len == 0) return false; + // Posix absolute path + if (path[0] == '/') return true; + // Windows absolute path with drive letter (e.g., "C:\..." or "C:/...") + if (path.len >= 3 and path[1] == ':' and (path[2] == '/' or path[2] == '\\')) { + return switch (path[0]) { + 'a'...'z', 'A'...'Z' => true, + else => false, + }; + } + // Windows UNC path (e.g., "\\server\share") + if (path.len >= 2 and path[0] == '\\' and path[1] == '\\') return true; + return false; + } + + /// Parse the files option from JavaScript. + /// Expected format: Record + /// Uses async parsing for cross-thread safety since bundler runs on a separate thread. 
+ pub fn fromJS(globalThis: *jsc.JSGlobalObject, files_value: jsc.JSValue) JSError!FileMap { + var self = FileMap{ + .map = .empty, + }; + errdefer self.deinitAndUnprotect(); + + const files_obj = files_value.getObject() orelse { + return globalThis.throwInvalidArguments("Expected files to be an object", .{}); + }; + + var files_iter = try jsc.JSPropertyIterator(.{ + .skip_empty_name = true, + .include_value = true, + }).init(globalThis, files_obj); + defer files_iter.deinit(); + + try self.map.ensureTotalCapacity(bun.default_allocator, @intCast(files_iter.len)); + + while (try files_iter.next()) |prop| { + const property_value = files_iter.value; + + // Parse the value as BlobOrStringOrBuffer using async mode for thread safety + var blob_or_string = try jsc.Node.BlobOrStringOrBuffer.fromJSAsync(globalThis, bun.default_allocator, property_value) orelse { + return globalThis.throwInvalidArguments("Expected file content to be a string, Blob, File, TypedArray, or ArrayBuffer", .{}); + }; + errdefer blob_or_string.deinitAndUnprotect(); + + // Clone the key since we need to own it + const key = try prop.toOwnedSlice(bun.default_allocator); + + // Normalize backslashes to forward slashes for cross-platform consistency + // This ensures Windows paths like "C:\foo\bar.js" become "C:/foo/bar.js" + // Use dangerouslyConvertPathToPosixInPlace which always converts \ to / + // (uses sep_windows constant, not sep which varies by target) + bun.path.dangerouslyConvertPathToPosixInPlace(u8, key); + + self.map.putAssumeCapacity(key, blob_or_string); + } + + return self; + } + }; + pub const Config = struct { target: Target = Target.browser, entry_points: bun.StringSet = bun.StringSet.init(bun.default_allocator), @@ -46,6 +251,9 @@ pub const JSBundler = struct { env_prefix: OwnedString = OwnedString.initEmpty(bun.default_allocator), tsconfig_override: OwnedString = OwnedString.initEmpty(bun.default_allocator), compile: ?CompileOptions = null, + /// In-memory files that can be used as entrypoints or imported. + /// These files do not need to exist on disk. 
+ files: FileMap = .{}, metafile: bool = false, pub const CompileOptions = struct { @@ -506,6 +714,11 @@ pub const JSBundler = struct { return globalThis.throwInvalidArguments("Expected entrypoints to be an array of strings", .{}); } + // Parse the files option for in-memory files + if (try config.getOwnObject(globalThis, "files")) |files_obj| { + this.files = try FileMap.fromJS(globalThis, files_obj.toJS()); + } + if (try config.getBooleanLoose(globalThis, "emitDCEAnnotations")) |flag| { this.emit_dce_annotations = flag; } @@ -539,6 +752,20 @@ pub const JSBundler = struct { const entry_points = this.entry_points.keys(); + // Check if all entry points are in the FileMap - if so, use cwd + if (this.files.map.count() > 0) { + var all_in_filemap = true; + for (entry_points) |ep| { + if (!this.files.contains(ep)) { + all_in_filemap = false; + break; + } + } + if (all_in_filemap) { + break :brk ZigString.Slice.fromUTF8NeverFree("."); + } + } + if (entry_points.len == 1) { break :brk ZigString.Slice.fromUTF8NeverFree(std.fs.path.dirname(entry_points[0]) orelse "."); } @@ -842,6 +1069,7 @@ pub const JSBundler = struct { self.env_prefix.deinit(); self.footer.deinit(); self.tsconfig_override.deinit(); + self.files.deinitAndUnprotect(); } }; @@ -1656,6 +1884,7 @@ const string = []const u8; const CompileTarget = @import("../../compile_target.zig"); const Fs = @import("../../fs.zig"); +const _resolver = @import("../../resolver/resolver.zig"); const resolve_path = @import("../../resolver/resolve_path.zig"); const std = @import("std"); diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig index abfc821b07..fc67cea5cb 100644 --- a/src/bun.js/node/types.zig +++ b/src/bun.js/node/types.zig @@ -33,7 +33,7 @@ pub const BlobOrStringOrBuffer = union(enum) { pub fn deinitAndUnprotect(this: *BlobOrStringOrBuffer) void { switch (this.*) { - .string_or_buffer => |sob| { + .string_or_buffer => |*sob| { sob.deinitAndUnprotect(); }, .blob => |*blob| { @@ -46,14 +46,22 @@ pub const BlobOrStringOrBuffer = union(enum) { return this.slice().len; } - pub fn fromJSMaybeFile(global: *jsc.JSGlobalObject, allocator: std.mem.Allocator, value: jsc.JSValue, allow_file: bool) JSError!?BlobOrStringOrBuffer { + pub fn fromJSMaybeFileMaybeAsync(global: *jsc.JSGlobalObject, allocator: std.mem.Allocator, value: jsc.JSValue, allow_file: bool, is_async: bool) JSError!?BlobOrStringOrBuffer { // Check StringOrBuffer first because it's more common and cheaper. 
- const str = try StringOrBuffer.fromJS(global, allocator, value) orelse { + const str = try StringOrBuffer.fromJSMaybeAsync(global, allocator, value, is_async, true) orelse { const blob = value.as(jsc.WebCore.Blob) orelse return null; if (allow_file and blob.needsToReadFile()) { return global.throwInvalidArguments("File blob cannot be used here", .{}); } + if (is_async) { + // For async/cross-thread usage, copy the blob data to an owned slice + // rather than referencing the store which isn't thread-safe + const blob_data = blob.sharedView(); + const owned_data = allocator.dupe(u8, blob_data) catch return error.OutOfMemory; + return .{ .string_or_buffer = .{ .encoded_slice = jsc.ZigString.Slice.init(allocator, owned_data) } }; + } + if (blob.store) |store| { store.ref(); } @@ -63,10 +71,18 @@ pub const BlobOrStringOrBuffer = union(enum) { return .{ .string_or_buffer = str }; } + pub fn fromJSMaybeFile(global: *jsc.JSGlobalObject, allocator: std.mem.Allocator, value: jsc.JSValue, allow_file: bool) JSError!?BlobOrStringOrBuffer { + return fromJSMaybeFileMaybeAsync(global, allocator, value, allow_file, false); + } + pub fn fromJS(global: *jsc.JSGlobalObject, allocator: std.mem.Allocator, value: jsc.JSValue) JSError!?BlobOrStringOrBuffer { return fromJSMaybeFile(global, allocator, value, true); } + pub fn fromJSAsync(global: *jsc.JSGlobalObject, allocator: std.mem.Allocator, value: jsc.JSValue) JSError!?BlobOrStringOrBuffer { + return fromJSMaybeFileMaybeAsync(global, allocator, value, true, true); + } + pub fn fromJSWithEncodingValue(global: *jsc.JSGlobalObject, allocator: std.mem.Allocator, value: jsc.JSValue, encoding_value: jsc.JSValue) bun.JSError!?BlobOrStringOrBuffer { return fromJSWithEncodingValueAllowRequestResponse(global, allocator, value, encoding_value, false); } diff --git a/src/bundler/BundleThread.zig b/src/bundler/BundleThread.zig index 206698d168..f85129f9ba 100644 --- a/src/bundler/BundleThread.zig +++ b/src/bundler/BundleThread.zig @@ -132,6 +132,11 @@ pub fn BundleThread(CompletionStruct: type) type { BundleV2.JSBundleCompletionTask => completion, else => @compileError("Unknown completion struct: " ++ CompletionStruct), }; + // Set the file_map pointer for in-memory file support + this.file_map = if (completion.config.files.map.count() > 0) + &completion.config.files + else + null; completion.transpiler = this; defer { diff --git a/src/bundler/ParseTask.zig b/src/bundler/ParseTask.zig index 2621fdc06b..e8bbbcf696 100644 --- a/src/bundler/ParseTask.zig +++ b/src/bundler/ParseTask.zig @@ -636,6 +636,16 @@ fn getCodeForParseTaskWithoutPlugins( const trace = bun.perf.trace("Bundler.readFile"); defer trace.end(); + // Check FileMap for in-memory files first + if (task.ctx.file_map) |file_map| { + if (file_map.get(file_path.text)) |file_contents| { + break :brk .{ + .contents = file_contents, + .fd = bun.invalid_fd, + }; + } + } + if (strings.eqlComptime(file_path.namespace, "node")) lookup_builtin: { if (task.ctx.framework) |f| { if (f.built_in_modules.get(file_path.text)) |file| { diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 4ba7522fa4..e8b93bb4d4 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -118,6 +118,9 @@ pub const BundleV2 = struct { bun_watcher: ?*bun.Watcher, plugins: ?*jsc.API.JSBundler.Plugin, completion: ?*JSBundleCompletionTask, + /// In-memory files that can be used as entrypoints or imported. + /// This is a pointer to the FileMap in the completion config. 
+ file_map: ?*const jsc.API.JSBundler.FileMap, source_code_length: usize, /// There is a race condition where an onResolve plugin may schedule a task on the bundle thread before it's parsing task completes @@ -534,9 +537,47 @@ pub const BundleV2 = struct { target: options.Target, ) void { const transpiler = this.transpilerForTarget(target); + const source_dir = Fs.PathName.init(import_record.source_file).dirWithTrailingSlash(); + + // Check the FileMap first for in-memory files + if (this.file_map) |file_map| { + if (file_map.resolve(import_record.source_file, import_record.specifier)) |_file_map_result| { + var file_map_result = _file_map_result; + var path_primary = file_map_result.path_pair.primary; + const entry = bun.handleOom(this.pathToSourceIndexMap(target).getOrPut(this.allocator(), path_primary.text)); + if (!entry.found_existing) { + const loader: Loader = brk: { + const record: *ImportRecord = &this.graph.ast.items(.import_records)[import_record.importer_source_index].slice()[import_record.import_record_index]; + if (record.loader) |out_loader| { + break :brk out_loader; + } + break :brk Fs.Path.init(path_primary.text).loader(&transpiler.options.loaders) orelse options.Loader.file; + }; + // For virtual files, use the path text as-is (no relative path computation needed). + path_primary.pretty = bun.handleOom(this.allocator().dupe(u8, path_primary.text)); + const idx = this.enqueueParseTask( + &file_map_result, + &.{ + .path = path_primary, + .contents = "", + }, + loader, + import_record.original_target, + ) catch |err| bun.handleOom(err); + entry.value_ptr.* = idx; + const record: *ImportRecord = &this.graph.ast.items(.import_records)[import_record.importer_source_index].slice()[import_record.import_record_index]; + record.source_index = Index.init(idx); + } else { + const record: *ImportRecord = &this.graph.ast.items(.import_records)[import_record.importer_source_index].slice()[import_record.import_record_index]; + record.source_index = Index.init(entry.value_ptr.*); + } + return; + } + } + var had_busted_dir_cache: bool = false; var resolve_result: _resolver.Result = while (true) break transpiler.resolver.resolve( - Fs.PathName.init(import_record.source_file).dirWithTrailingSlash(), + source_dir, import_record.specifier, import_record.kind, ) catch |err| { @@ -880,6 +921,7 @@ pub const BundleV2 = struct { .bun_watcher = null, .plugins = null, .completion = null, + .file_map = null, .source_code_length = 0, .thread_lock = .initLocked(), }; @@ -1034,6 +1076,18 @@ pub const BundleV2 = struct { continue; } + // Check FileMap first for in-memory entry points + if (this.file_map) |file_map| { + if (file_map.resolve("", entry_point)) |file_map_result| { + _ = try this.enqueueEntryItem( + file_map_result, + true, + this.transpiler.options.target, + ); + continue; + } + } + // no plugins were matched const resolved = this.transpiler.resolveEntryPoint(entry_point) catch continue; @@ -3276,6 +3330,49 @@ pub const BundleV2 = struct { ast.target, }; + // Check the FileMap first for in-memory files + if (this.file_map) |file_map| { + if (file_map.resolve(source.path.text, import_record.path.text)) |_file_map_result| { + var file_map_result = _file_map_result; + var path_primary = file_map_result.path_pair.primary; + const import_record_loader = import_record.loader orelse Fs.Path.init(path_primary.text).loader(&transpiler.options.loaders) orelse .file; + import_record.loader = import_record_loader; + + if (this.pathToSourceIndexMap(target).get(path_primary.text)) |id| { + 
import_record.source_index = .init(id); + continue; + } + + const resolve_entry = resolve_queue.getOrPut(path_primary.text) catch |err| bun.handleOom(err); + if (resolve_entry.found_existing) { + import_record.path = resolve_entry.value_ptr.*.path; + continue; + } + + // For virtual files, use the path text as-is (no relative path computation needed). + path_primary.pretty = bun.handleOom(this.allocator().dupe(u8, path_primary.text)); + import_record.path = path_primary; + resolve_entry.key_ptr.* = path_primary.text; + debug("created ParseTask from FileMap: {s}", .{path_primary.text}); + const resolve_task = bun.handleOom(bun.default_allocator.create(ParseTask)); + file_map_result.path_pair.primary = path_primary; + resolve_task.* = ParseTask.init(&file_map_result, Index.invalid, this); + resolve_task.known_target = target; + // Use transpiler JSX options, applying force_node_env like the disk path does + resolve_task.jsx = transpiler.options.jsx; + resolve_task.jsx.development = switch (transpiler.options.force_node_env) { + .development => true, + .production => false, + .unspecified => transpiler.options.jsx.development, + }; + resolve_task.loader = import_record_loader; + resolve_task.tree_shaking = transpiler.options.tree_shaking; + resolve_task.side_effects = .has_side_effects; + resolve_entry.value_ptr.* = resolve_task; + continue; + } + } + var had_busted_dir_cache = false; var resolve_result: _resolver.Result = inner: while (true) break transpiler.resolver.resolveWithFramework( source_dir, diff --git a/test/bundler/bundler_files.test.ts b/test/bundler/bundler_files.test.ts new file mode 100644 index 0000000000..81a5d90457 --- /dev/null +++ b/test/bundler/bundler_files.test.ts @@ -0,0 +1,585 @@ +import { describe, expect, test } from "bun:test"; +import { tempDir } from "harness"; + +describe("bundler files option", () => { + test("basic in-memory file bundling", async () => { + const result = await Bun.build({ + entrypoints: ["/entry.js"], + files: { + "/entry.js": `console.log("hello from memory");`, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("hello from memory"); + }); + + test("in-memory file with imports", async () => { + const result = await Bun.build({ + entrypoints: ["/entry.js"], + files: { + "/entry.js": ` + import { foo } from "/lib.js"; + console.log(foo); + `, + "/lib.js": ` + export const foo = 42; + `, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("42"); + }); + + test("in-memory file with relative imports (same directory)", async () => { + const result = await Bun.build({ + entrypoints: ["/entry.js"], + files: { + "/entry.js": ` + import { bar } from "./utils.js"; + console.log(bar); + `, + "/utils.js": ` + export const bar = "relative import works"; + `, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("relative import works"); + }); + + test("in-memory file with relative imports (subdirectory)", async () => { + const result = await Bun.build({ + entrypoints: ["/src/entry.js"], + files: { + "/src/entry.js": ` + import { helper } from "./lib/helper.js"; + console.log(helper); + `, + "/src/lib/helper.js": ` + export const helper = "helper from subdirectory"; + `, + }, + }); + + 
expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("helper from subdirectory"); + }); + + test("in-memory file with relative imports (parent directory)", async () => { + const result = await Bun.build({ + entrypoints: ["/src/app/entry.js"], + files: { + "/src/app/entry.js": ` + import { shared } from "../shared.js"; + console.log(shared); + `, + "/src/shared.js": ` + export const shared = "shared from parent"; + `, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("shared from parent"); + }); + + test("in-memory file with relative imports between multiple files", async () => { + const result = await Bun.build({ + entrypoints: ["/src/index.js"], + files: { + "/src/index.js": ` + import { componentA } from "./components/a.js"; + import { componentB } from "./components/b.js"; + console.log(componentA, componentB); + `, + "/src/components/a.js": ` + import { util } from "../utils/util.js"; + export const componentA = "A:" + util; + `, + "/src/components/b.js": ` + import { util } from "../utils/util.js"; + export const componentB = "B:" + util; + `, + "/src/utils/util.js": ` + export const util = "shared-util"; + `, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("shared-util"); + expect(output).toContain("A:"); + expect(output).toContain("B:"); + }); + + test("in-memory file with nested imports", async () => { + const result = await Bun.build({ + entrypoints: ["/entry.js"], + files: { + "/entry.js": ` + import { a } from "/a.js"; + console.log(a); + `, + "/a.js": ` + import { b } from "/b.js"; + export const a = b + 1; + `, + "/b.js": ` + export const b = 100; + `, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + // Execute the bundle to verify correct behavior + const output = await result.outputs[0].text(); + const fn = new Function(output + "; return typeof a !== 'undefined' ? a : 101;"); + // The bundle should contain the value 100 (from b.js) + expect(output).toContain("100"); + }); + + test("in-memory file with TypeScript", async () => { + const result = await Bun.build({ + entrypoints: ["/entry.ts"], + files: { + "/entry.ts": ` + const x: number = 42; + console.log(x); + `, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("42"); + }); + + test("in-memory file with JSX", async () => { + const result = await Bun.build({ + entrypoints: ["/entry.jsx"], + files: { + "/entry.jsx": ` + const element =
<div>Hello JSX</div>
; + console.log(element); + `, + }, + // Use classic JSX runtime to avoid needing react + jsx: { + runtime: "classic", + factory: "h", + fragment: "Fragment", + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("Hello JSX"); + }); + + test("in-memory file with Blob content", async () => { + const result = await Bun.build({ + entrypoints: ["/entry.js"], + files: { + "/entry.js": new Blob([`console.log("hello from blob");`]), + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("hello from blob"); + }); + + test("in-memory file with Uint8Array content", async () => { + const encoder = new TextEncoder(); + const result = await Bun.build({ + entrypoints: ["/entry.js"], + files: { + "/entry.js": encoder.encode(`console.log("hello from uint8array");`), + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("hello from uint8array"); + }); + + test("in-memory file with ArrayBuffer content", async () => { + const encoder = new TextEncoder(); + const result = await Bun.build({ + entrypoints: ["/entry.js"], + files: { + "/entry.js": encoder.encode(`console.log("hello from arraybuffer");`).buffer, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("hello from arraybuffer"); + }); + + test("in-memory file with re-exports", async () => { + const result = await Bun.build({ + entrypoints: ["/entry.js"], + files: { + "/entry.js": ` + export { foo, bar } from "/lib.js"; + `, + "/lib.js": ` + export const foo = "foo"; + export const bar = "bar"; + `, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("foo"); + expect(output).toContain("bar"); + }); + + test("in-memory file with default export", async () => { + const result = await Bun.build({ + entrypoints: ["/entry.js"], + files: { + "/entry.js": ` + import myDefault from "/lib.js"; + console.log(myDefault); + `, + "/lib.js": ` + export default "default export"; + `, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("default export"); + }); + + test("in-memory file with chained imports", async () => { + const result = await Bun.build({ + entrypoints: ["/entry.js"], + files: { + "/entry.js": ` + import { a } from "/a.js"; + console.log(a); + `, + "/a.js": ` + import { b } from "/b.js"; + export const a = "a" + b; + `, + "/b.js": ` + export const b = "b"; + `, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + // The bundle should contain both string literals from the chain + expect(output).toContain('"a"'); + expect(output).toContain('"b"'); + }); + + test("in-memory file overrides real file on disk", async () => { + // Create a temp directory with a real file + using dir = tempDir("bundler-files-override", { + "entry.js": ` + import { value } from "./lib.js"; + console.log(value); + `, + "lib.js": ` + export const value = "from disk"; + `, + }); + + const entryPath = 
`${dir}/entry.js`; + const libPath = `${dir}/lib.js`; + + // Bundle with in-memory file overriding the real lib.js + const result = await Bun.build({ + entrypoints: [entryPath], + files: { + [libPath]: `export const value = "from memory";`, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + // The in-memory file should override the disk file + expect(output).toContain("from memory"); + expect(output).not.toContain("from disk"); + }); + + test("real file on disk can import in-memory file via relative path", async () => { + // Create a temp directory with a real entry file + using dir = tempDir("bundler-files-mixed", { + "entry.js": ` + import { helper } from "./helper.js"; + console.log(helper); + `, + }); + + const entryPath = `${dir}/entry.js`; + const helperPath = `${dir}/helper.js`; + + // Bundle with entry from disk, but helper.js only in memory + const result = await Bun.build({ + entrypoints: [entryPath], + files: { + [helperPath]: `export const helper = "helper from memory";`, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("helper from memory"); + }); + + test("real file on disk can import nested in-memory files", async () => { + // Create a temp directory with a real entry file + using dir = tempDir("bundler-files-nested-mixed", { + "entry.js": ` + import { util } from "./lib/util.js"; + console.log(util); + `, + }); + + const entryPath = `${dir}/entry.js`; + const utilPath = `${dir}/lib/util.js`; + + // Bundle with entry from disk, but lib/util.js only in memory + const result = await Bun.build({ + entrypoints: [entryPath], + files: { + [utilPath]: `export const util = "nested util from memory";`, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + expect(output).toContain("nested util from memory"); + }); + + test("mixed disk and memory files with complex import graph", async () => { + // Create a temp directory with some real files + using dir = tempDir("bundler-files-complex", { + "entry.js": ` + import { a } from "./a.js"; + import { b } from "./b.js"; + console.log(a, b); + `, + "a.js": ` + import { shared } from "./shared.js"; + export const a = "a:" + shared; + `, + // b.js will be in memory only + // shared.js will be overridden in memory + "shared.js": ` + export const shared = "disk-shared"; + `, + }); + + const entryPath = `${dir}/entry.js`; + const bPath = `${dir}/b.js`; + const sharedPath = `${dir}/shared.js`; + + // Bundle with: + // - entry.js from disk + // - a.js from disk (imports shared.js) + // - b.js from memory (imports shared.js) + // - shared.js overridden in memory + const result = await Bun.build({ + entrypoints: [entryPath], + files: { + [bPath]: ` + import { shared } from "./shared.js"; + export const b = "b:" + shared; + `, + [sharedPath]: `export const shared = "memory-shared";`, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + // Both a.js and b.js should use the memory version of shared.js + expect(output).toContain("memory-shared"); + expect(output).not.toContain("disk-shared"); + }); + + test("relative files keys override relative import specifier", async () => { + // Create a temp directory with a real entry file and a config file on disk + using dir = 
tempDir("bundler-files-relative-keys", { + "entry.js": ` + import { config } from "./config.js"; + console.log(config); + `, + "config.js": ` + export const config = "from disk"; + `, + }); + + const entryPath = `${dir}/entry.js`; + + // Bundle with a relative key in files map that matches the import specifier + // The key should be resolved relative to the entry point + const result = await Bun.build({ + entrypoints: [entryPath], + files: { + [`${dir}/config.js`]: `export const config = "from memory via relative key";`, + }, + }); + + expect(result.success).toBe(true); + expect(result.outputs.length).toBe(1); + + const output = await result.outputs[0].text(); + // The in-memory file should override the disk file + expect(output).toContain("from memory via relative key"); + expect(output).not.toContain("from disk"); + }); + + test("onLoad plugin can transform in-memory files", async () => { + let loadCalled = false; + let loadedPath = ""; + + const result = await Bun.build({ + entrypoints: ["/entry.js"], + files: { + "/entry.js": `import { value } from "./lib.js"; console.log(value);`, + "/lib.js": `export const value = "original";`, + }, + plugins: [ + { + name: "test-onload", + setup(build) { + build.onLoad({ filter: /lib\.js$/ }, args => { + loadCalled = true; + loadedPath = args.path; + return { + contents: `export const value = "transformed by plugin";`, + loader: "js", + }; + }); + }, + }, + ], + }); + + expect(result.success).toBe(true); + expect(loadCalled).toBe(true); + expect(loadedPath).toBe("/lib.js"); + + const output = await result.outputs[0].text(); + expect(output).toContain("transformed by plugin"); + expect(output).not.toContain("original"); + }); + + test("onResolve plugin can redirect in-memory file imports", async () => { + let resolveCalled = false; + + const result = await Bun.build({ + entrypoints: ["/entry.js"], + files: { + "/entry.js": `import { value } from "virtual:data"; console.log(value);`, + "/actual-data.js": `export const value = "from actual-data";`, + }, + plugins: [ + { + name: "test-onresolve", + setup(build) { + build.onResolve({ filter: /^virtual:data$/ }, args => { + resolveCalled = true; + return { + path: "/actual-data.js", + namespace: "file", + }; + }); + }, + }, + ], + }); + + expect(result.success).toBe(true); + expect(resolveCalled).toBe(true); + + const output = await result.outputs[0].text(); + expect(output).toContain("from actual-data"); + }); + + test("plugin can provide content for in-memory file via onLoad", async () => { + const result = await Bun.build({ + entrypoints: ["/entry.js"], + files: { + "/entry.js": `import data from "./data.json"; console.log(data.name);`, + // Provide empty placeholder - plugin will replace content + "/data.json": `{}`, + }, + plugins: [ + { + name: "json-transform", + setup(build) { + build.onLoad({ filter: /\.json$/ }, args => { + return { + contents: `export default { name: "injected by plugin" };`, + loader: "js", + }; + }); + }, + }, + ], + }); + + expect(result.success).toBe(true); + + const output = await result.outputs[0].text(); + expect(output).toContain("injected by plugin"); + }); +});