diff --git a/.prettierignore b/.prettierignore index 5344afdbd8..e69cb33c98 100644 --- a/.prettierignore +++ b/.prettierignore @@ -8,3 +8,5 @@ src/react-refresh.js test/snippets test/js/node/test bun.lock +# formatting adds many levels of indent, doubling the file size +src/bake/incremental_visualizer.html \ No newline at end of file diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 574830db31..b6104bd8d0 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -442,10 +442,14 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { dev.server_transpiler.resolver.watcher = dev.bun_watcher.getResolveWatcher(); dev.client_transpiler.resolver.watcher = dev.bun_watcher.getResolveWatcher(); + dev.client_transpiler.options.barrel_files = try @import("../runtime.zig").Runtime.Features.getDefaultBarrelFiles(allocator); + dev.server_transpiler.options.barrel_files = dev.client_transpiler.options.barrel_files; + if (separate_ssr_graph) { dev.framework.initTranspiler(allocator, &dev.log, .development, .ssr, &dev.ssr_transpiler, &dev.bundler_options.ssr) catch |err| return global.throwError(err, generic_action); dev.ssr_transpiler.options.dev_server = dev; + dev.ssr_transpiler.options.barrel_files = dev.client_transpiler.options.barrel_files; dev.ssr_transpiler.resolver.watcher = dev.bun_watcher.getResolveWatcher(); } @@ -1634,7 +1638,6 @@ fn startAsyncBundle( ); bv2.bun_watcher = dev.bun_watcher; bv2.asynchronous = true; - { dev.graph_safety_lock.lock(); defer dev.graph_safety_lock.unlock(); diff --git a/src/bake/incremental_visualizer.html b/src/bake/incremental_visualizer.html index 31af95998c..9388922f09 100644 --- a/src/bake/incremental_visualizer.html +++ b/src/bake/incremental_visualizer.html @@ -1,407 +1,446 @@ - - - - - IncrementalGraph Visualization - - - + #stat { + font-weight: normal; + } - -

IncrementalGraph Visualization

-
-
-
-
- Stale -
-
-
- Client -
-
-
- HTML -
-
-
- Route -
-
-
- SSR -
-
-
- Server -
-
-
- SSR + Server -
-
+ .vis-tooltip { + background-color: #1e1e2e; + color: #cdd6f4; + padding: 0.5rem; + border-radius: 4px; + border: 1px solid #6c7086; + font-family: monospace; + transition: none; + } + + + +

IncrementalGraph Visualization

+
+
+
+
+ Stale +
+
+
+ Client +
+
+
+ HTML +
+
+
+ Route +
+
+
+ SSR +
+
+
+ Server +
+
+
+ SSR + Server +
+
+ - - +// Add selection handler +network.on("selectNode", function (params) { + const nodeId = params.nodes[0]; + const node = nodes.get(nodeId); + + // Update label to show full path when selected + nodes.update({ + id: nodeId, + label: node.fullPath + }); +}); + +network.on("deselectNode", function (params) { + // Restore original label when deselected + params.previousSelection.nodes.forEach(nodeId => { + const node = nodes.get(nodeId); + nodes.update({ + id: nodeId, + label: basename(node.fullPath) + }); + }); +}); + diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 8bb9355f06..1b93045b64 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -57,6 +57,7 @@ const C = bun.C; const std = @import("std"); const lex = @import("../js_lexer.zig"); const Logger = @import("../logger.zig"); +const Source = Logger.Source; const options = @import("../options.zig"); const js_parser = bun.js_parser; const Part = js_ast.Part; @@ -973,6 +974,7 @@ pub const BundleV2 = struct { .allocator = undefined, .kit_referenced_server_data = false, .kit_referenced_client_data = false, + .barrel_files = .empty, }, .linker = .{ .loop = event_loop, @@ -988,7 +990,9 @@ pub const BundleV2 = struct { }; if (bake_options) |bo| { this.client_transpiler = bo.client_transpiler; + this.client_transpiler.options.barrel_files = this.transpiler.options.barrel_files; this.ssr_transpiler = bo.ssr_transpiler; + this.ssr_transpiler.options.barrel_files = this.transpiler.options.barrel_files; this.framework = bo.framework; this.linker.framework = &this.framework.?; this.plugins = bo.plugins; @@ -2287,6 +2291,14 @@ pub const BundleV2 = struct { on_parse_finalizers.deinit(bun.default_allocator); } + for (this.graph.barrel_files.values()) |*barrel| switch (barrel.*) { + .pending => {}, + .done, .deoptimized => |rq| for (rq.values()) |pt| { + bun.debugAssert(!pt.source_index.isValid()); + bun.default_allocator.destroy(pt); + }, + }; + defer this.graph.ast.deinit(bun.default_allocator); defer this.graph.input_files.deinit(bun.default_allocator); if (this.graph.pool.workers_assignments.count() > 0) { @@ -2385,8 +2397,8 @@ pub const BundleV2 = struct { try this.cloneAST(); this.graph.heap.helpCatchMemoryIssues(); - this.dynamic_import_entry_points = .init(this.graph.allocator); + var html_files: std.AutoArrayHashMapUnmanaged(Index, void) = .{}; // Separate non-failing files into two lists: JS and CSS @@ -2397,17 +2409,20 @@ pub const BundleV2 = struct { const asts = this.graph.ast.slice(); const css_asts = asts.items(.css); + const all_parts = asts.items(.parts); const input_files = this.graph.input_files.slice(); const loaders = input_files.items(.loader); - const sources = input_files.items(.source); + const sources: []Source = input_files.items(.source); for ( - asts.items(.parts)[1..], + all_parts[1..], asts.items(.import_records)[1..], css_asts[1..], asts.items(.target)[1..], + asts.items(.exports_kind)[1..], 1.., - ) |part_list, import_records, maybe_css, target, index| { + ) |part_list, import_records, maybe_css, target, exports_kind, index_raw| { + const source_index = Index.init(index_raw); // Dev Server proceeds even with failed files. // These files are filtered out via the lack of any parts. // @@ -2417,11 +2432,11 @@ pub const BundleV2 = struct { // CSS has restrictions on what files can be imported. // This means the file can become an error after // resolution, which is not usually the case. 
- css_total_files.appendAssumeCapacity(Index.init(index)); + css_total_files.appendAssumeCapacity(source_index); var log = Logger.Log.init(this.graph.allocator); defer log.deinit(); if (this.linker.scanCSSImports( - @intCast(index), + @intCast(source_index.get()), import_records.slice(), css_asts, sources, @@ -2434,21 +2449,36 @@ pub const BundleV2 = struct { try dev_server.handleParseTaskFailure( error.InvalidCssImport, .client, - sources[index].path.text, + sources[source_index.get()].path.text, &log, ); // Since there is an error, do not treat it as a // valid CSS chunk. - _ = start.css_entry_points.swapRemove(Index.init(index)); + _ = start.css_entry_points.swapRemove(source_index); } } else { + if (exports_kind == .esm_barrel_file) { + // Barrel files exist in the parse graph, but all + // import records have been disconnected from the + // barrel, so this file can be skipped unless it + // was de-optimized. + const key = sources[source_index.get()].path.hashKey() ^ bun.hash(std.mem.asBytes(&target)); + const barrel = this.graph.barrel_files.get(key) orelse { + bun.debugAssert(false); + continue; + }; + bun.debugAssert(barrel != .pending); + if (barrel != .deoptimized) + continue; + } + // HTML files are special cased because they correspond // to routes in DevServer. They have a JS chunk too, // derived off of the import record list. - if (loaders[index] == .html) { - try html_files.put(this.graph.allocator, Index.init(index), {}); + if (loaders[source_index.get()] == .html) { + try html_files.put(this.graph.allocator, source_index, {}); } else { - js_files.appendAssumeCapacity(Index.init(index)); + js_files.appendAssumeCapacity(source_index); // Mark every part live. for (part_list.slice()) |*p| { @@ -2460,7 +2490,7 @@ pub const BundleV2 = struct { for (import_records.slice()) |*record| { if (!record.source_index.isValid()) continue; if (loaders[record.source_index.get()] != .css) continue; - if (asts.items(.parts)[record.source_index.get()].len == 0) { + if (all_parts[record.source_index.get()].len == 0) { record.source_index = Index.invalid; continue; } @@ -2474,7 +2504,7 @@ pub const BundleV2 = struct { } } else { // Treat empty CSS files for removal. 
- _ = start.css_entry_points.swapRemove(Index.init(index)); + _ = start.css_entry_points.swapRemove(source_index); } } @@ -2756,8 +2786,8 @@ pub const BundleV2 = struct { estimated_resolve_queue_count += @as(usize, @intFromBool(!(import_record.is_internal or import_record.is_unused or import_record.source_index.isValid()))); } - var resolve_queue = ResolveQueue.init(this.graph.allocator); - resolve_queue.ensureTotalCapacity(estimated_resolve_queue_count) catch bun.outOfMemory(); + var resolve_queue: ResolveQueue = .empty; + resolve_queue.ensureTotalCapacity(this.graph.allocator, estimated_resolve_queue_count) catch bun.outOfMemory(); var last_error: ?anyerror = null; @@ -2997,15 +3027,51 @@ pub const BundleV2 = struct { continue; } + defer if (import_record.tag == .barrel) { + const key = path.hashKey() ^ bun.hash(std.mem.asBytes(&target)); + const gop = this.graph.barrel_files.getOrPut(this.graph.allocator, key) catch bun.outOfMemory(); + if (!gop.found_existing) { + gop.value_ptr.* = .{ .pending = .empty }; + } + switch (gop.value_ptr.*) { + .pending => |*items| items.append(this.graph.allocator, .{ + .importer_source_index = source.index, + .import_record_index = .init(@intCast(i)), + }) catch bun.outOfMemory(), + .done, .deoptimized => |*rq| { + const barrel_source_index = import_record.source_index; + bun.assert(barrel_source_index.isValid()); + switch (this.processBarrelRecord(.{ + .resolve_queue = rq, + .importer_source_index = source.index, + .importer_named_imports = &ast.named_imports, + .importer_record = import_record, + .importer_record_index = .init(@intCast(i)), + .barrel_named_exports = &this.graph.ast.items(.named_exports)[barrel_source_index.get()], + .barrel_named_imports = &this.graph.ast.items(.named_imports)[barrel_source_index.get()], + .barrel_import_records = this.graph.ast.items(.import_records)[barrel_source_index.get()].slice(), + .path_to_source_index_map = this.pathToSourceIndexMap(target), + })) { + .reused_parse_task, .not_found => {}, + .new_parse_task => this.incrementScanCounter(), + .deoptimize => { + const rq_copy = rq.*; // avoid possible RLS aliasing + gop.value_ptr.* = .{ .deoptimized = rq_copy }; + }, + } + }, + } + }; + if (this.transpiler.options.dev_server) |dev_server| brk: { if (loader == .css) { // Do not use cached files for CSS. 
break :brk; } - import_record.source_index = Index.invalid; - if (dev_server.isFileCached(path.text, bake_graph)) |entry| { + import_record.source_index = Index.invalid; + const rel = bun.path.relativePlatform(this.transpiler.fs.top_level_dir, path.text, .loose, false); if (loader == .html and entry.kind == .asset) { // Overload `path.text` to point to the final URL @@ -3033,17 +3099,17 @@ pub const BundleV2 = struct { const hash_key = path.hashKey(); if (this.pathToSourceIndexMap(target).get(hash_key)) |id| { + import_record.source_index = Index.init(id); if (this.transpiler.options.dev_server != null and loader != .html) { import_record.path = this.graph.input_files.items(.source)[id].path; - } else { - import_record.source_index = Index.init(id); } continue; } - const resolve_entry = resolve_queue.getOrPut(hash_key) catch bun.outOfMemory(); + const resolve_entry = resolve_queue.getOrPut(this.graph.allocator, hash_key) catch bun.outOfMemory(); if (resolve_entry.found_existing) { import_record.path = resolve_entry.value_ptr.*.path; + import_record.source_index = resolve_entry.value_ptr.*.source_index; continue; } @@ -3067,6 +3133,7 @@ pub const BundleV2 = struct { resolve_task.secondary_path_for_commonjs_interop = secondary_path_to_copy; resolve_task.known_target = target; resolve_task.jsx = resolve_result.jsx; + resolve_task.is_barrel_file = import_record.tag == .barrel; resolve_task.jsx.development = switch (transpiler.options.force_node_env) { .development => true, .production => false, @@ -3074,20 +3141,18 @@ pub const BundleV2 = struct { }; // Figure out the loader. - { - if (import_record.tag.loader()) |l| { - resolve_task.loader = l; - } + if (import_record.tag.loader()) |l| { + resolve_task.loader = l; + } - if (resolve_task.loader == null) { - resolve_task.loader = path.loader(&this.transpiler.options.loaders); - resolve_task.tree_shaking = this.transpiler.options.tree_shaking; - } + if (resolve_task.loader == null) { + resolve_task.loader = path.loader(&this.transpiler.options.loaders); + resolve_task.tree_shaking = this.transpiler.options.tree_shaking; + } - // HTML must be an entry point. - if (resolve_task.loader) |*l| { - l.* = l.disableHTML(); - } + // HTML must be an entry point. 
+ if (resolve_task.loader) |*l| { + l.* = l.disableHTML(); } resolve_entry.value_ptr.* = resolve_task; @@ -3095,7 +3160,7 @@ pub const BundleV2 = struct { if (last_error) |err| { debug("failed with error: {s}", .{@errorName(err)}); - resolve_queue.clearAndFree(); + resolve_queue.clearAndFree(this.graph.allocator); parse_result.value = .{ .err = .{ .err = err, @@ -3110,8 +3175,6 @@ pub const BundleV2 = struct { return resolve_queue; } - const ResolveQueue = std.AutoArrayHashMap(u64, *ParseTask); - pub fn onNotifyDefer(this: *BundleV2) void { this.thread_lock.assertLocked(); this.graph.deferred_pending += 1; @@ -3122,6 +3185,83 @@ pub const BundleV2 = struct { this.onNotifyDefer(); } + fn processEnqueuedResolveTask(this: *BundleV2, hash: u64, value: *ParseTask, path_to_source_index_map: *PathToSourceIndexMap, import_records_to_update_source_index: []const *ImportRecord) bool { + var existing = path_to_source_index_map.getOrPut(this.graph.allocator, hash) catch unreachable; + + // If the same file is imported and required, and those point to different files + // Automatically rewrite it to the secondary one + if (value.secondary_path_for_commonjs_interop) |secondary_path| { + const secondary_hash = secondary_path.hashKey(); + if (path_to_source_index_map.get(secondary_hash)) |secondary| { + existing.found_existing = true; + existing.value_ptr.* = secondary; + } + } + + if (!existing.found_existing) { + var new_task: *ParseTask = value; + var new_input_file = Graph.InputFile{ + .source = Logger.Source.initEmptyFile(new_task.path.text), + .side_effects = value.side_effects, + }; + + const loader = new_task.loader orelse new_input_file.source.path.loader(&this.transpiler.options.loaders) orelse options.Loader.file; + + new_input_file.source.index = Index.source(this.graph.input_files.len); + new_input_file.source.path = new_task.path; + + // We need to ensure the loader is set or else importstar_ts/ReExportTypeOnlyFileES6 will fail. 
+ new_input_file.loader = loader; + + existing.value_ptr.* = new_input_file.source.index.get(); + new_task.source_index = new_input_file.source.index; + + new_task.ctx = this; + this.graph.input_files.append(bun.default_allocator, new_input_file) catch unreachable; + this.graph.ast.append(bun.default_allocator, JSAst.empty) catch unreachable; + + for (import_records_to_update_source_index) |record| { + record.source_index = new_task.source_index; + record.path = new_task.path; + } + + if (this.enqueueOnLoadPluginIfNeeded(new_task)) { + return true; + } + + if (loader.shouldCopyForBundling()) { + var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[new_task.source_index.get()]; + additional_files.push(this.graph.allocator, .{ .source_index = new_task.source_index.get() }) catch unreachable; + new_input_file.side_effects = _resolver.SideEffects.no_side_effects__pure_data; + this.graph.estimated_file_loader_count += 1; + } + + // schedule as early as possible + this.graph.pool.pool.schedule(ThreadPoolLib.Batch.from(&new_task.task)); + + return true; + } else { + const loader = value.loader orelse + this.graph.input_files.items(.source)[existing.value_ptr.*].path.loader(&this.transpiler.options.loaders) orelse + options.Loader.file; + + if (loader.shouldCopyForBundling()) { + var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[existing.value_ptr.*]; + additional_files.push(this.graph.allocator, .{ .source_index = existing.value_ptr.* }) catch unreachable; + this.graph.estimated_file_loader_count += 1; + } + + for (import_records_to_update_source_index) |record| { + record.source_index = .init(existing.value_ptr.*); + record.path = value.path; + } + + bun.default_allocator.destroy(value); + } + + return false; + } + pub fn onParseTaskComplete(parse_result: *ParseTask.Result, this: *BundleV2) void { const trace = tracer(@src(), "onParseTaskComplete"); defer trace.end(); @@ -3150,8 +3290,8 @@ pub const BundleV2 = struct { this.onAfterDecrementScanCounter(); } - var resolve_queue = ResolveQueue.init(this.graph.allocator); - defer resolve_queue.deinit(); + var resolve_queue: ResolveQueue = .empty; + defer resolve_queue.deinit(this.graph.allocator); var process_log = true; if (parse_result.value == .success) { @@ -3227,82 +3367,67 @@ pub const BundleV2 = struct { result.ast.named_exports.count(), }); - var iter = resolve_queue.iterator(); - + var import_records = result.ast.import_records.clone(this.graph.allocator) catch unreachable; const path_to_source_index_map = this.pathToSourceIndexMap(result.ast.target); - while (iter.next()) |entry| { - const hash = entry.key_ptr.*; - const value = entry.value_ptr.*; - var existing = path_to_source_index_map.getOrPut(graph.allocator, hash) catch unreachable; + if (result.is_barrel_file) { + result.ast.exports_kind = .esm_barrel_file; - // If the same file is imported and required, and those point to different files - // Automatically rewrite it to the secondary one - if (value.secondary_path_for_commonjs_interop) |secondary_path| { - const secondary_hash = secondary_path.hashKey(); - if (path_to_source_index_map.get(secondary_hash)) |secondary| { - existing.found_existing = true; - existing.value_ptr.* = secondary; + // Process all import requests to this barrel file, with each one + // - Rewriting the importer's `import_record.source_index` to match + // - Ensuring the parse task is queued if it was found in `resolve_queue` + // + // Then, the state is changed into 
`.done`, and followup + // requests have the resolve queue to work off of. BundleV2.deinit + // cleans up unqueued *ParseTask objects. + const key = result.source.path.hashKey() ^ bun.hash(std.mem.asBytes(&result.ast.target)); + const barrel_ptr = this.graph.barrel_files.getPtr(key) orelse + @panic("Internal assertion failure: missing barrel file entry"); + bun.assert(barrel_ptr.* == .pending); + const pending = &barrel_ptr.pending; + + const all_import_record_lists = this.graph.ast.items(.import_records); + const all_named_imports: []JSAst.NamedImports = this.graph.ast.items(.named_imports); + + var had_barrel_deoptimization = false; + for (pending.items) |request| { + const request_import_records = all_import_record_lists[request.importer_source_index.get()].slice(); + switch (this.processBarrelRecord(.{ + .resolve_queue = &resolve_queue, + .importer_source_index = request.importer_source_index, + .importer_named_imports = &all_named_imports[request.importer_source_index.get()], + .importer_record = &request_import_records[request.import_record_index.get()], + .importer_record_index = request.import_record_index, + .barrel_named_exports = &result.ast.named_exports, + .barrel_named_imports = &result.ast.named_imports, + .barrel_import_records = result.ast.import_records.slice(), + .path_to_source_index_map = path_to_source_index_map, + })) { + .reused_parse_task, .not_found => {}, + .new_parse_task => diff += 1, + .deoptimize => { + had_barrel_deoptimization = true; + break; + }, } } - if (!existing.found_existing) { - var new_task: *ParseTask = value; - var new_input_file = Graph.InputFile{ - .source = Logger.Source.initEmptyFile(new_task.path.text), - .side_effects = value.side_effects, - }; + pending.deinit(this.graph.allocator); + barrel_ptr.* = if (had_barrel_deoptimization) + .{ .deoptimized = resolve_queue } + else + .{ .done = resolve_queue }; + resolve_queue = .empty; + } else { + var iter = resolve_queue.iterator(); - const loader = new_task.loader orelse new_input_file.source.path.loader(&this.transpiler.options.loaders) orelse options.Loader.file; - - new_input_file.source.index = Index.source(graph.input_files.len); - new_input_file.source.path = new_task.path; - - // We need to ensure the loader is set or else importstar_ts/ReExportTypeOnlyFileES6 will fail. 
- new_input_file.loader = loader; - - existing.value_ptr.* = new_input_file.source.index.get(); - new_task.source_index = new_input_file.source.index; - - new_task.ctx = this; - graph.input_files.append(bun.default_allocator, new_input_file) catch unreachable; - graph.ast.append(bun.default_allocator, JSAst.empty) catch unreachable; - diff += 1; - - if (this.enqueueOnLoadPluginIfNeeded(new_task)) { - continue; - } - - if (loader.shouldCopyForBundling()) { - var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[result.source.index.get()]; - additional_files.push(this.graph.allocator, .{ .source_index = new_task.source_index.get() }) catch unreachable; - new_input_file.side_effects = _resolver.SideEffects.no_side_effects__pure_data; - graph.estimated_file_loader_count += 1; - } - - // schedule as early as possible - graph.pool.pool.schedule(ThreadPoolLib.Batch.from(&new_task.task)); - } else { - const loader = value.loader orelse - graph.input_files.items(.source)[existing.value_ptr.*].path.loader(&this.transpiler.options.loaders) orelse - options.Loader.file; - - if (loader.shouldCopyForBundling()) { - var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[result.source.index.get()]; - additional_files.push(this.graph.allocator, .{ .source_index = existing.value_ptr.* }) catch unreachable; - graph.estimated_file_loader_count += 1; - } - - bun.default_allocator.destroy(value); + while (iter.next()) |entry| { + diff += @intFromBool(this.processEnqueuedResolveTask(entry.key_ptr.*, entry.value_ptr.*, path_to_source_index_map, &.{})); } } - var import_records = result.ast.import_records.clone(this.graph.allocator) catch unreachable; - const input_file_loaders = this.graph.input_files.items(.loader); - const save_import_record_source_index = this.transpiler.options.dev_server == null or - result.loader == .html or - result.loader == .css; + const save_import_record_source_index = true; if (this.resolve_tasks_waiting_for_import_source_index.fetchSwapRemove(result.source.index.get())) |pending_entry| { for (pending_entry.value.slice()) |to_assign| { @@ -3441,8 +3566,90 @@ pub const BundleV2 = struct { pub fn bustDirCache(vm: *BundleV2, path: []const u8) bool { return vm.transpiler.resolver.bustDirCache(path); } + + const ProcessBarrelResult = enum { + reused_parse_task, + /// Ignoring this is OK because the linker will just raise a bundler error. + not_found, + /// Increment pending count by one please. + new_parse_task, + /// Should convert the barrel into a regular file. + deoptimize, + }; + pub fn processBarrelRecord(this: *BundleV2, opts: struct { + resolve_queue: *ResolveQueue, + importer_source_index: Source.Index, + importer_named_imports: *JSAst.NamedImports, + importer_record: *ImportRecord, + importer_record_index: ImportRecord.Index, + barrel_named_exports: *JSAst.NamedExports, + barrel_named_imports: *JSAst.NamedImports, + barrel_import_records: []ImportRecord, + path_to_source_index_map: *PathToSourceIndexMap, + }) ProcessBarrelResult { + bun.assert(opts.importer_record.tag == .barrel); + + // Locate the named import entry for this record. 
+ // TODO: avoid this loop + const named_import: *js_ast.NamedImport = for (opts.importer_named_imports.values()) |*named_import| { + if (named_import.import_record_index == opts.importer_record_index.get()) + break named_import; + } else { + bun.debugAssert(false); + return .deoptimize; + }; + + // Locate the corresponding export in THIS file + const alias = named_import.alias orelse return .deoptimize; + const barrel_named_export = opts.barrel_named_exports.get(alias) orelse + return .not_found; // import does not exist + + // Locate the import it maps to. + const barrel_named_import = opts.barrel_named_imports.get(barrel_named_export.ref) orelse + return .deoptimize; // not a re-export :( + + // TODO: dig through multiple layers of exports? + + const barrel_import_record = &opts.barrel_import_records[barrel_named_import.import_record_index]; + const result: ProcessBarrelResult = if (barrel_import_record.source_index.isValid()) res: { + opts.importer_record.source_index = barrel_import_record.source_index; + opts.importer_record.path = barrel_import_record.path; + break :res .reused_parse_task; + } else res: { + const hash = barrel_import_record.path.hashKey(); + const entry = opts.resolve_queue.fetchSwapRemove(hash) orelse { + // cached in incremental graph. + bun.debugAssert(this.transpiler.options.dev_server != null); + opts.importer_record.path = barrel_import_record.path; + opts.importer_record.source_index = .invalid; + break :res .reused_parse_task; + }; + break :res if (this.processEnqueuedResolveTask( + entry.key, + entry.value, + opts.path_to_source_index_map, + &.{ opts.importer_record, barrel_import_record }, + )) .new_parse_task else .reused_parse_task; + }; + + // This code is extremely silly, but at this point the list of import + // `ClauseItem`s is not known, so the namespace_alias here (unused by + // default) is used to communicate the renamed alias. + bun.debugAssert(this.transpiler.options.dev_server != null); + if (barrel_named_import.alias) |a| { + const namespace_ref = named_import.namespace_ref.?; + const symbol: *js_ast.Symbol = this.graph.ast.items(.symbols)[opts.importer_source_index.get()].mut(namespace_ref.inner_index); + symbol.namespace_alias = .{ .alias = a, .namespace_ref = .None }; + } + + return result; + } }; +/// Deduplicated path hashes -> *ParseTask. When *ParseTask has a source index +/// set, it has been queued. otherwise, it is not enqueued. +const ResolveQueue = std.AutoArrayHashMapUnmanaged(u64, *ParseTask); + /// Used to keep the bundle thread from spinning on Windows pub fn timerCallback(_: *bun.windows.libuv.Timer) callconv(.C) void {} @@ -3687,6 +3894,7 @@ pub const ParseTask = struct { ctx: *BundleV2, package_version: string = "", is_entry_point: bool = false, + is_barrel_file: bool = false, /// This is set when the file is an entrypoint, and it has an onLoad plugin. /// In this case we want to defer adding this to additional_files until after /// the onLoad plugin has finished. @@ -3728,7 +3936,7 @@ pub const ParseTask = struct { log: Logger.Log, use_directive: UseDirective, side_effects: _resolver.SideEffects, - + is_barrel_file: bool = false, /// Used by "file" loader files. unique_key_for_additional_file: []const u8 = "", /// Used by "file" loader files. 
@@ -4783,6 +4991,7 @@ pub const ParseTask = struct { opts.features.inlining = transpiler.options.minify_syntax; opts.output_format = output_format; opts.features.minify_syntax = transpiler.options.minify_syntax; + opts.features.barrel_files = transpiler.options.barrel_files; opts.features.minify_identifiers = transpiler.options.minify_identifiers; opts.features.emit_decorator_metadata = transpiler.options.emit_decorator_metadata; opts.features.unwrap_commonjs_packages = transpiler.options.unwrap_commonjs_packages; @@ -4862,7 +5071,7 @@ pub const ParseTask = struct { .unique_key_for_additional_file = unique_key_for_additional_file.key, .side_effects = task.side_effects, .loader = loader, - + .is_barrel_file = task.is_barrel_file, // Hash the files in here so that we do it in parallel. .content_hash_for_additional_file = if (loader.shouldCopyForBundling()) unique_key_for_additional_file.content_hash @@ -5391,12 +5600,17 @@ pub const Graph = struct { additional_output_files: std.ArrayListUnmanaged(options.OutputFile) = .{}, + /// Map from a key describing a barrel file to its state. + /// Keys are `path.hashKey() ^ bun.hash(std.mem.asBytes(&target))`. + /// Values contain pointers to globally allocated memory that is freed in BundleV2.deinit. + barrel_files: std.AutoArrayHashMapUnmanaged(u64, BarrelState), + kit_referenced_server_data: bool, kit_referenced_client_data: bool, pub const InputFile = struct { source: Logger.Source, - loader: options.Loader = options.Loader.file, + loader: options.Loader = .file, side_effects: _resolver.SideEffects, allocator: std.mem.Allocator = bun.default_allocator, additional_files: BabyList(AdditionalFile) = .{}, @@ -5426,6 +5640,20 @@ pub const Graph = struct { } }; +pub const BarrelState = union(enum) { + /// List of barrel import records to resolve + pending: std.ArrayListUnmanaged(PendingEntry), + /// Refer to this map and the barrel's source index to look up an entry. + done: ResolveQueue, + /// The barrel was de-optimized at least once, but still try to use it. + deoptimized: ResolveQueue, + + pub const PendingEntry = struct { + importer_source_index: Source.Index, + import_record_index: ImportRecord.Index, + }; +}; + pub const AdditionalFile = union(enum) { source_index: Index.Int, output_file: Index.Int, @@ -6118,7 +6346,7 @@ pub const LinkerContext = struct { return true; } - fn load( + pub fn load( this: *LinkerContext, bundle: *BundleV2, entry_points: []Index, @@ -12638,15 +12866,12 @@ pub const LinkerContext = struct { /// The conversion logic is completely different for format .internal_bake_dev fn convertStmtsForChunkForBake( c: *LinkerContext, - source_index: u32, stmts: *StmtList, part_stmts: []const js_ast.Stmt, allocator: std.mem.Allocator, ast: *const JSAst, ) !void { - _ = source_index; // may be used - - const receiver_args = try allocator.dupe(G.Arg, &.{ + const default_receiver_args = try allocator.dupe(G.Arg, &.{ .{ .binding = Binding.alloc(allocator, B.Identifier{ .ref = ast.module_ref }, Logger.Loc.Empty) }, }); const module_id = Expr.initIdentifier(ast.module_ref, Logger.Loc.Empty); @@ -12670,7 +12895,7 @@ pub const LinkerContext = struct { // pretty path is not yet known. the other statement types // are not handled here because some of those generate // new local variables (it is too late to do that here). 
- const record = ast.import_records.at(st.import_record_index); + var record = ast.import_records.at(st.import_record_index); const is_bare_import = st.star_name_loc == null and st.items.len == 0 and st.default_name == null; @@ -12680,6 +12905,54 @@ else true; + // Barrel imports need to have proper import symbols. Before this change, it would print like: + // + // var import_pkg = await module.importStmt(".../icon.js", (module) => ..., "Icon1"); + // var import_pkg = await module.importStmt(".../icon2.js", (module) => ..., "Icon2"); + // [import_pkg.Icon1, import_pkg.Icon2] + // + // The goal is to get this printing: + // var import_pkg_Icon1 = await module.importStmt(".../icon.js", ({ default: module }) => ..., "default"); + // var import_pkg_Icon2 = await module.importStmt(".../icon2.js", ({ default: module }) => ..., "default"); + // [import_pkg_Icon1, import_pkg_Icon2] + var namespace_ref = st.namespace_ref; + var receiver_args = default_receiver_args; + var barrel_actual_alias: ?[]const u8 = null; + if (record.tag == .barrel and !is_bare_import and is_enabled) brk: { + const symbols = ast.symbols; + bun.assert(st.items.len == 1 or st.default_name != null); + + const ref: Ref = if (st.items.len > 0) + st.items[0].name.ref.? + else if (st.default_name) |def| + def.ref.? + else { + bun.debugAssert(false); + break :brk; + }; + const sym = symbols.mut(ref.inner_index); + sym.namespace_alias = null; + + barrel_actual_alias = if (symbols.at(namespace_ref.inner_index).namespace_alias) |nsa| + nsa.alias + else + sym.original_name; + + namespace_ref = ref; + + // ({ actual_alias: module }) => ... + // ------------------------ this destructuring + receiver_args = try allocator.dupe(G.Arg, &.{ + .{ .binding = Binding.alloc(allocator, B.Object{ + .is_single_line = true, + .properties = try allocator.dupe(B.Property, &.{.{ + .key = Expr.init(E.String, .{ .data = barrel_actual_alias.? 
}, .Empty), + .value = Binding.alloc(allocator, B.Identifier{ .ref = ast.module_ref }, .Empty), + }}), + }, .Empty) }, + }); + } + // module.importSync('path', (module) => ns = module, ['dep', 'etc']) const call = if (is_enabled) call: { const path = if (record.source_index.isValid()) @@ -12693,10 +12966,15 @@ pub const LinkerContext = struct { .data = path.pretty, }, stmt.loc); - const items = try allocator.alloc(Expr, st.items.len); - for (st.items, items) |item, *str| { - str.* = Expr.init(E.String, .{ .data = item.alias }, item.name.loc); - } + const items = if (barrel_actual_alias) |alias| + try allocator.dupe(Expr, &.{Expr.init(E.String, .{ .data = alias }, .Empty)}) + else brk: { + const items = try allocator.alloc(Expr, st.items.len); + for (st.items, items) |item, *str| { + str.* = Expr.init(E.String, .{ .data = item.alias }, item.name.loc); + } + break :brk items; + }; const expr = Expr.init(E.Call, .{ .target = Expr.init(E.Dot, .{ @@ -12718,7 +12996,7 @@ pub const LinkerContext = struct { .body = .{ .stmts = try allocator.dupe(Stmt, &.{Stmt.alloc(S.Return, .{ .value = Expr.assign( - Expr.initIdentifier(st.namespace_ref, st.star_name_loc orelse stmt.loc), + Expr.initIdentifier(namespace_ref, st.star_name_loc orelse stmt.loc), module_id, ), }, stmt.loc)}), @@ -12744,14 +13022,24 @@ pub const LinkerContext = struct { try stmts.inside_wrapper_prefix.append(Stmt.alloc(S.SExpr, .{ .value = call }, stmt.loc)); } else { // 'var namespace = module.importSync(...)' + const binding = Binding.alloc( + allocator, + B.Identifier{ .ref = namespace_ref }, + st.star_name_loc orelse stmt.loc, + ); try stmts.inside_wrapper_prefix.append(Stmt.alloc(S.Local, .{ .kind = .k_var, // remove a tdz .decls = try G.Decl.List.fromSlice(allocator, &.{.{ - .binding = Binding.alloc( - allocator, - B.Identifier{ .ref = st.namespace_ref }, - st.star_name_loc orelse stmt.loc, - ), + .binding = if (barrel_actual_alias) |alias| + Binding.alloc(allocator, B.Object{ + .is_single_line = true, + .properties = try allocator.dupe(B.Property, &.{.{ + .key = Expr.init(E.String, .{ .data = alias }, .Empty), + .value = binding, + }}), + }, .Empty) + else + binding, .value = call, }}), }, stmt.loc)); @@ -12814,7 +13102,7 @@ pub const LinkerContext = struct { } for (parts) |part| { - c.convertStmtsForChunkForBake(part_range.source_index.get(), stmts, part.stmts, allocator, &ast) catch |err| + c.convertStmtsForChunkForBake(stmts, part.stmts, allocator, &ast) catch |err| return .{ .err = err }; } @@ -15499,7 +15787,7 @@ pub const LinkerContext = struct { } } - const ExportStarContext = struct { + pub const ExportStarContext = struct { import_records_list: []const ImportRecord.List, source_index_stack: std.ArrayList(Index.Int), exports_kind: []js_ast.ExportsKind, diff --git a/src/import_record.zig b/src/import_record.zig index ab1ff7958d..32cb11eca7 100644 --- a/src/import_record.zig +++ b/src/import_record.zig @@ -3,7 +3,7 @@ const bun = @import("root").bun; const logger = bun.logger; const std = @import("std"); const Ref = @import("ast/base.zig").Ref; -const Index = @import("ast/base.zig").Index; +const Source = logger.Source; const Api = @import("./api/schema.zig").Api; pub const ImportKind = enum(u8) { @@ -98,12 +98,14 @@ pub const ImportKind = enum(u8) { }; pub const ImportRecord = struct { + pub const Index = bun.GenericIndex(u32, ImportRecord); + range: logger.Range, path: fs.Path, kind: ImportKind, tag: Tag = .none, - source_index: Index = Index.invalid, + source_index: Source.Index = .invalid, print_mode: PrintMode = 
.normal, @@ -189,6 +191,8 @@ pub const ImportRecord = struct { /// crossover to the SSR graph. See bake.Framework.ServerComponents.separate_ssr_graph bake_resolve_to_ssr_graph, + barrel, + with_type_sqlite, with_type_sqlite_embedded, with_type_text, diff --git a/src/js_ast.zig b/src/js_ast.zig index b8b81ea322..f967ee4386 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -6953,13 +6953,6 @@ pub const Ast = struct { }; } - pub fn initTest(parts: []Part) Ast { - return Ast{ - .parts = Part.List.init(parts), - .runtime_imports = .{}, - }; - } - pub const empty = Ast{ .parts = Part.List{}, .runtime_imports = .{} }; pub fn toJSON(self: *const Ast, _: std.mem.Allocator, stream: anytype) !void { @@ -7403,6 +7396,10 @@ pub const ExportsKind = enum { // module. esm_with_dynamic_fallback_from_cjs, + /// This file is an optimized barrel file, and itself shouldn't be reachable, + /// as all importers get their import records rewritten to point directly at + /// the files the barrel re-exports. + esm_barrel_file, + const dynamic = std.EnumSet(ExportsKind).init(.{ .esm_with_dynamic_fallback = true, .esm_with_dynamic_fallback_from_cjs = true, diff --git a/src/js_parser.zig b/src/js_parser.zig index be36b1cef9..440b3323c4 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -18963,6 +18963,67 @@ fn NewParser_( for (data.items) |*item| { try p.recordDeclaredSymbol(item.name.ref.?); } + + // we are importing something from a barrel file + if (p.options.features.barrel_files) |barrel_files| { + if (barrel_files.contains(p.import_records.items[data.import_record_index].path.text)) { + const existing_import_record_idx = stmt.data.s_import.import_record_index; + const existing_import_record = &p.import_records.items[existing_import_record_idx]; + // mark it so we can recognize it later in onParseTaskComplete + existing_import_record.tag = .barrel; + + // if we import more than one thing in this statement, break up each + // individual import into its own statement so we can rewrite each path: + // + // ```ts + // /* before */ + // import { Ooga, Booga } from 'dictionary' + // + // /* after */ + // import { Ooga } from 'dictionary/words/Ooga.js' + // import { Booga } from 'dictionary/words/Booga.js' + // ``` + // + // I don't want to make N allocations of arrays that each have 1 item, + // that is dumb. So we're just going to slice the array. This is fine because + // everything here is arena allocated. + if (data.items.len >= 1) { + const old_items = data.items; + data.items = &.{}; + for (old_items, 0..) 
|*item, i| { + const new_items = p.allocator.dupe(js_ast.ClauseItem, item[0..1]) catch unreachable; + p.symbols.items[new_items[0].name.ref.?.inner_index].namespace_alias = null; + if (i == 0) { + data.items = new_items; + try stmts.append(stmt.*); + } else { + const new_import_record_idx = p.import_records.items.len; + try p.import_records.append(existing_import_record.*); + const name = p.loadNameFromRef(data.namespace_ref); + const namespace_ref = try p.newSymbol(.other, name); + + try stmts.append(p.s( + S.Import{ + .items = new_items, + .import_record_index = @truncate(new_import_record_idx), + .namespace_ref = namespace_ref, + // TODO(zack): support this later + .default_name = null, + .is_single_line = true, + // TODO(zack): support this later + .star_name_loc = null, + }, + item.alias_loc, + )); + } + } + } + + return; + } + } } try stmts.append(stmt.*); @@ -24206,6 +24267,10 @@ pub const ConvertESMExportsForHmr = struct { return; // do not emit a statement here }, .s_export_from => |st| { + if (p.import_records.items[st.import_record_index].tag == .barrel) { + return; + } + const namespace_ref = try ctx.deduplicatedImport( p, st.import_record_index, @@ -24239,6 +24304,13 @@ pub const ConvertESMExportsForHmr = struct { return; }, .s_export_star => |st| { + // we split out barrel imports into separate statements + // ... we don't want to deduplicate them back into a single statement + // here lol + if (p.import_records.items[st.import_record_index].tag == .barrel) { + return; + } + const namespace_ref = try ctx.deduplicatedImport( p, st.import_record_index, @@ -24258,6 +24330,14 @@ // named/default imports here as we always rewrite them as // full qualified property accesses (needed for live-bindings) .s_import => |st| { + // we split out barrel imports into separate statements + // ... 
we don't want to deduplicate them back into a single statement + // here lol + if (p.import_records.items[st.import_record_index].tag == .barrel) { + try ctx.stmts.append(p.allocator, stmt); + return; + } + _ = try ctx.deduplicatedImport( p, st.import_record_index, diff --git a/src/js_printer.zig b/src/js_printer.zig index a9fcaa8640..8bbe83f01d 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -5874,7 +5874,7 @@ pub fn printJSON( } }; var stmts = [_]js_ast.Stmt{stmt}; var parts = [_]js_ast.Part{.{ .stmts = &stmts }}; - const ast = Ast.initTest(&parts); + const ast = Ast.fromParts(&parts); const list = js_ast.Symbol.List.init(ast.symbols.slice()); const nested_list = js_ast.Symbol.NestedList.init(&[_]js_ast.Symbol.List{list}); var renamer = rename.NoOpRenamer.init(js_ast.Symbol.Map.initList(nested_list), source); diff --git a/src/logger.zig b/src/logger.zig index ecb5ca5a94..2f0ce0df16 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -19,7 +19,6 @@ const Ref = @import("./ast/base.zig").Ref; const expect = std.testing.expect; const assert = bun.assert; const StringBuilder = bun.StringBuilder; -const Index = @import("./ast/base.zig").Index; const OOM = bun.OOM; const JSError = bun.JSError; @@ -1331,6 +1330,8 @@ pub inline fn usize2Loc(loc: usize) Loc { } pub const Source = struct { + pub const Index = @import("./ast/base.zig").Index; + path: fs.Path, contents: string, diff --git a/src/options.zig b/src/options.zig index 0a9ac88ef2..07c35b335c 100644 --- a/src/options.zig +++ b/src/options.zig @@ -1606,6 +1606,9 @@ pub const BundleOptions = struct { ignore_module_resolution_errors: bool = false, + /// Enable barrel file optimization + barrel_files: ?*const bun.StringHashMap(void) = null, + pub const ForceNodeEnv = enum { unspecified, development, diff --git a/src/runtime.zig b/src/runtime.zig index 7fa0c80049..baaa644941 100644 --- a/src/runtime.zig +++ b/src/runtime.zig @@ -228,6 +228,8 @@ pub const Runtime = struct { // TODO: make this a bitset of all unsupported features lower_using: bool = true, + barrel_files: ?*const bun.StringHashMap(void) = null, + const hash_fields_for_runtime_transpiler = .{ .top_level_await, .auto_import_jsx, @@ -247,6 +249,99 @@ pub const Runtime = struct { // note that we do not include .inject_jest_globals, as we bail out of the cache entirely if this is true }; + // Taken from: https://github.com/vercel/next.js/blob/d69f796522cb843b959e6d30d6964873cfd14d23/packages/next/src/server/config.ts#L937-L1067 + const default_barrel_package_specifiers = &[_][]const u8{ + "@ant-design/icons", + "@effect/experimental", + "@effect/opentelemetry", + "@effect/platform-browser", + "@effect/platform-bun", + "@effect/platform-node", + "@effect/platform", + "@effect/rpc-http", + "@effect/rpc", + "@effect/schema", + "@effect/sql-mssql", + "@effect/sql-mysql2", + "@effect/sql-pg", + "@effect/sql-squlite-bun", + "@effect/sql-squlite-node", + "@effect/sql-squlite-react-native", + "@effect/sql-squlite-wasm", + "@effect/sql", + "@effect/typeclass", + "@headlessui-float/react", + "@headlessui/react", + "@heroicons/react/20/solid", + "@heroicons/react/24/outline", + "@heroicons/react/24/solid", + "@material-ui/core", + "@material-ui/icons", + "@mui/icons-material", + "@mui/material", + "@tabler/icons-react", + "@tremor/react", + "@visx/visx", + "ahooks", + "antd", + "date-fns", + "effect", + "lodash-es", + "lucide-react", + "mui-core", + "ramda", + "react-bootstrap", + "react-icons/ai", + "react-icons/bi", + "react-icons/bs", + "react-icons/cg", + "react-icons/ci", + 
"react-icons/di", + "react-icons/fa", + "react-icons/fa6", + "react-icons/fc", + "react-icons/fi", + "react-icons/gi", + "react-icons/go", + "react-icons/gr", + "react-icons/hi", + "react-icons/hi2", + "react-icons/im", + "react-icons/io", + "react-icons/io5", + "react-icons/lia", + "react-icons/lib", + "react-icons/lu", + "react-icons/md", + "react-icons/pi", + "react-icons/ri", + "react-icons/rx", + "react-icons/si", + "react-icons/sl", + "react-icons/tb", + "react-icons/tfi", + "react-icons/ti", + "react-icons/vsc", + "react-icons/wi", + "react-use", + "recharts", + "rxjs", + }; + + pub fn getDefaultBarrelFiles(allocator: std.mem.Allocator) !*bun.StringHashMap(void) { + return getBarrelFilesList(allocator, default_barrel_package_specifiers); + } + + pub fn getBarrelFilesList(allocator: std.mem.Allocator, files: []const []const u8) !*bun.StringHashMap(void) { + var map = try allocator.create(bun.StringHashMap(void)); + map.* = bun.StringHashMap(void).init(allocator); + try map.ensureTotalCapacity(@truncate(files.len)); + for (files) |file| { + map.putAssumeCapacityNoClobber(file, {}); + } + return map; + } + pub fn hashForRuntimeTranspiler(this: *const Features, hasher: *std.hash.Wyhash) void { bun.assert(this.runtime_transpiler_cache != null); diff --git a/src/transpiler.zig b/src/transpiler.zig index a5e20db282..4cd73a29d3 100644 --- a/src/transpiler.zig +++ b/src/transpiler.zig @@ -1340,7 +1340,7 @@ pub const Transpiler = struct { parts[0] = js_ast.Part{ .stmts = stmts }; return ParseResult{ - .ast = js_ast.Ast.initTest(parts), + .ast = js_ast.Ast.fromParts(parts), .source = source, .loader = loader, .input_fd = input_fd, diff --git a/test/bake/dev-server-harness.ts b/test/bake/dev-server-harness.ts index 478c379301..0277531472 100644 --- a/test/bake/dev-server-harness.ts +++ b/test/bake/dev-server-harness.ts @@ -1,6 +1,7 @@ /// import { Bake, BunFile, Subprocess } from "bun"; import fs, { readFileSync, realpathSync } from "node:fs"; +import { readFile } from "node:fs/promises"; import path from "node:path"; import os from "node:os"; import assert from "node:assert"; @@ -190,7 +191,7 @@ export class Dev { } write(file: string, contents: string, options: { errors?: null | ErrorSpec[]; dedent?: boolean } = {}) { - const snapshot = snapshotCallerLocation(); + const snapshot = snapshotCallerLocationMayFail(); return withAnnotatedStack(snapshot, async () => { await maybeWaitInteractive("write " + file); const wait = this.waitForHotReload(); @@ -284,6 +285,19 @@ export class Dev { }); return client; } + + async read(file: string): Promise { + return await readFile(path.join(this.rootDir, file), "utf8"); + } + + /** + * Writes the file back without any changes + * This is useful for triggering file watchers without modifying content + */ + async writeNoChanges(file: string): Promise { + const content = await this.read(file); + await this.write(file, content, { dedent: false }); + } } type StepFn = (dev: Dev) => Promise; @@ -751,25 +765,27 @@ export class Client extends EventEmitter { this.suppressInteractivePrompt = false; } - async getMostRecentHmrChunk() { - if (!this.#hmrChunk) { - // Wait up to a threshold before giving up - const resolver = Promise.withResolvers(); - this.once("hmr-chunk", () => resolver.resolve()); - this.once("exit", () => resolver.reject(new Error("Client exited while waiting for HMR chunk"))); - let t: any = setTimeout(() => { - t = null; - resolver.reject(new Error("Timeout waiting for HMR chunk")); - }, 1000); - await resolver.promise; - if (t) clearTimeout(t); - 
} - if (!this.#hmrChunk) { - throw new Error("No HMR chunks received. Make sure storeHotChunks is true"); - } - const chunk = this.#hmrChunk; - this.#hmrChunk = null; - return chunk; + getMostRecentHmrChunk() { + return withAnnotatedStack(snapshotCallerLocation(), async () => { + if (!this.#hmrChunk) { + // Wait up to a threshold before giving up + const resolver = Promise.withResolvers(); + this.once("hmr-chunk", () => resolver.resolve()); + this.once("exit", () => resolver.reject(new Error("Client exited while waiting for HMR chunk"))); + let t: any = setTimeout(() => { + t = null; + resolver.reject(new Error("Timeout waiting for HMR chunk")); + }, 1000); + await resolver.promise; + if (t) clearTimeout(t); + } + if (!this.#hmrChunk) { + throw new Error("No HMR chunks received. Make sure storeHotChunks is true"); + } + const chunk = this.#hmrChunk; + this.#hmrChunk = null; + return chunk; + }); } /** @@ -1316,3 +1332,11 @@ process.on("exit", () => { proc.kill("SIGKILL"); } }); + +export function extractScriptSrc(html: string) { + const scriptUrls = [...html.matchAll(/src="([^"]+.js)"/g)]; + if (scriptUrls.length !== 1) { + throw new Error("Expected 1 source file, got " + scriptUrls.length); + } + return scriptUrls[0][1]; +} diff --git a/test/bake/dev/bundle.test.ts b/test/bake/dev/bundle.test.ts index 83fe5b3525..91c16458d1 100644 --- a/test/bake/dev/bundle.test.ts +++ b/test/bake/dev/bundle.test.ts @@ -1,6 +1,13 @@ // Bundle tests are tests concerning bundling bugs that only occur in DevServer. +import { + Client, + devTest, + emptyHtmlFile, + extractScriptSrc, + minimalFramework, + reactRefreshStub, +} from "../dev-server-harness"; import { dedent } from "bundler/expectBundled"; -import { devTest, emptyHtmlFile, minimalFramework, reactAndRefreshStub, reactRefreshStub } from "../dev-server-harness"; devTest("import identifier doesnt get renamed", { framework: minimalFramework, @@ -99,23 +106,123 @@ devTest("importing a file before it is created", { await c.expectMessage("value: 456"); }, }); -devTest("react refresh - default export function", { - framework: minimalFramework, +devTest("barrel file optimization (lucide-react)", { files: { - ...reactAndRefreshStub, + ...reactRefreshStub, "index.html": emptyHtmlFile({ styles: [], - scripts: ["index.tsx"], + scripts: ["index.ts", "react-refresh/runtime"], }), - "index.tsx": ` - import { render } from 'bun-devserver-react-mock'; - render(); + "index.ts": ` + import { Icon1 } from 'lucide-react'; + import { Icon2 } from 'lucide-react'; + console.log(Icon1()); + console.log(Icon2()); `, - "App.tsx": ` - export default function App() { - return
<div>Hello, world!</div>
; - } + // Current BFO only handles some well-known package names, and only when the + // file is just re-exporting the icons. + "node_modules/lucide-react/index.js": ` + export { default as Icon1 } from './icons/icon1'; + export { default as Icon2 } from './icons/icon2'; + export { default as Icon3 } from './icons/icon3'; + export { default as Icon4 } from './icons/icon4'; `, + ...Object.fromEntries( + [1, 2, 3, 4].map(i => [ + `node_modules/lucide-react/icons/icon${i}.ts`, + `export default function Icon${i}() { return "CAPTURE(${i})"; }`, + ]), + ), + }, + async test(dev) { + function captureIconRefs(text: string) { + const refs = text.matchAll(/CAPTURE\((\d+)\)/g); + return Array.from(refs) + .map(ref => ref[1]) + .sort(); + } + async function fetchScriptSrc() { + const html = await dev.fetch("/").text(); + const srcUrl = extractScriptSrc(html); + return await dev.fetch(srcUrl).text(); + } + + // Should only serve icons 1 and 2 since those were the only ones referenced. + const c = await dev.client("/", { storeHotChunks: true }); + await c.expectMessage("CAPTURE(1)", "CAPTURE(2)"); + { + const src = await fetchScriptSrc(); + const refs = captureIconRefs(src); + expect(refs).toEqual(["1", "2"]); + } + + // Saving index.ts should re-run itself but only serve 'index.ts' + { + await dev.writeNoChanges("index.ts"); + await c.expectMessage("CAPTURE(1)", "CAPTURE(2)"); + const chunk = await c.getMostRecentHmrChunk(); + const keys = eval(chunk); + expect(captureIconRefs(chunk)).toEqual([]); + expect(Object.keys(keys)).toEqual(["index.ts"]); + + const src = await fetchScriptSrc(); + expect(captureIconRefs(src)).toEqual(["1", "2"]); + } + + // Changing the list of icons should + // 1. reload with the one new icon + // 2. rebuild will omit icon 2 (not really special DevServer behavior) + { + await dev.write( + "index.ts", + ` + import { Icon1 } from 'lucide-react'; + import { Icon3 } from 'lucide-react'; + console.log(Icon1()); + console.log(Icon3()); + `, + ); + // 1. + await c.expectMessage("CAPTURE(1)", "CAPTURE(3)"); + const chunk = await c.getMostRecentHmrChunk(); + expect(captureIconRefs(chunk)).toEqual(["3"]); + + // 2. + const src = await fetchScriptSrc(); + expect(captureIconRefs(src)).toEqual(["1", "3"]); + } + + // Saving index.ts should re-run itself but only serve 'index.ts' + { + await dev.writeNoChanges("index.ts"); + await c.expectMessage("CAPTURE(1)", "CAPTURE(3)"); + const chunk = await c.getMostRecentHmrChunk(); + const keys = eval(chunk); + expect(captureIconRefs(chunk)).toEqual([]); + expect(Object.keys(keys)).toEqual(["index.ts"]); + + const src = await fetchScriptSrc(); + expect(captureIconRefs(src)).toEqual(["1", "3"]); + } }, - async test(dev) {}, }); +// devTest("react refresh - default export function", { +// framework: minimalFramework, +// files: { +// ...reactAndRefreshStub, +// "index.html": emptyHtmlFile({ +// styles: [], +// scripts: ["index.tsx"], +// }), +// "index.tsx": ` +// import { render } from 'bun-devserver-react-mock'; +// render(); +// `, +// "App.tsx": ` +// export default function App() { +// return
<div>Hello, world!</div>
; +// } +// `, +// }, +// async test(dev) {}, +// }); diff --git a/test/bake/dev/sourcemap.test.ts b/test/bake/dev/sourcemap.test.ts index 2c99808b0e..8f99fa5e35 100644 --- a/test/bake/dev/sourcemap.test.ts +++ b/test/bake/dev/sourcemap.test.ts @@ -3,7 +3,7 @@ // work because hmr-runtime is minified in release builds, which would affect // the generated line/column numbers across different build configurations. import { expect } from "bun:test"; -import { Dev, devTest, emptyHtmlFile, reactRefreshStub } from "../dev-server-harness"; +import { Dev, devTest, emptyHtmlFile, extractScriptSrc, reactRefreshStub } from "../dev-server-harness"; import { BasicSourceMapConsumer, IndexedSourceMapConsumer, SourceMapConsumer } from "source-map"; devTest("source map emitted for primary chunk", { @@ -78,11 +78,7 @@ type SourceMap = (BasicSourceMapConsumer | IndexedSourceMapConsumer) & { }; async function extractSourceMapHtml(dev: Dev, html: string) { - const scriptUrls = [...html.matchAll(/src="([^"]+.js)"/g)]; - if (scriptUrls.length !== 1) { - throw new Error("Expected 1 source file, got " + scriptUrls.length); - } - const scriptUrl = scriptUrls[0][1]; + const scriptUrl = extractScriptSrc(html); const scriptSource = await dev.fetch(scriptUrl).text(); return extractSourceMap(dev, scriptSource); }
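The rewrite at the heart of this patch: the parser tags qualifying imports with `.barrel` and splits them into one-name-per-statement form, then `processBarrelRecord` points each importer's import record directly at the module the barrel re-exports, so unused re-exports never become parse tasks and the barrel module itself (`esm_barrel_file`) is skipped. A minimal TypeScript sketch of those semantics — `barrelExports` and `rewriteBarrelImport` are hypothetical names, and the real implementation operates on import records and source indices rather than source text:

```ts
// Sketch of the barrel-file rewrite, assuming a barrel that contains only
// plain re-exports like `export { default as Icon1 } from './icons/icon1'`.
type BarrelExport = { source: string; alias: string };

// What scanning the barrel's AST yields: each named export, mapped to the
// module it re-exports and the name it is exported under in that module.
const barrelExports: Record<string, BarrelExport> = {
  Icon1: { source: "lucide-react/icons/icon1", alias: "default" },
  Icon2: { source: "lucide-react/icons/icon2", alias: "default" },
};

// `import { Icon1 } from "lucide-react"` becomes a direct import of the
// underlying module. Returning null stands in for the fallback paths in the
// diff (`.not_found` / `.deoptimize`), where the fast path is abandoned and
// the barrel is bundled as a regular file.
function rewriteBarrelImport(name: string): string | null {
  const target = barrelExports[name];
  if (!target) return null;
  return `import { ${target.alias} as ${name} } from "${target.source}";`;
}

console.log(rewriteBarrelImport("Icon1"));
// => import { default as Icon1 } from "lucide-react/icons/icon1";
console.log(rewriteBarrelImport("Icon9")); // => null (de-optimize / not found)
```

This is also what the `lucide-react` test above asserts from the outside: only `CAPTURE(1)` and `CAPTURE(2)` appear in the served chunk, because icons 3 and 4 are never enqueued for parsing.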