From a01f9d8e1b39aeb2fba009dbc4b23b8760cdfd0d Mon Sep 17 00:00:00 2001 From: Zack Radisic <56137411+zackradisic@users.noreply.github.com> Date: Fri, 4 Oct 2024 20:23:10 -0700 Subject: [PATCH] Integrate CSS with bundler (#14281) Co-authored-by: Jarred Sumner Co-authored-by: Zack Radisic Co-authored-by: zackradisic Co-authored-by: Zack Radisic --- docs/bundler/index.md | 18 + package.json | 1 + packages/bun-types/bun.d.ts | 7 + src/baby_list.zig | 28 +- src/bun.js/api/JSBundler.zig | 5 + src/bun.js/javascript.zig | 1 - src/bun.js/node/types.zig | 8 +- src/bun_js.zig | 2 + src/bundler.zig | 11 +- src/bundler/bundle_v2.zig | 1390 +++++++++++- src/cli.zig | 5 + src/cli/build_command.zig | 2 + src/crash_handler.zig | 12 +- src/css/css_internals.zig | 9 +- src/css/css_parser.zig | 372 +++- src/css/dependencies.zig | 12 +- src/css/error.zig | 5 +- src/css/media_query.zig | 87 +- src/css/printer.zig | 62 +- src/css/properties/background.zig | 13 +- src/css/properties/border.zig | 3 + src/css/properties/border_image.zig | 171 +- src/css/properties/border_radius.zig | 76 +- src/css/properties/custom.zig | 4 +- src/css/properties/generate_properties.ts | 118 +- src/css/properties/properties_generated.zig | 192 +- src/css/rules/import.zig | 78 + src/css/rules/layer.zig | 16 +- src/css/rules/media.zig | 1 - src/css/rules/property.zig | 4 +- src/css/rules/rules.zig | 8 + src/css/selectors/parser.zig | 9 +- src/css/selectors/selector.zig | 9 +- src/css/values/color_js.zig | 3 +- src/css/values/image.zig | 2 +- src/css/values/length.zig | 10 + src/css/values/percentage.zig | 12 + src/css/values/rect.zig | 50 +- src/css/values/url.zig | 28 +- src/import_record.zig | 2 + src/js_ast.zig | 21 + src/js_printer.zig | 9 + src/options.zig | 19 +- src/resolver/resolver.zig | 2 +- src/string_mutable.zig | 4 + test/bundler/bun-build-api.test.ts | 55 + test/bundler/esbuild/css.test.ts | 146 ++ test/bundler/expectBundled.ts | 3 + test/js/bun/css/css.test.ts | 2153 +++++++++---------- 49 files 
changed, 3803 insertions(+), 1455 deletions(-) diff --git a/docs/bundler/index.md b/docs/bundler/index.md index 64fb58e247..d5598ec2c6 100644 --- a/docs/bundler/index.md +++ b/docs/bundler/index.md @@ -1090,6 +1090,24 @@ $ bun build ./index.tsx --outdir ./out --loader .png:dataurl --loader .txt:file {% /codetabs %} +### `experimentalCss` + +Whether to enable *experimental* support for bundling CSS files. Defaults to `false`. + +This supports bundling CSS files imported from JS, as well as CSS entrypoints. + +{% codetabs group="a" %} + +```ts#JavaScript +const result = await Bun.build({ + entrypoints: ["./index.ts"], + experimentalCss: true, +}); +// => { success: boolean, outputs: BuildArtifact[], logs: BuildMessage[] } +``` + +{% /codetabs %} + ## Outputs The `Bun.build` function returns a `Promise`, defined as: diff --git a/package.json b/package.json index f10de41f13..da6cb1394f 100644 --- a/package.json +++ b/package.json @@ -43,6 +43,7 @@ "build:release:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DWEBKIT_LOCAL=ON -B build/release", "build:release:with_logs": "cmake . -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=true -GNinja -Bbuild-release && ninja -Cbuild-release", "build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release", + "css-properties": "bun run src/css/properties/generate_properties.ts", "bump": "bun ./scripts/bump.ts", "typecheck": "tsc --noEmit && cd test && bun run typecheck", "fmt": "bun run prettier", diff --git a/packages/bun-types/bun.d.ts b/packages/bun-types/bun.d.ts index 4b92de08e5..ebd1019aa8 100644 --- a/packages/bun-types/bun.d.ts +++ b/packages/bun-types/bun.d.ts @@ -1595,6 +1595,13 @@ declare module "bun" { * @default false */ bytecode?: boolean; + + /** + * **Experimental** + * + * Enable CSS support. 
+ */ + experimentalCss?: boolean; } namespace Password { diff --git a/src/baby_list.zig b/src/baby_list.zig index baa07cc2de..a758fc8156 100644 --- a/src/baby_list.zig +++ b/src/baby_list.zig @@ -41,6 +41,12 @@ pub fn BabyList(comptime Type: type) type { return l.swapRemove(index); } + pub fn sortAsc( + this: *@This(), + ) void { + bun.strings.sortAsc(this.slice()); + } + pub fn contains(this: @This(), item: []const Type) bool { return this.len > 0 and @intFromPtr(item.ptr) >= @intFromPtr(this.ptr) and @intFromPtr(item.ptr) < @intFromPtr(this.ptr) + this.len; } @@ -77,8 +83,17 @@ pub fn BabyList(comptime Type: type) type { }; } + fn assertValidDeepClone(comptime T: type) void { + return switch (T) { + bun.JSAst.Expr, bun.JSAst.G.Property, bun.css.ImportConditions => {}, + else => { + @compileError("Unsupported type for BabyList.deepClone(): " ++ @typeName(Type)); + }, + }; + } + pub fn deepClone(this: @This(), allocator: std.mem.Allocator) !@This() { - if (comptime Type != bun.JSAst.Expr and Type != bun.JSAst.G.Property) @compileError("Unsupported type for BabyList.deepClone()"); + assertValidDeepClone(Type); var list_ = try initCapacity(allocator, this.len); for (this.slice()) |item| { list_.appendAssumeCapacity(try item.deepClone(allocator)); @@ -87,6 +102,17 @@ pub fn BabyList(comptime Type: type) type { return list_; } + /// Same as `deepClone` but doesn't return an error + pub fn deepClone2(this: @This(), allocator: std.mem.Allocator) @This() { + assertValidDeepClone(Type); + var list_ = initCapacity(allocator, this.len) catch bun.outOfMemory(); + for (this.slice()) |item| { + list_.appendAssumeCapacity(item.deepClone(allocator)); + } + + return list_; + } + pub fn clearRetainingCapacity(this: *@This()) void { this.len = 0; } diff --git a/src/bun.js/api/JSBundler.zig b/src/bun.js/api/JSBundler.zig index 3bc36542c9..afe5f7bef0 100644 --- a/src/bun.js/api/JSBundler.zig +++ b/src/bun.js/api/JSBundler.zig @@ -72,6 +72,7 @@ pub const JSBundler = struct { packages: 
options.PackagesOption = .bundle, format: options.Format = .esm, bytecode: bool = false, + experimental_css: bool = false, pub const List = bun.StringArrayHashMapUnmanaged(Config); @@ -94,6 +95,10 @@ pub const JSBundler = struct { errdefer this.deinit(allocator); errdefer if (plugins.*) |plugin| plugin.deinit(); + if (config.getTruthy(globalThis, "experimentalCss")) |enable_css| { + this.experimental_css = if (enable_css.isBoolean()) enable_css.toBoolean() else false; + } + // Plugins must be resolved first as they are allowed to mutate the config JSValue if (try config.getArray(globalThis, "plugins")) |array| { var iter = array.arrayIterator(globalThis); diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index b669b54554..4e060ac290 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -2742,7 +2742,6 @@ pub const VirtualMachine = struct { )) { .success => |r| r, .failure => |e| { - {} this.log.addErrorFmt( null, logger.Loc.Empty, diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig index 36785b0596..6ff1448c06 100644 --- a/src/bun.js/node/types.zig +++ b/src/bun.js/node/types.zig @@ -69,7 +69,13 @@ pub fn Maybe(comptime ReturnTypeT: type, comptime ErrorTypeT: type) type { err: ErrorType, result: ReturnType, - pub const Tag = enum { err, result }; + /// NOTE: this has to have a well defined layout (e.g. setting to `u8`) + /// experienced a bug with a Maybe(void, void) + /// creating the `err` variant of this type + /// resulted in Zig incorrectly setting the tag, leading to a switch + /// statement to just not work. 
+ /// we (Zack, Dylan, Dave, Mason) observed that it was set to 0xFF in ReleaseFast in the debugger + pub const Tag = enum(u8) { err, result }; pub const retry: @This() = if (hasRetry) .{ .err = ErrorType.retry } else .{ .err = ErrorType{} }; diff --git a/src/bun_js.zig b/src/bun_js.zig index 828a9b0b1d..e5eff889ce 100644 --- a/src/bun_js.zig +++ b/src/bun_js.zig @@ -93,6 +93,8 @@ pub const Run = struct { b.resolver.opts.minify_identifiers = ctx.bundler_options.minify_identifiers; b.resolver.opts.minify_whitespace = ctx.bundler_options.minify_whitespace; + b.options.experimental_css = ctx.bundler_options.experimental_css; + // b.options.minify_syntax = ctx.bundler_options.minify_syntax; switch (ctx.debug.macros) { diff --git a/src/bundler.zig b/src/bundler.zig index ece1f99be8..32b0c8dba9 100644 --- a/src/bundler.zig +++ b/src/bundler.zig @@ -934,11 +934,8 @@ pub const Bundler = struct { Output.panic("TODO: dataurl, base64", .{}); // TODO }, .css => { - if (comptime bun.FeatureFlags.css) { - const Arena = @import("../src/mimalloc_arena.zig").Arena; - - var arena = Arena.init() catch @panic("oopsie arena no good"); - const alloc = arena.allocator(); + if (bundler.options.experimental_css) { + const alloc = bundler.allocator; const entry = bundler.resolver.caches.fs.readFileWithAllocator( bundler.allocator, @@ -953,11 +950,11 @@ pub const Bundler = struct { }; const source = logger.Source.initRecycledFile(.{ .path = file_path, .contents = entry.contents }, bundler.allocator) catch return null; _ = source; // - switch (bun.css.StyleSheet(bun.css.DefaultAtRule).parse(alloc, entry.contents, bun.css.ParserOptions.default(alloc, bundler.log))) { + switch (bun.css.StyleSheet(bun.css.DefaultAtRule).parse(alloc, entry.contents, bun.css.ParserOptions.default(alloc, bundler.log), null)) { .result => |v| { const result = v.toCss(alloc, bun.css.PrinterOptions{ .minify = bun.getenvTruthy("BUN_CSS_MINIFY"), - }) catch |e| { + }, null) catch |e| { bun.handleErrorReturnTrace(e, 
@errorReturnTrace()); return null; }; diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 88900b5c35..9902685971 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -771,7 +771,7 @@ pub const BundleV2 = struct { // Handle onLoad plugins as entry points if (!this.enqueueOnLoadPluginIfNeeded(task)) { - if (loader.shouldCopyForBundling()) { + if (loader.shouldCopyForBundling(this.bundler.options.experimental_css)) { var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()]; additional_files.push(this.graph.allocator, .{ .source_index = task.source_index.get() }) catch unreachable; this.graph.input_files.items(.side_effects)[source_index.get()] = _resolver.SideEffects.no_side_effects__pure_data; @@ -860,6 +860,8 @@ pub const BundleV2 = struct { this.linker.options.emit_dce_annotations = bundler.options.emit_dce_annotations; this.linker.options.ignore_dce_annotations = bundler.options.ignore_dce_annotations; + this.linker.options.experimental_css = bundler.options.experimental_css; + this.linker.options.source_maps = bundler.options.source_map; this.linker.options.tree_shaking = bundler.options.tree_shaking; this.linker.options.public_path = bundler.options.public_path; @@ -1121,7 +1123,7 @@ pub const BundleV2 = struct { // Handle onLoad plugins if (!this.enqueueOnLoadPluginIfNeeded(task)) { - if (loader.shouldCopyForBundling()) { + if (loader.shouldCopyForBundling(this.bundler.options.experimental_css)) { var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()]; additional_files.push(this.graph.allocator, .{ .source_index = task.source_index.get() }) catch unreachable; this.graph.input_files.items(.side_effects)[source_index.get()] = _resolver.SideEffects.no_side_effects__pure_data; @@ -1174,7 +1176,7 @@ pub const BundleV2 = struct { // Handle onLoad plugins if (!this.enqueueOnLoadPluginIfNeeded(task)) { - 
if (loader.shouldCopyForBundling()) { + if (loader.shouldCopyForBundling(this.bundler.options.experimental_css)) { var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()]; additional_files.push(this.graph.allocator, .{ .source_index = task.source_index.get() }) catch unreachable; this.graph.input_files.items(.side_effects)[source_index.get()] = _resolver.SideEffects.no_side_effects__pure_data; @@ -1452,6 +1454,7 @@ pub const BundleV2 = struct { bundler.options.code_splitting = config.code_splitting; bundler.options.emit_dce_annotations = config.emit_dce_annotations orelse !config.minify.whitespace; bundler.options.ignore_dce_annotations = config.ignore_dce_annotations; + bundler.options.experimental_css = config.experimental_css; bundler.configureLinker(); try bundler.configureDefines(); @@ -1756,7 +1759,7 @@ pub const BundleV2 = struct { // Handle onLoad plugins if (!this.enqueueOnLoadPluginIfNeeded(task)) { - if (loader.shouldCopyForBundling()) { + if (loader.shouldCopyForBundling(this.bundler.options.experimental_css)) { var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()]; additional_files.push(this.graph.allocator, .{ .source_index = task.source_index.get() }) catch unreachable; this.graph.input_files.items(.side_effects)[source_index.get()] = _resolver.SideEffects.no_side_effects__pure_data; @@ -2475,7 +2478,7 @@ pub const BundleV2 = struct { continue; } - if (loader.shouldCopyForBundling()) { + if (loader.shouldCopyForBundling(this.bundler.options.experimental_css)) { var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[result.source.index.get()]; additional_files.push(this.graph.allocator, .{ .source_index = new_task.source_index.get() }) catch unreachable; new_input_file.side_effects = _resolver.SideEffects.no_side_effects__pure_data; @@ -2486,7 +2489,7 @@ pub const BundleV2 = struct { 
graph.pool.pool.schedule(ThreadPoolLib.Batch.from(&new_task.task)); } else { const loader = value.loader orelse graph.input_files.items(.source)[existing.value_ptr.*].path.loader(&this.bundler.options.loaders) orelse options.Loader.file; - if (loader.shouldCopyForBundling()) { + if (loader.shouldCopyForBundling(this.bundler.options.experimental_css)) { var additional_files: *BabyList(AdditionalFile) = &graph.input_files.items(.additional_files)[result.source.index.get()]; additional_files.push(this.graph.allocator, .{ .source_index = existing.value_ptr.* }) catch unreachable; graph.estimated_file_loader_count += 1; @@ -3083,7 +3086,9 @@ pub const ParseTask = struct { const root = Expr.init(E.UTF8String, E.UTF8String{ .data = source.contents, }, Logger.Loc{ .start = 0 }); - return JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?); + var ast = JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?); + ast.addUrlForCss(allocator, bundler.options.experimental_css, &source, "text/plain"); + return ast; }, .sqlite_embedded, .sqlite => { @@ -3179,16 +3184,43 @@ pub const ParseTask = struct { unique_key_for_additional_file.* = unique_key; return JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?); }, - // TODO: css - else => { - const unique_key = std.fmt.allocPrint(allocator, "{any}A{d:0>8}", .{ bun.fmt.hexIntLower(unique_key_prefix), source.index.get() }) catch unreachable; - const root = Expr.init(E.String, E.String{ - .data = unique_key, - }, Logger.Loc{ .start = 0 }); - unique_key_for_additional_file.* = unique_key; - return JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?); + .css => { + if (bundler.options.experimental_css) { + // const unique_key = std.fmt.allocPrint(allocator, "{any}A{d:0>8}", .{ 
bun.fmt.hexIntLower(unique_key_prefix), source.index.get() }) catch unreachable; + // unique_key_for_additional_file.* = unique_key; + const root = Expr.init(E.Object, E.Object{}, Logger.Loc{ .start = 0 }); + var import_records = BabyList(ImportRecord){}; + const source_code = source.contents; + const css_ast = + switch (bun.css.StyleSheet(bun.css.DefaultAtRule).parseBundler( + allocator, + source_code, + bun.css.ParserOptions.default(allocator, bundler.log), + &import_records, + )) { + .result => |v| v, + .err => |e| { + log.addErrorFmt(&source, Logger.Loc.Empty, allocator, "{}", .{e.kind}) catch unreachable; + return error.SyntaxError; + }, + }; + const css_ast_heap = bun.create(allocator, bun.css.BundlerStyleSheet, css_ast); + var ast = JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?); + ast.css = css_ast_heap; + ast.import_records = import_records; + return ast; + } }, + else => {}, } + const unique_key = std.fmt.allocPrint(allocator, "{any}A{d:0>8}", .{ bun.fmt.hexIntLower(unique_key_prefix), source.index.get() }) catch unreachable; + const root = Expr.init(E.String, E.String{ + .data = unique_key, + }, Logger.Loc{ .start = 0 }); + unique_key_for_additional_file.* = unique_key; + var ast = JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?); + ast.addUrlForCss(allocator, bundler.options.experimental_css, &source, null); + return ast; } fn run_( @@ -3231,7 +3263,7 @@ pub const ParseTask = struct { } break :brk resolver.caches.fs.readFileWithAllocator( - if (loader.shouldCopyForBundling()) + if (loader.shouldCopyForBundling(this.ctx.bundler.options.experimental_css)) // The OutputFile will own the memory for the contents bun.default_allocator else @@ -3382,8 +3414,7 @@ pub const ParseTask = struct { }; ast.target = target; - - if (ast.parts.len <= 1) { + if (ast.parts.len <= 1 and ast.css == null) { task.side_effects = 
.no_side_effects__empty_ast; } @@ -3406,7 +3437,7 @@ pub const ParseTask = struct { .unique_key_for_additional_file = unique_key_for_additional_file, // Hash the files in here so that we do it in parallel. - .content_hash_for_additional_file = if (loader.shouldCopyForBundling()) + .content_hash_for_additional_file = if (loader.shouldCopyForBundling(this.ctx.bundler.options.experimental_css)) ContentHasher.run(source.contents) else 0, @@ -4547,6 +4578,7 @@ pub const LinkerContext = struct { minify_whitespace: bool = false, minify_syntax: bool = false, minify_identifiers: bool = false, + experimental_css: bool = false, source_maps: options.SourceMapOption = .none, target: options.Target = .browser, @@ -4883,8 +4915,10 @@ pub const LinkerContext = struct { defer arena.deinit(); var temp_allocator = arena.allocator(); - var js_chunks = bun.StringArrayHashMap(Chunk).init(this.allocator); + var js_chunks = bun.StringArrayHashMap(Chunk).init(temp_allocator); try js_chunks.ensureUnusedCapacity(this.graph.entry_points.len); + var css_chunks = bun.StringArrayHashMap(Chunk).init(temp_allocator); + // try css_chunks.ensureUnusedCapacity(this.graph.entry_points.len); const entry_source_indices = this.graph.entry_points.items(.source_index); @@ -4895,10 +4929,35 @@ pub const LinkerContext = struct { var entry_bits = &this.graph.files.items(.entry_bits)[source_index]; entry_bits.set(entry_bit); + if (this.options.experimental_css) { + if (this.graph.ast.items(.css)[source_index]) |*css| { + _ = css; // autofix + // Create a chunk for the entry point here to ensure that the chunk is + // always generated even if the resulting file is empty + const css_chunk_entry = try css_chunks.getOrPut(try temp_allocator.dupe(u8, entry_bits.bytes(this.graph.entry_points.len))); + // const css_chunk_entry = try js_chunks.getOrPut(); + const order = this.findImportedFilesInCSSOrder(temp_allocator, &[_]Index{Index.init(source_index)}); + css_chunk_entry.value_ptr.* = .{ + .entry_point = .{ + 
.entry_point_id = entry_bit, + .source_index = source_index, + .is_entry_point = true, + }, + .entry_bits = entry_bits.*, + .content = .{ + .css = .{ + .imports_in_chunk_in_order = order, + .asts = this.allocator.alloc(bun.css.BundlerStyleSheet, order.len) catch bun.outOfMemory(), + }, + }, + .output_source_map = sourcemap.SourceMapPieces.init(this.allocator), + }; + continue; + } + } // Create a chunk for the entry point here to ensure that the chunk is // always generated even if the resulting file is empty const js_chunk_entry = try js_chunks.getOrPut(try temp_allocator.dupe(u8, entry_bits.bytes(this.graph.entry_points.len))); - js_chunk_entry.value_ptr.* = .{ .entry_point = .{ .entry_point_id = entry_bit, @@ -4911,6 +4970,45 @@ pub const LinkerContext = struct { }, .output_source_map = sourcemap.SourceMapPieces.init(this.allocator), }; + + if (this.options.experimental_css) { + // If this JS entry point has an associated CSS entry point, generate it + // now. This is essentially done by generating a virtual CSS file that + // only contains "@import" statements in the order that the files were + // discovered in JS source order, where JS source order is arbitrary but + // consistent for dynamic imports. Then we run the CSS import order + // algorithm to determine the final CSS file order for the chunk. 
+ const css_source_indices = this.findImportedCSSFilesInJSOrder(temp_allocator, Index.init(source_index)); + if (css_source_indices.len > 0) { + const order = this.findImportedFilesInCSSOrder(temp_allocator, css_source_indices.slice()); + var css_files_wth_parts_in_chunk = std.AutoArrayHashMapUnmanaged(Index.Int, void){}; + for (order.slice()) |entry| { + if (entry.kind == .source_index) { + css_files_wth_parts_in_chunk.put(this.allocator, entry.kind.source_index.get(), {}) catch bun.outOfMemory(); + } + } + const css_chunk_entry = try css_chunks.getOrPut(try temp_allocator.dupe(u8, entry_bits.bytes(this.graph.entry_points.len))); + // const css_chunk_entry = try js_chunks.getOrPut(try temp_allocator.dupe(u8, entry_bits.bytes(this.graph.entry_points.len))); + css_chunk_entry.value_ptr.* = .{ + .entry_point = .{ + .entry_point_id = entry_bit, + .source_index = source_index, + .is_entry_point = true, + }, + .entry_bits = entry_bits.*, + .content = .{ + .css = .{ + .imports_in_chunk_in_order = order, + .asts = this.allocator.alloc(bun.css.BundlerStyleSheet, order.len) catch bun.outOfMemory(), + }, + }, + .files_with_parts_in_chunk = css_files_wth_parts_in_chunk, + .output_source_map = sourcemap.SourceMapPieces.init(this.allocator), + }; + } + + js_chunk_entry.value_ptr.content.javascript.has_css_chunk = true; + } } var file_entry_bits: []AutoBitSet = this.graph.files.items(.entry_bits); @@ -4924,44 +5022,83 @@ pub const LinkerContext = struct { } }; + const css_reprs = this.graph.ast.items(.css); + // Figure out which JS files are in which chunk - for (this.graph.reachable_files) |source_index| { - if (this.graph.files_live.isSet(source_index.get())) { - const entry_bits: *const AutoBitSet = &file_entry_bits[source_index.get()]; + if (js_chunks.count() > 0) { + for (this.graph.reachable_files) |source_index| { + if (this.graph.files_live.isSet(source_index.get())) { + if (this.graph.ast.items(.css)[source_index.get()] == null) { + const entry_bits: *const AutoBitSet = 
&file_entry_bits[source_index.get()]; + if (css_reprs[source_index.get()] != null) continue; - if (this.graph.code_splitting) { - var js_chunk_entry = try js_chunks.getOrPut( - try temp_allocator.dupe(u8, entry_bits.bytes(this.graph.entry_points.len)), - ); + if (this.graph.code_splitting) { + var js_chunk_entry = try js_chunks.getOrPut( + try temp_allocator.dupe(u8, entry_bits.bytes(this.graph.entry_points.len)), + ); - if (!js_chunk_entry.found_existing) { - js_chunk_entry.value_ptr.* = .{ - .entry_bits = entry_bits.*, - .entry_point = .{ - .source_index = source_index.get(), - }, - .content = .{ - .javascript = .{}, - }, - .output_source_map = sourcemap.SourceMapPieces.init(this.allocator), - }; + if (!js_chunk_entry.found_existing) { + js_chunk_entry.value_ptr.* = .{ + .entry_bits = entry_bits.*, + .entry_point = .{ + .source_index = source_index.get(), + }, + .content = .{ + .javascript = .{}, + }, + .output_source_map = sourcemap.SourceMapPieces.init(this.allocator), + }; + } + + _ = js_chunk_entry.value_ptr.files_with_parts_in_chunk.getOrPut(this.allocator, @as(u32, @truncate(source_index.get()))) catch unreachable; + } else { + var handler = Handler{ + .chunks = js_chunks.values(), + .allocator = this.allocator, + .source_id = source_index.get(), + }; + entry_bits.forEach(Handler, &handler, Handler.next); + } } - - _ = js_chunk_entry.value_ptr.files_with_parts_in_chunk.getOrPut(this.allocator, @as(u32, @truncate(source_index.get()))) catch unreachable; - } else { - var handler = Handler{ - .chunks = js_chunks.values(), - .allocator = this.allocator, - .source_id = source_index.get(), - }; - entry_bits.forEach(Handler, &handler, Handler.next); } } } - js_chunks.sort(strings.StringArrayByIndexSorter.init(try temp_allocator.dupe(string, js_chunks.keys()))); + // Sort the chunks for determinism. This matters because we use chunk indices + // as sorting keys in a few places. 
+ var sorted_chunks = BabyList(Chunk).initCapacity(this.allocator, js_chunks.count() + css_chunks.count()) catch bun.outOfMemory(); + var sorted_keys = BabyList(string).initCapacity(temp_allocator, @max(js_chunks.count(), css_chunks.count())) catch bun.outOfMemory(); + sorted_keys.appendSliceAssumeCapacity(js_chunks.keys()); + sorted_keys.sortAsc(); + var js_chunk_indices_for_css = std.StringArrayHashMap(u32).init(temp_allocator); + js_chunk_indices_for_css.ensureTotalCapacity(brk: { + var count: u32 = 0; + for (js_chunks.values()) |*chunk| { + if (chunk.content.javascript.has_css_chunk) count += 1; + } + break :brk count; + }) catch bun.outOfMemory(); + for (sorted_keys.slice()) |key| { + const chunk = js_chunks.get(key) orelse unreachable; + if (chunk.content.javascript.has_css_chunk) { + js_chunk_indices_for_css.put(key, sorted_chunks.len) catch unreachable; + } + sorted_chunks.appendAssumeCapacity(chunk); + } + sorted_keys.clearRetainingCapacity(); + for (css_chunks.keys()) |key| { + sorted_keys.appendAssumeCapacity(key); + } + sorted_keys.sortAsc(); + for (sorted_keys.slice()) |key| { + const chunk = css_chunks.get(key) orelse unreachable; + if (js_chunk_indices_for_css.get(key)) |js_chunk_index| { + sorted_chunks.mut(js_chunk_index).content.javascript.css_chunk_index = Index.init(sorted_chunks.len); + } + sorted_chunks.appendAssumeCapacity(chunk); + } - const chunks: []Chunk = js_chunks.values(); + const chunks: []Chunk = sorted_chunks.slice(); const entry_point_chunk_indices: []u32 = this.graph.files.items(.entry_point_chunk_index); // Map from the entry point file to this chunk. 
We will need this later if @@ -5009,7 +5146,7 @@ pub const LinkerContext = struct { const pathname = Fs.PathName.init(output_paths[chunk.entry_point.entry_point_id].slice()); chunk.template.placeholder.name = pathname.base; - chunk.template.placeholder.ext = "js"; + chunk.template.placeholder.ext = chunk.content.ext(); // this if check is a specific fix for `bun build hi.ts --external '*'`, without leading `./` const dir_path = if (pathname.dir.len > 0) pathname.dir else "."; @@ -5036,11 +5173,16 @@ pub const LinkerContext = struct { defer part_ranges_shared.deinit(); defer parts_prefix_shared.deinit(); for (chunks) |*chunk| { - try this.findImportedPartsInJSOrder( - chunk, - &part_ranges_shared, - &parts_prefix_shared, - ); + switch (chunk.content) { + .javascript => { + try this.findImportedPartsInJSOrder( + chunk, + &part_ranges_shared, + &parts_prefix_shared, + ); + }, + .css => {}, + } } } @@ -5257,6 +5399,460 @@ pub const LinkerContext = struct { chunk.content.javascript.parts_in_chunk_in_order = parts_in_chunk_order; } + // CSS files are traversed in depth-first postorder just like JavaScript. But + // unlike JavaScript import statements, CSS "@import" rules are evaluated every + // time instead of just the first time. + // + // A + // / \ + // B C + // \ / + // D + // + // If A imports B and then C, B imports D, and C imports D, then the CSS + // traversal order is D B D C A. + // + // However, evaluating a CSS file multiple times is sort of equivalent to + // evaluating it once at the last location. So we basically drop all but the + // last evaluation in the order. + // + // The only exception to this is "@layer". Evaluating a CSS file multiple + // times is sort of equivalent to evaluating it once at the first location + // as far as "@layer" is concerned. So we may in some cases keep both the + // first and last locations and only write out the "@layer" information + // for the first location. 
+ pub fn findImportedFilesInCSSOrder(this: *LinkerContext, temp_allocator: std.mem.Allocator, entry_points: []const Index) BabyList(Chunk.CssImportOrder) { + const Visitor = struct { + allocator: std.mem.Allocator, + temp_allocator: std.mem.Allocator, + css_asts: []?*bun.css.BundlerStyleSheet, + all_import_records: []const BabyList(ImportRecord), + + graph: *LinkerGraph, + parse_graph: *Graph, + + has_external_import: bool = false, + visited: BabyList(Index), + order: BabyList(Chunk.CssImportOrder) = .{}, + + pub fn visit( + visitor: *@This(), + source_index: Index, + wrapping_conditions: *BabyList(bun.css.ImportConditions), + wrapping_import_records: *BabyList(*const ImportRecord), + ) void { + + // The CSS specification strangely does not describe what to do when there + // is a cycle. So we are left with reverse-engineering the behavior from a + // real browser. Here's what the WebKit code base has to say about this: + // + // "Check for a cycle in our import chain. If we encounter a stylesheet + // in our parent chain with the same URL, then just bail." + // + // So that's what we do here. See "StyleRuleImport::requestStyleSheet()" in + // WebKit for more information. + for (visitor.visited.slice()) |visitedSourceIndex| { + if (visitedSourceIndex.get() == source_index.get()) { + return; + } + } + + visitor.visited.push( + visitor.temp_allocator, + source_index, + ) catch bun.outOfMemory(); + + const repr: *const bun.css.BundlerStyleSheet = visitor.css_asts[source_index.get()].?; + const top_level_rules = &repr.rules; + + // TODO: should we even do this? @import rules have to be the first rules in the stylesheet, why even allow pre-import layers? 
+ // Any pre-import layers come first + // if len(repr.AST.LayersPreImport) > 0 { + // order = append(order, cssImportOrder{ + // kind: cssImportLayers, + // layers: repr.AST.LayersPreImport, + // conditions: wrappingConditions, + // conditionImportRecords: wrappingImportRecords, + // }) + // } + + defer { + _ = visitor.visited.popOrNull(); + } + + // Iterate over the top-level "@import" rules + var import_record_idx: usize = 0; + for (top_level_rules.v.items) |*rule| { + if (rule.* == .import) { + defer import_record_idx += 1; + const record = visitor.all_import_records[source_index.get()].at(import_record_idx); + + // Follow internal dependencies + if (record.source_index.isValid()) { + // TODO: conditions + // If this import has conditions, fork our state so that the entire + // imported stylesheet subtree is wrapped in all of the conditions + if (rule.import.hasConditions()) { + // Fork our state + var nested_conditions = wrapping_conditions.deepClone2(visitor.allocator); + // var nested_import_records = wrapping_import_records.deepClone(visitor.allocator) catch bun.outOfMemory(); + // _ = nested_import_records; // autofix + + // Clone these import conditions and append them to the state + nested_conditions.push(visitor.allocator, rule.import.conditionsOwned(visitor.allocator)) catch bun.outOfMemory(); + visitor.visit(record.source_index, &nested_conditions, wrapping_import_records); + continue; + } + visitor.visit(record.source_index, wrapping_conditions, wrapping_import_records); + continue; + } + + // TODO + // Record external dependencies + if (!record.is_internal) { + + // If this import has conditions, append it to the list of overall + // conditions for this external import. Note that an external import + // may actually have multiple sets of conditions that can't be + // merged. When this happens we need to generate a nested imported + // CSS file using a data URL.
+ if (rule.import.hasConditions()) { + var all_conditions = wrapping_conditions.deepClone2(visitor.allocator); + all_conditions.push(visitor.allocator, rule.import.conditionsOwned(visitor.allocator)) catch bun.outOfMemory(); + visitor.order.push( + visitor.allocator, + Chunk.CssImportOrder{ + .kind = .{ + .external_path = record.path, + }, + .conditions = all_conditions, + // .condition_import_records = wrapping_import_records.*, + }, + ) catch bun.outOfMemory(); + } else { + visitor.order.push( + visitor.allocator, + Chunk.CssImportOrder{ + .kind = .{ + .external_path = record.path, + }, + .conditions = wrapping_conditions.*, + // .condition_import_records = visitor.all, + }, + ) catch bun.outOfMemory(); + } + visitor.has_external_import = true; + } + } + } + + // TODO: composes? + + if (comptime bun.Environment.isDebug) { + std.debug.print( + "Lookin' at file: {d}={s}\n", + .{ source_index.get(), visitor.parse_graph.input_files.items(.source)[source_index.get()].path.pretty }, + ); + for (visitor.visited.slice()) |idx| { + std.debug.print( + " Visit: {d}\n", + .{idx.get()}, + ); + } + } + // Accumulate imports in depth-first postorder + visitor.order.push(visitor.allocator, Chunk.CssImportOrder{ + .kind = .{ .source_index = source_index }, + .conditions = wrapping_conditions.*, + }) catch bun.outOfMemory(); + } + }; + + var visitor = Visitor{ + .allocator = this.allocator, + .temp_allocator = temp_allocator, + .graph = &this.graph, + .parse_graph = this.parse_graph, + .visited = BabyList(Index).initCapacity(temp_allocator, 16) catch bun.outOfMemory(), + .css_asts = this.graph.ast.items(.css), + .all_import_records = this.graph.ast.items(.import_records), + }; + var wrapping_conditions: BabyList(bun.css.ImportConditions) = .{}; + var wrapping_import_records: BabyList(*const ImportRecord) = .{}; + // Include all files reachable from any entry point + for (entry_points) |entry_point| { + visitor.visit(entry_point, &wrapping_conditions, &wrapping_import_records); + } 
+ + var order = visitor.order; + var wip_order = BabyList(Chunk.CssImportOrder).initCapacity(temp_allocator, order.len) catch bun.outOfMemory(); + + // CSS syntax unfortunately only allows "@import" rules at the top of the + // file. This means we must hoist all external "@import" rules to the top of + // the file when bundling, even though doing so will change the order of CSS + // evaluation. + if (visitor.has_external_import) { + // Pass 1: Pull out leading "@layer" and external "@import" rules + var is_at_layer_prefix = true; + for (order.slice()) |*entry| { + if ((entry.kind == .layers and is_at_layer_prefix) or entry.kind == .external_path) { + wip_order.push(temp_allocator, entry.*) catch bun.outOfMemory(); + } + if (entry.kind != .layers) { + is_at_layer_prefix = false; + } + } + + // Pass 2: Append everything that we didn't pull out in pass 1 + is_at_layer_prefix = true; + for (order.slice()) |*entry| { + if ((entry.kind != .layers or !is_at_layer_prefix) and entry.kind != .external_path) { + wip_order.push(temp_allocator, entry.*) catch bun.outOfMemory(); + } + if (entry.kind != .layers) { + is_at_layer_prefix = false; + } + } + + order.len = wip_order.len; + @memcpy(order.slice(), wip_order.slice()); + wip_order.clearRetainingCapacity(); + } + + // Next, optimize import order. If there are duplicate copies of an imported + // file, replace all but the last copy with just the layers that are in that + // file. This works because in CSS, the last instance of a declaration + // overrides all previous instances of that declaration. 
+ { + var source_index_duplicates = std.AutoArrayHashMap(u32, BabyList(u32)).init(temp_allocator); + var external_path_duplicates = std.StringArrayHashMap(BabyList(u32)).init(temp_allocator); + + var i: u32 = visitor.order.len; + next_backward: while (i != 0) { + i -= 1; + const entry = visitor.order.at(i); + switch (entry.kind) { + .source_index => |idx| { + const gop = source_index_duplicates.getOrPut(idx.get()) catch bun.outOfMemory(); + if (!gop.found_existing) { + gop.value_ptr.* = BabyList(u32){}; + } + for (gop.value_ptr.slice()) |j| { + // TODO: check conditions are redundant + if (isConditionalImportRedundant(&entry.conditions, &order.at(j).conditions)) { + order.mut(i).kind = .{ + .layers = &.{}, + }; + continue :next_backward; + } + } + gop.value_ptr.push(temp_allocator, i) catch bun.outOfMemory(); + }, + .external_path => |p| { + const gop = external_path_duplicates.getOrPut(p.text) catch bun.outOfMemory(); + if (!gop.found_existing) { + gop.value_ptr.* = BabyList(u32){}; + } + for (gop.value_ptr.slice()) |j| { + // TODO: check conditions are redundant + if (isConditionalImportRedundant(&entry.conditions, &order.at(j).conditions)) { + // Don't remove duplicates entirely. The import conditions may + // still introduce layers to the layer order. Represent this as a + // file with an empty layer list. + order.mut(i).kind = .{ + .layers = &.{}, + }; + continue :next_backward; + } + } + gop.value_ptr.push(temp_allocator, i) catch bun.outOfMemory(); + }, + .layers => {}, + } + } + } + + // TODO: layers + // Then optimize "@layer" rules by removing redundant ones. This loop goes + // forward instead of backward because "@layer" takes effect at the first + // copy instead of the last copy like other things in CSS. + + // TODO: layers + // Finally, merge adjacent "@layer" rules with identical conditions together. + + if (bun.Environment.isDebug) { + std.debug.print("CSS order:\n", .{}); + for (order.slice(), 0..) 
|entry, i| {
+ std.debug.print(" {d}: {}\n", .{ i, entry });
+ }
+ }
+
+ return order;
+ }
+
+ // Given two "@import" rules for the same source index (an earlier one and a
+ // later one), the earlier one is masked by the later one if the later one's
+ // condition list is a prefix of the earlier one's condition list.
+ //
+ // For example:
+ //
+ // // entry.css
+ // @import "foo.css" supports(display: flex);
+ // @import "bar.css" supports(display: flex);
+ //
+ // // foo.css
+ // @import "lib.css" screen;
+ //
+ // // bar.css
+ // @import "lib.css";
+ //
+ // When we bundle this code we'll get an import order as follows:
+ //
+ // 1. lib.css [supports(display: flex), screen]
+ // 2. foo.css [supports(display: flex)]
+ // 3. lib.css [supports(display: flex)]
+ // 4. bar.css [supports(display: flex)]
+ // 5. entry.css []
+ //
+ // For "lib.css", the entry with the conditions [supports(display: flex)] should
+ // make the entry with the conditions [supports(display: flex), screen] redundant.
+ //
+ // Note that all of this deliberately ignores the existence of "@layer" because
+ // that is handled separately. All of this is only for handling unlayered styles.
+ //
+ // Returns true when every condition in `later` masks the corresponding
+ // condition in `earlier`, i.e. the earlier import is redundant and can be
+ // dropped (the caller replaces it with an empty layer list).
+ pub fn isConditionalImportRedundant(earlier: *const BabyList(bun.css.ImportConditions), later: *const BabyList(bun.css.ImportConditions)) bool {
+ // A longer condition list can never be a prefix of a shorter one.
+ if (later.len > earlier.len) return false;
+
+ for (0..later.len) |i| {
+ const a = earlier.at(i);
+ const b = later.at(i);
+
+ // Only compare "@supports" and "@media" if "@layers" is equal
+ if (a.layersEql(b)) {
+ // TODO: supports
+ // TODO: media
+ const same_supports = true;
+ const same_media = true;
+
+ // NOTE(review): supports/media equality is not implemented yet, so
+ // both flags are hard-coded to `true`. As long as that is the case,
+ // any two conditions with equal layers take the first `continue`
+ // below, which leaves the two narrower checks unreachable and makes
+ // this dedup more aggressive than the comments describe — confirm
+ // this is the intended interim behavior until the TODOs are done.
+
+ // If the import conditions are exactly equal, then only keep
+ // the later one. The earlier one is redundant. Example:
+ //
+ // @import "foo.css" layer(abc) supports(display: flex) screen;
+ // @import "foo.css" layer(abc) supports(display: flex) screen;
+ //
+ // The later one makes the earlier one redundant.
+ if (same_supports and same_media) {
+ continue;
+ }
+
+ // If the media conditions are exactly equal and the later one
+ // doesn't have any supports conditions, then the later one will
+ // apply in all cases where the earlier one applies. Example:
+ //
+ // @import "foo.css" layer(abc) supports(display: flex) screen;
+ // @import "foo.css" layer(abc) screen;
+ //
+ // The later one makes the earlier one redundant.
+ if (same_media and b.supports == null) {
+ continue;
+ }
+
+ // If the supports conditions are exactly equal and the later one
+ // doesn't have any media conditions, then the later one will
+ // apply in all cases where the earlier one applies. Example:
+ //
+ // @import "foo.css" layer(abc) supports(display: flex) screen;
+ // @import "foo.css" layer(abc) supports(display: flex);
+ //
+ // The later one makes the earlier one redundant.
+ if (same_supports and b.media.media_queries.items.len == 0) {
+ continue;
+ }
+ }
+
+ // Layers differ (or conditions aren't maskable): not redundant.
+ return false;
+ }
+
+ return true;
+ }
+
+ // JavaScript modules are traversed in depth-first postorder. This is the
+ // order that JavaScript modules were evaluated in before the top-level await
+ // feature was introduced.
+ //
+ // A
+ // / \
+ // B C
+ // \ /
+ // D
+ //
+ // If A imports B and then C, B imports D, and C imports D, then the JavaScript
+ // traversal order is D B C A.
+ //
+ // This function may deviate from ESM import order for dynamic imports (both
+ // "require()" and "import()"). This is because the import order is impossible
+ // to determine since the imports happen at run-time instead of compile-time.
+ // In this case we just pick an arbitrary but consistent order.
+ pub fn findImportedCSSFilesInJSOrder(this: *LinkerContext, temp_allocator: std.mem.Allocator, entry_point: Index) BabyList(Index) { + var visited = BitSet.initEmpty(temp_allocator, this.graph.files.len) catch bun.outOfMemory(); + var order: BabyList(Index) = .{}; + + const all_import_records = this.graph.ast.items(.import_records); + + const visit = struct { + fn visit( + c: *LinkerContext, + import_records: []const BabyList(ImportRecord), + temp: std.mem.Allocator, + visits: *BitSet, + o: *BabyList(Index), + source_index: Index, + is_css: bool, + ) void { + if (visits.isSet(source_index.get())) return; + visits.set(source_index.get()); + + const records: []ImportRecord = import_records[source_index.get()].slice(); + + for (records) |record| { + if (record.source_index.isValid()) { + // Traverse any files imported by this part. Note that CommonJS calls + // to "require()" count as imports too, sort of as if the part has an + // ESM "import" statement in it. This may seem weird because ESM imports + // are a compile-time concept while CommonJS imports are a run-time + // concept. But we don't want to manipulate