Compare commits

...

3 Commits

Author SHA1 Message Date
Zack Radisic
4ed812dad2 ooga booga 2025-02-18 22:54:59 -08:00
Zack Radisic
4b59f0f28b Revert "Enable tree shaking for the dev server"
This reverts commit 1ecee6d11d.
2025-02-17 17:20:47 -08:00
Zack Radisic
1ecee6d11d Enable tree shaking for the dev server 2025-02-17 16:51:56 -08:00
6 changed files with 393 additions and 13 deletions

View File

@@ -64,7 +64,7 @@ const TranspilerOptions = struct {
tsconfig_buf: []const u8 = "",
macros_buf: []const u8 = "",
log: logger.Log,
runtime: Runtime.Features = Runtime.Features{ .top_level_await = true },
runtime: Runtime.Features,
tree_shaking: bool = false,
trim_unused_imports: ?bool = null,
inlining: bool = false,
@@ -310,8 +310,11 @@ fn exportReplacementValue(value: JSValue, globalThis: *JSGlobalObject) ?JSAst.Ex
fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std.mem.Allocator, args: *JSC.Node.ArgumentsSlice) (bun.JSError || bun.OOM)!TranspilerOptions {
const globalThis = globalObject;
const object = args.next() orelse return TranspilerOptions{ .log = logger.Log.init(temp_allocator) };
if (object.isUndefinedOrNull()) return TranspilerOptions{ .log = logger.Log.init(temp_allocator) };
const object = args.next() orelse return TranspilerOptions{
.log = logger.Log.init(temp_allocator),
.runtime = Runtime.Features.default(),
};
if (object.isUndefinedOrNull()) return TranspilerOptions{ .log = logger.Log.init(temp_allocator), .runtime = Runtime.Features.default() };
args.eat();
var allocator = args.arena.allocator();
@@ -320,6 +323,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
.default_loader = .jsx,
.transform = default_transform_options,
.log = logger.Log.init(allocator),
.runtime = Runtime.Features.default(),
};
if (!object.isObject()) {
@@ -711,7 +715,7 @@ pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) b
const transpiler_options: TranspilerOptions = if (arguments.len > 0)
try transformOptionsFromJSC(globalThis, temp.allocator(), &args)
else
TranspilerOptions{ .log = logger.Log.init(getAllocator(globalThis)) };
TranspilerOptions{ .log = logger.Log.init(getAllocator(globalThis)), .runtime = Runtime.Features.default() };
if (globalThis.hasException()) {
return error.JSError;

View File

@@ -412,6 +412,16 @@ pub const BundleV2 = struct {
asynchronous: bool = false,
thread_lock: bun.DebugThreadLock,
/// Used by the dev server to apply the barrel file optimization.
///
/// Files which import a barrel file will have import records where
/// `record.tag == .barrel`.
///
/// They get added here so we can process them later and mutate them to point
/// to their final destination.
barrel_importers: std.AutoArrayHashMapUnmanaged(Index.Int, void) = .{},
barrel_files: std.AutoArrayHashMapUnmanaged(Index.Int, void) = .{},
const BakeOptions = struct {
framework: bake.Framework,
client_transpiler: *Transpiler,
@@ -477,6 +487,7 @@ pub const BundleV2 = struct {
}
const ReachableFileVisitor = struct {
barrel_files: *const std.AutoArrayHashMapUnmanaged(Index.Int, void),
reachable: std.ArrayList(Index),
visited: bun.bit_set.DynamicBitSet,
all_import_records: []ImportRecord.List,
@@ -511,7 +522,11 @@ pub const BundleV2 = struct {
}
return;
}
v.visited.set(source_index.get());
// Barrel files are intentionally never marked as visited here so they are
// re-examined on each traversal. TODO(review): confirm this is the intended
// mechanism rather than a temporary workaround.
if (!v.barrel_files.contains(source_index.get())) {
v.visited.set(source_index.get());
}
if (v.scb_bitset) |scb_bitset| {
if (scb_bitset.isSet(source_index.get())) {
@@ -607,6 +622,7 @@ pub const BundleV2 = struct {
const all_urls_for_css = this.graph.ast.items(.url_for_css);
var visitor = ReachableFileVisitor{
.barrel_files = &this.barrel_files,
.reachable = try std.ArrayList(Index).initCapacity(this.graph.allocator, this.graph.entry_points.items.len + 1),
.visited = try bun.bit_set.DynamicBitSet.initEmpty(this.graph.allocator, this.graph.input_files.len),
.redirects = this.graph.ast.items(.redirect_import_record_index),
@@ -2285,6 +2301,8 @@ pub const BundleV2 = struct {
}
on_parse_finalizers.deinit(bun.default_allocator);
}
defer this.barrel_importers.deinit(bun.default_allocator);
defer this.barrel_files.deinit(bun.default_allocator);
defer this.graph.ast.deinit(bun.default_allocator);
defer this.graph.input_files.deinit(bun.default_allocator);
@@ -2396,6 +2414,7 @@ pub const BundleV2 = struct {
const asts = this.graph.ast.slice();
const css_asts = asts.items(.css);
const symbols = asts.items(.symbols);
const input_files = this.graph.input_files.slice();
const loaders = input_files.items(.loader);
@@ -2407,6 +2426,13 @@ pub const BundleV2 = struct {
asts.items(.target)[1..],
1..,
) |part_list, import_records, maybe_css, target, index| {
if (comptime bun.Environment.isDebug) {
debug("index: {d}, name: {s}", .{ index, sources[index].path.text });
debug(" import records:", .{});
for (import_records.slice()) |*ir| {
debug(" {s}: {d}, {s}", .{ ir.path.text, ir.source_index.get(), @tagName(ir.tag) });
}
}
// Dev Server proceeds even with failed files.
// These files are filtered out via the lack of any parts.
//
@@ -2455,6 +2481,8 @@ pub const BundleV2 = struct {
}
}
this.redirectBarrelImports(@intCast(index), import_records.slice(), &asts.items(.named_imports)[index], symbols);
// Discover all CSS roots.
for (import_records.slice()) |*record| {
if (!record.source_index.isValid()) continue;
@@ -2477,6 +2505,88 @@ pub const BundleV2 = struct {
}
}
// const reachable_files = try this.findReachableFiles();
// for (reachable_files) |index2| {
// if (index2.isRuntime()) continue;
// const index = index2.get();
// const part_list = asts.items(.parts)[index];
// const import_records = asts.items(.import_records)[index];
// const maybe_css = css_asts[index];
// const target = asts.items(.target)[index];
// // Dev Server proceeds even with failed files.
// // These files are filtered out via the lack of any parts.
// //
// // Actual empty files will contain a part exporting an empty object.
// if (part_list.len != 0) {
// if (maybe_css != null) {
// // CSS has restrictions on what files can be imported.
// // This means the file can become an error after
// // resolution, which is not usually the case.
// css_total_files.appendAssumeCapacity(Index.init(index));
// var log = Logger.Log.init(this.graph.allocator);
// defer log.deinit();
// if (this.linker.scanCSSImports(
// @intCast(index),
// import_records.slice(),
// css_asts,
// sources,
// loaders,
// &log,
// ) == .errors) {
// // TODO: it could be possible for a plugin to change
// // the type of loader from whatever it was into a
// // css-compatible loader.
// try dev_server.handleParseTaskFailure(
// error.InvalidCssImport,
// .client,
// sources[index].path.text,
// &log,
// );
// // Since there is an error, do not treat it as a
// // valid CSS chunk.
// _ = start.css_entry_points.swapRemove(Index.init(index));
// }
// } else {
// // HTML files are special cased because they correspond
// // to routes in DevServer. They have a JS chunk too,
// // derived off of the import record list.
// if (loaders[index] == .html) {
// try html_files.put(this.graph.allocator, Index.init(index), {});
// } else {
// js_files.appendAssumeCapacity(Index.init(index));
// // Mark every part live.
// for (part_list.slice()) |*p| {
// p.is_live = true;
// }
// }
// this.redirectBarrelImports(@intCast(index), import_records.slice(), &asts.items(.named_imports)[index], symbols);
// // Discover all CSS roots.
// for (import_records.slice()) |*record| {
// if (!record.source_index.isValid()) continue;
// if (loaders[record.source_index.get()] != .css) continue;
// if (asts.items(.parts)[record.source_index.get()].len == 0) {
// record.source_index = Index.invalid;
// continue;
// }
// const gop = start.css_entry_points.getOrPutAssumeCapacity(record.source_index);
// if (target != .browser)
// gop.value_ptr.* = .{ .imported_on_server = true }
// else if (!gop.found_existing)
// gop.value_ptr.* = .{ .imported_on_server = false };
// }
// }
// } else {
// // Treat empty CSS files for removal.
// _ = start.css_entry_points.swapRemove(Index.init(index));
// }
// }
// Find CSS entry points. Originally, this was computed up front, but
// failed files do not remember their loader, and plugins can
// asynchronously decide a file is CSS.
@@ -2600,6 +2710,64 @@ pub const BundleV2 = struct {
});
}
/// Rewrites named imports that point at a "barrel" file (a module that only
/// re-exports from other modules) so each import record points directly at the
/// module that actually defines the symbol. Used by the dev server so barrel
/// files can be tree-shaken instead of pulling in every re-exported module.
///
/// For each named import whose record is tagged `.barrel`:
///   1. find the export in the barrel file which corresponds to the named import
///   2. update the symbol which the `name_ref` points to: add a link which
///      points to the symbol of the final destination
///   3. update `import_record.source_index` and _maybe_ `import_record.path`
///      to point to the final destination file
///
/// Imports that do not match any barrel export are left untouched; the bundler
/// reports the missing-symbol error itself later.
pub fn redirectBarrelImports(
    this: *BundleV2,
    source_index: Index.Int,
    import_records: []ImportRecord,
    named_imports: *JSAst.NamedImports,
    all_symbols: []Symbol.List,
) void {
    // Fast path: this file never imported from a barrel file.
    if (this.barrel_importers.count() == 0 or !this.barrel_importers.contains(source_index)) return;

    const map = this.pathToSourceIndexMap(.browser);
    const all_barrel_named_exports: []const JSAst.NamedExports = this.graph.ast.items(.named_exports);
    const all_named_imports: []const JSAst.NamedImports = this.graph.ast.items(.named_imports);
    const all_import_records: []const BabyList(ImportRecord) = this.graph.ast.items(.import_records);

    for (named_imports.keys(), named_imports.values()) |name_ref, *named_import| {
        const import_record = &import_records[named_import.import_record_index];

        if (comptime bun.Environment.isDebug) {
            debug("redirectBarrelImports: source_index={d}", .{source_index});
            const ir = import_record;
            debug("  {s}: {d}, {s}, {s}, {s}", .{ ir.path.text, ir.source_index.get(), @tagName(ir.tag), if (ir.is_unused) "unused" else "used", if (ir.is_internal) "internal" else "external" });
        }

        if (import_record.tag != .barrel) continue;
        // Skip records that were already resolved or will never be emitted.
        if (import_record.is_unused or import_record.is_internal or import_record.source_index.isValid()) continue;

        const barrel_file_index = map.get(import_record.path.hashKey()).?;
        const barrel_named_exports: *const JSAst.NamedExports = &all_barrel_named_exports[barrel_file_index];

        // Pair assertion: we asserted this was not null in `fn s_import()` in `js_parser.zig`
        // when we split up the import statements
        bun.assert(named_import.alias != null);

        if (barrel_named_exports.getPtr(named_import.alias.?)) |barrel_export| {
            // TODO(review): confirm `name_ref` is always a symbol here; this
            // assertion documents the assumption and catches violations in debug builds.
            bun.assert(name_ref.tag == .symbol);
            const symbols = &all_symbols[name_ref.source_index];
            symbols.mut(name_ref.inner_index).link = barrel_export.ref;

            const barrel_source_idx = barrel_export.ref.source_index;
            const barrel_named_imports = &all_named_imports[barrel_source_idx];
            const named_import2 = barrel_named_imports.getPtr(barrel_export.ref) orelse
                @panic("Internal assertion failure: barrel file export has no corresponding named import");
            const barrel_import_records = &all_import_records[barrel_source_idx];
            const final_destination_path = barrel_import_records.at(named_import2.import_record_index).path;
            const final_destination_idx = map.get(final_destination_path.hashKey()).?;
            import_record.source_index = Index.init(final_destination_idx);
            const path = this.graph.input_files.items(.source)[final_destination_idx].path;
            if (comptime bun.Environment.isDebug) {
                debug("  redirected barrel import to: {s}", .{path.text});
            }
            import_record.path = path;
        }
        // If we're here then the user imported something from the barrel file which never existed in the first place.
        // Do nothing and let the bundler handle this missing symbol error itself.
    }
}
pub fn enqueueOnResolvePluginIfNeeded(
this: *BundleV2,
source_index: Index.Int,
@@ -2760,6 +2928,11 @@ pub const BundleV2 = struct {
continue;
}
if (import_record.tag == .barrel) {
this.barrel_importers.put(bun.default_allocator, source.index.get(), {}) catch bun.outOfMemory();
this.barrel_files.put(bun.default_allocator, import_record.source_index.get(), {}) catch bun.outOfMemory();
}
if (this.framework) |fw| if (fw.server_components != null) {
switch (ast.target.isServerSide()) {
inline else => |is_server| {
@@ -2988,7 +3161,8 @@ pub const BundleV2 = struct {
break :brk;
}
import_record.source_index = Index.invalid;
// TODO(review): restore this invalidation once barrel-import redirection
// no longer needs the original source_index:
// import_record.source_index = Index.invalid;
if (dev_server.isFileCached(path.text, bake_graph)) |entry| {
const rel = bun.path.relativePlatform(this.transpiler.fs.top_level_dir, path.text, .loose, false);
@@ -3285,7 +3459,10 @@ pub const BundleV2 = struct {
var import_records = result.ast.import_records.clone(this.graph.allocator) catch unreachable;
const input_file_loaders = this.graph.input_files.items(.loader);
const save_import_record_source_index = this.transpiler.options.dev_server == null or
// const save_import_record_source_index = this.transpiler.options.dev_server == null or
// result.loader == .html or
// result.loader == .css;
const save_import_record_source_index =
result.loader == .html or
result.loader == .css;
@@ -4768,6 +4945,7 @@ pub const ParseTask = struct {
opts.features.inlining = transpiler.options.minify_syntax;
opts.output_format = output_format;
opts.features.minify_syntax = transpiler.options.minify_syntax;
opts.features.barrel_files = if (transpiler.options.dev_server != null) runtime.Runtime.Features.defaultBarrelFiles() catch bun.outOfMemory() else .{};
opts.features.minify_identifiers = transpiler.options.minify_identifiers;
opts.features.emit_decorator_metadata = transpiler.options.emit_decorator_metadata;
opts.features.unwrap_commonjs_packages = transpiler.options.unwrap_commonjs_packages;
@@ -16618,6 +16796,7 @@ pub const AstBuilder = struct {
comptime options: js_parser.Parser.Options = .{
.jsx = .{},
.bundle = true,
.features = .default(),
},
comptime import_items_for_namespace: struct {
pub fn get(_: @This(), _: Ref) ?js_parser.ImportItemForNamespaceMap {

View File

@@ -196,6 +196,8 @@ pub const ImportRecord = struct {
with_type_toml,
with_type_file,
barrel,
pub fn loader(this: Tag) ?bun.options.Loader {
return switch (this) {
.with_type_sqlite => .sqlite,

View File

@@ -6490,11 +6490,11 @@ pub const S = struct {
// This object represents all of these types of import statements:
//
// import 'path'
// import {item1, item2} from 'path'
// import * as ns from 'path'
// import defaultItem, {item1, item2} from 'path'
// import defaultItem, * as ns from 'path'
// import 'path' (namespace_ref = <empty>, default_name = null, items.len == 0)
// import {item1, item2} from 'path' (namespace_ref = <empty>, default_name = null, items.len == 2)
// import * as ns from 'path' (namespace_ref = ns, default_name = null, items.len == 0)
// import defaultItem, {item1, item2} from 'path' (namespace_ref = <empty>, default_name = defaultItem, items.len == 2)
// import defaultItem, * as ns from 'path' (namespace_ref = ns, default_name = defaultItem, items.len == 0)
//
// Many parts are optional and can be combined in different ways. The only
// restriction is that you cannot have both a clause and a star namespace.
@@ -7655,6 +7655,7 @@ pub const NamedImport = struct {
};
pub const NamedExport = struct {
/// The symbol that is being exported
ref: Ref,
alias_loc: logger.Loc,
};

View File

@@ -1349,6 +1349,12 @@ pub const ImportScanner = struct {
const name: LocRef = item.name;
const name_ref = name.ref.?;
if (p.source.index.get() == 2) {
if (bun.strings.contains(item.alias, "AlarmClock")) {
debug("WE ADDED THE FOCKIN NAMED IMPORT!!!! {d} {s}", .{ p.source.index.get(), item.alias });
}
}
p.named_imports.putAssumeCapacity(
name_ref,
js_ast.NamedImport{
@@ -2881,7 +2887,7 @@ pub const Parser = struct {
use_define_for_class_fields: bool = false,
suppress_warnings_about_weird_code: bool = true,
filepath_hash_for_hmr: u32 = 0,
features: RuntimeFeatures = .{},
features: RuntimeFeatures,
tree_shaking: bool = false,
bundle: bool = false,
@@ -2942,6 +2948,7 @@ pub const Parser = struct {
var opts = Options{
.ts = loader.isTypeScript(),
.jsx = jsx,
.features = .default(),
};
opts.jsx.parse = loader.isJSX();
return opts;
@@ -18965,6 +18972,64 @@ fn NewParser_(
}
}
// we are importing something from a barrel file
if (data.items.len > 0 and
p.options.features.barrel_files.count() > 0 and
p.options.features.barrel_files.contains(p.import_records.items[data.import_record_index].path.text))
{
const existing_import_record_idx = stmt.data.s_import.import_record_index;
const existing_import_record = &p.import_records.items[existing_import_record_idx];
// mark it so we can recognize it later in onParseTaskComplete
existing_import_record.tag = .barrel;
// if we import more than one thing in this statement, break up each
// individual import into its own statement so we can rewrite each path:
//
// ```ts
// /* before */
// import { Ooga, Booga } from 'dictionary'
//
// /* after */
// import { Ooga } from 'dictionary/words/Ooga.js'
// import { Booga } from 'dictionary/words/Booga.js'
// ```
//
// NOTE(review): the stated intent was to slice the existing items array to
// avoid N single-item allocations, but the code below currently dupes a
// 1-element array per item — reconcile the comment with the implementation.
// Either way the memory is owned by the parser arena, so nothing leaks.
if (data.items.len >= 1) {
const old_items = data.items;
data.items = &.{};
for (old_items, 0..) |*item, i| {
const new_items = p.allocator.dupe(js_ast.ClauseItem, item[0..1]) catch unreachable;
if (i == 0) {
// data.items = old_items[0..1];
data.items = new_items;
try stmts.append(stmt.*);
} else {
const new_import_record_idx = p.import_records.items.len;
try p.import_records.append(existing_import_record.*);
try stmts.append(p.s(
S.Import{
// .items = item[0..1],
.items = new_items,
.import_record_index = @truncate(new_import_record_idx),
.namespace_ref = data.namespace_ref,
// TODO(zack): support this later
.default_name = null,
.is_single_line = true,
// TODO(zack): support this later
.star_name_loc = null,
},
item.alias_loc,
));
}
}
}
return;
}
try stmts.append(stmt.*);
}
pub fn s_export_clause(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.ExportClause) !void {
@@ -23359,6 +23424,13 @@ fn NewParser_(
hashbang: []const u8,
) !js_ast.Ast {
const allocator = p.allocator;
if (p.source.index.get() == 2) {
debug("THE FOCKIN NAMED IMPORTS", .{});
var iter = p.named_imports.iterator();
while (iter.next()) |entry| {
debug("KEY: {} VALUE: {s}", .{ entry.key_ptr.*, if (entry.value_ptr.alias) |alias| alias else "null" });
}
}
// if (p.options.tree_shaking and p.options.features.trim_unused_imports) {
// p.treeShake(&parts, false);
@@ -24206,6 +24278,13 @@ pub const ConvertESMExportsForHmr = struct {
return; // do not emit a statement here
},
.s_export_from => |st| {
// Barrel imports were split into separate per-item statements earlier;
// skip deduplication so they are not merged back into a single statement.
if (p.import_records.items[st.import_record_index].tag == .barrel) {
return;
}
const namespace_ref = try ctx.deduplicatedImport(
p,
st.import_record_index,
@@ -24239,6 +24318,13 @@ pub const ConvertESMExportsForHmr = struct {
return;
},
.s_export_star => |st| {
// Barrel imports were split into separate per-item statements earlier;
// skip deduplication so they are not merged back into a single statement.
if (p.import_records.items[st.import_record_index].tag == .barrel) {
return;
}
const namespace_ref = try ctx.deduplicatedImport(
p,
st.import_record_index,
@@ -24258,6 +24344,14 @@ pub const ConvertESMExportsForHmr = struct {
// named/default imports here as we always rewrite them as
// full qualified property accesses (needed for live-bindings)
.s_import => |st| {
// Barrel imports were split into separate per-item statements earlier;
// keep each statement as-is instead of routing it through deduplication.
if (p.import_records.items[st.import_record_index].tag == .barrel) {
try ctx.stmts.append(p.allocator, stmt);
return;
}
_ = try ctx.deduplicatedImport(
p,
st.import_record_index,

View File

@@ -228,6 +228,15 @@ pub const Runtime = struct {
// TODO: make this a bitset of all unsupported features
lower_using: bool = true,
barrel_files: std.StringArrayHashMapUnmanaged(void),
/// Canonical default feature set: top-level await enabled and no barrel
/// files registered. Callers that need the barrel-file optimization use
/// `defaultBarrelFiles()` to populate the map separately.
pub fn default() Features {
    const features = Features{
        .top_level_await = true,
        .barrel_files = .{},
    };
    return features;
}
const hash_fields_for_runtime_transpiler = .{
.top_level_await,
.auto_import_jsx,
@@ -247,6 +256,97 @@ pub const Runtime = struct {
// note that we do not include .inject_jest_globals, as we bail out of the cache entirely if this is true
};
/// Builds the default set of packages treated as barrel files.
/// Keys are static string literals, so the map never owns its key memory;
/// the caller owns the map itself and must `deinit` it with
/// `bun.default_allocator`. Returns `error.OutOfMemory` if allocation fails.
///
/// Package list taken verbatim from:
/// https://github.com/vercel/next.js/blob/d69f796522cb843b959e6d30d6964873cfd14d23/packages/next/src/server/config.ts#L937-L1067
pub fn defaultBarrelFiles() !std.StringArrayHashMapUnmanaged(void) {
    const package_names = [_][]const u8{
        "lucide-react",
        "date-fns",
        "lodash-es",
        "ramda",
        "antd",
        "react-bootstrap",
        "ahooks",
        "@ant-design/icons",
        "@headlessui/react",
        "@headlessui-float/react",
        "@heroicons/react/20/solid",
        "@heroicons/react/24/solid",
        "@heroicons/react/24/outline",
        "@visx/visx",
        "@tremor/react",
        "rxjs",
        "@mui/material",
        "@mui/icons-material",
        "recharts",
        "react-use",
        "effect",
        "@effect/schema",
        "@effect/platform",
        "@effect/platform-node",
        "@effect/platform-browser",
        "@effect/platform-bun",
        "@effect/sql",
        "@effect/sql-mssql",
        "@effect/sql-mysql2",
        "@effect/sql-pg",
        // NOTE(review): the "squlite" spellings below are copied as-is from the
        // upstream list — verify they should not be "sqlite" before changing them.
        "@effect/sql-squlite-node",
        "@effect/sql-squlite-bun",
        "@effect/sql-squlite-wasm",
        "@effect/sql-squlite-react-native",
        "@effect/rpc",
        "@effect/rpc-http",
        "@effect/typeclass",
        "@effect/experimental",
        "@effect/opentelemetry",
        "@material-ui/core",
        "@material-ui/icons",
        "@tabler/icons-react",
        "mui-core",
        // We don't support wildcard imports for these configs, e.g. `react-icons/*`
        // so we need to add them manually.
        // In the future, we should consider automatically detecting packages that
        // need to be optimized.
        "react-icons/ai",
        "react-icons/bi",
        "react-icons/bs",
        "react-icons/cg",
        "react-icons/ci",
        "react-icons/di",
        "react-icons/fa",
        "react-icons/fa6",
        "react-icons/fc",
        "react-icons/fi",
        "react-icons/gi",
        "react-icons/go",
        "react-icons/gr",
        "react-icons/hi",
        "react-icons/hi2",
        "react-icons/im",
        "react-icons/io",
        "react-icons/io5",
        "react-icons/lia",
        "react-icons/lib",
        "react-icons/lu",
        "react-icons/md",
        "react-icons/pi",
        "react-icons/ri",
        "react-icons/rx",
        "react-icons/si",
        "react-icons/sl",
        "react-icons/tb",
        "react-icons/tfi",
        "react-icons/ti",
        "react-icons/vsc",
        "react-icons/wi",
    };

    // Size the map up front so every insert below is infallible.
    var result: std.StringArrayHashMapUnmanaged(void) = .{};
    try result.ensureTotalCapacity(bun.default_allocator, package_names.len);
    for (package_names) |pkg| {
        result.putAssumeCapacity(pkg, {});
    }
    return result;
}
pub fn hashForRuntimeTranspiler(this: *const Features, hasher: *std.hash.Wyhash) void {
bun.assert(this.runtime_transpiler_cache != null);