commit ed3f2d6629 (parent 95bd24e6b0)
Author: RiskyMH
Date:   2025-08-07 18:17:06 +10:00

4 changed files with 215 additions and 0 deletions

View File

@@ -496,6 +496,10 @@ pub fn NewParser_(
    p.import_records.items[import_record_index].tag = tag;
}
if (state.import_loader) |loader| {
    p.import_records.items[import_record_index].loader = loader;
}
p.import_records.items[import_record_index].handles_import_errors = (state.is_await_target and p.fn_or_arrow_data_visit.try_body_count != 0) or state.is_then_catch_target;
p.import_records_for_current_part.append(p.allocator, import_record_index) catch unreachable;
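
For context, this first hunk lets an import record carry a per-import loader override from parser state. A hypothetical input that would exercise it, assuming `state.import_loader` is populated from a `type` import attribute (an assumption; the attribute parsing itself is not shown in this diff):

// Hypothetical example: the `type` attribute requests the text loader,
// overriding whatever loader the ".md" extension would normally select.
import readme from "./README.md" with { type: "text" };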
@@ -5854,6 +5858,207 @@ pub fn NewParser_(
    } };
}

/// Expands `import.meta.glob(patterns, options)` into an object literal that
/// maps each matched file path to a lazy `() => import(path)` arrow function.
pub fn handleImportMetaGlobCall(p: *P, call: *E.Call, loc: logger.Loc) Expr {
    const glob = @import("../glob.zig");

    if (call.args.len == 0) {
        p.log.addError(p.source, loc, "import.meta.glob() requires at least one argument") catch unreachable;
        return p.newExpr(E.Object{}, loc);
    }

    // Parse patterns
    var patterns = std.ArrayList([]const u8).init(p.allocator);
    defer patterns.deinit();
    switch (call.args.at(0).data) {
        .e_string => |str| patterns.append(str.slice(p.allocator)) catch unreachable,
        .e_array => |arr| {
            for (arr.items.slice()) |item| {
                if (item.data == .e_string) {
                    patterns.append(item.data.e_string.slice(p.allocator)) catch unreachable;
                } else {
                    p.log.addError(p.source, item.loc, "import.meta.glob() patterns must be string literals") catch unreachable;
                    return p.newExpr(E.Object{}, loc);
                }
            }
        },
        else => {
            p.log.addError(p.source, call.args.at(0).loc, "import.meta.glob() patterns must be string literals or an array of string literals") catch unreachable;
            return p.newExpr(E.Object{}, loc);
        },
    }

    // Parse options
    var query: ?[]const u8 = null;
    var import_name: ?[]const u8 = null;
    var loader: ?options.Loader = null;
    var with_attrs: ?*const E.Object = null;
    if (call.args.len >= 2 and call.args.at(1).data == .e_object) {
        const obj = call.args.at(1).data.e_object;
        if (obj.get("query")) |query_value| {
            if (query_value.data == .e_string) {
                query = query_value.data.e_string.slice(p.allocator);
            }
        }
        if (obj.get("import")) |import_value| {
            if (import_value.data == .e_string) {
                import_name = import_value.data.e_string.slice(p.allocator);
            }
        }
        if (obj.get("with")) |with_value| {
            if (with_value.data == .e_object) {
                with_attrs = with_value.data.e_object;
                if (with_attrs.?.get("type")) |type_value| {
                    if (type_value.data == .e_string) {
                        loader = options.Loader.fromString(type_value.data.e_string.slice(p.allocator));
                    }
                }
            }
        }
    }

    // Find matching files
    const source_dir = p.source.path.sourceDir();
    var matched_files = bun.StringHashMap(void).init(p.allocator);
    defer matched_files.deinit();
    var glob_arena = bun.ArenaAllocator.init(p.allocator);
    defer glob_arena.deinit();
    for (patterns.items) |pattern| {
        if (!strings.hasPrefix(pattern, "./") and !strings.hasPrefix(pattern, "../")) {
            p.log.addErrorFmt(p.source, call.args.at(0).loc, p.allocator, "Glob pattern \"{s}\" must be a relative path starting with ./ or ../", .{pattern}) catch unreachable;
            return p.newExpr(E.Object{}, loc);
        }
        var walker = glob.BunGlobWalker{};
        defer walker.deinit(false);
        const clean_pattern = if (strings.hasPrefix(pattern, "./")) pattern[2..] else pattern;
        switch (walker.initWithCwd(&glob_arena, clean_pattern, source_dir, true, false, true, false, true) catch unreachable) {
            .err => continue,
            .result => {},
        }
        var iter = glob.BunGlobWalker.Iterator{ .walker = &walker };
        defer iter.deinit();
        switch (iter.init() catch unreachable) {
            .err => continue,
            .result => {},
        }
        while (switch (iter.next() catch unreachable) {
            .err => null,
            .result => |path| path,
        }) |path| {
            const rel_path = if (strings.hasPrefix(path, source_dir)) path[source_dir.len + @intFromBool(path[source_dir.len] == '/') ..] else path;
            var path_buf: bun.PathBuffer = undefined;
            const slash_normalized = if (bun.Environment.isWindows)
                strings.normalizeSlashesOnly(&path_buf, rel_path, '/')
            else
                rel_path;
            const normalized = if (strings.hasPrefix(slash_normalized, "./"))
                p.allocator.dupe(u8, slash_normalized) catch unreachable
            else
                std.fmt.allocPrint(p.allocator, "./{s}", .{slash_normalized}) catch unreachable;
            matched_files.put(normalized, {}) catch unreachable;
        }
    }

    // Sort files
    var files = std.ArrayList([]const u8).init(p.allocator);
    defer files.deinit();
    var iter = matched_files.iterator();
    while (iter.next()) |entry| {
        files.append(entry.key_ptr.*) catch unreachable;
    }
    std.sort.block([]const u8, files.items, {}, struct {
        fn lessThan(_: void, a: []const u8, b_path: []const u8) bool {
            return strings.order(a, b_path) == .lt;
        }
    }.lessThan);

    // Create properties
    var properties = p.allocator.alloc(G.Property, files.items.len) catch unreachable;
    for (files.items, 0..) |file_path, i| {
        const import_path = if (query) |q|
            std.fmt.allocPrint(p.allocator, "{s}{s}", .{ file_path, q }) catch unreachable
        else
            file_path;
        const import_record_index = p.addImportRecord(.dynamic, loc, import_path);
        p.import_records_for_current_part.append(p.allocator, import_record_index) catch unreachable;
        if (loader) |l| p.import_records.items[import_record_index].loader = l;
        const import_expr = p.newExpr(E.Import{
            .expr = p.newExpr(E.String{ .data = import_path }, loc),
            .options = if (with_attrs) |attrs| blk: {
                var with_props = p.allocator.alloc(G.Property, 1) catch unreachable;
                with_props[0] = .{
                    .key = p.newExpr(E.String{ .data = "with" }, loc),
                    .value = p.newExpr(E.Object{ .properties = attrs.properties }, loc),
                };
                break :blk p.newExpr(E.Object{ .properties = G.Property.List.init(with_props) }, loc);
            } else Expr.empty,
            .import_record_index = import_record_index,
        }, loc);
        const return_expr = if (import_name) |name| blk: {
            // Create import('./file').then(m => m.name)
            const m_ref = p.newSymbol(.other, "m") catch unreachable;
            var arrow_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
            arrow_stmts[0] = p.s(S.Return{ .value = p.newExpr(E.Dot{
                .target = p.newExpr(E.Identifier{ .ref = m_ref }, loc),
                .name = name,
                .name_loc = loc,
            }, loc) }, loc);
            var arrow_args = p.allocator.alloc(G.Arg, 1) catch unreachable;
            arrow_args[0] = .{
                .binding = p.b(B.Identifier{ .ref = m_ref }, logger.Loc.Empty),
            };
            const arrow_fn = p.newExpr(E.Arrow{
                .args = arrow_args,
                .body = .{ .loc = loc, .stmts = arrow_stmts },
                .prefer_expr = true,
            }, loc);
            break :blk p.newExpr(E.Call{
                .target = p.newExpr(E.Dot{
                    .target = import_expr,
                    .name = "then",
                    .name_loc = loc,
                }, loc),
                .args = ExprNodeList.fromSlice(p.allocator, &.{arrow_fn}) catch unreachable,
            }, loc);
        } else import_expr;
        var outer_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
        outer_stmts[0] = p.s(S.Return{ .value = return_expr }, loc);
        properties[i] = .{
            .key = p.newExpr(E.String{ .data = file_path }, loc),
            .value = p.newExpr(E.Arrow{
                .args = &.{},
                .body = .{ .loc = loc, .stmts = outer_stmts },
                .prefer_expr = true,
            }, loc),
        };
    }

    return p.newExpr(E.Object{
        .properties = G.Property.List.init(properties),
    }, loc);
}
const ReactRefreshExportKind = enum { named, default };
pub fn handleReactRefreshRegister(p: *P, stmts: *ListManaged(Stmt), original_name: []const u8, ref: Ref, export_kind: ReactRefreshExportKind) !void {
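
For reference, a sketch of the transform `handleImportMetaGlobCall` performs, using hypothetical file names (`./icons/add.svg` and `./icons/close.svg` are assumptions for illustration):

// Input:
const icons = import.meta.glob("./icons/*.svg");
// Output shape (keys deduplicated and sorted; each value is a lazy importer):
// {
//   "./icons/add.svg": () => import("./icons/add.svg"),
//   "./icons/close.svg": () => import("./icons/close.svg"),
// }

// Options: `query` is appended to each specifier, `import` selects one export
// via .then(), and `with` is forwarded as import attributes (its `type` key
// also picks the loader for the import record):
const urls = import.meta.glob("./icons/*.svg", { query: "?url", import: "default" });
// "./icons/add.svg": () => import("./icons/add.svg?url").then((m) => m.default)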

View File

@@ -847,6 +847,7 @@ pub const SideEffects = enum(u1) {
.module_exports,
.resolved_specifier_string,
.hot_data,
.import_meta_glob,
=> {},
.hot_accept,
.hot_accept_visited,
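
The second file only registers the new tag in this side-effect classification switch, grouping `.import_meta_glob` with specials that need no extra handling there. Presumably (my reading of the switch, not stated in the diff) this means a bare, uncalled reference carries no side effects:

// Presumably droppable as dead code under the classification above:
import.meta.glob;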

View File

@@ -410,6 +410,12 @@ pub fn AstMaybe(
        }, .loc = loc };
    }
    if (strings.eqlComptime(name, "glob")) {
        return .{ .data = .{
            .e_special = .import_meta_glob,
        }, .loc = loc };
    }

    // Inline import.meta properties for Bake
    if (p.options.framework != null) {
        if (strings.eqlComptime(name, "dir") or strings.eqlComptime(name, "dirname")) {

View File

@@ -1355,6 +1355,9 @@ pub fn VisitExpr(
        if (!p.options.features.hot_module_reloading)
            return .{ .data = .e_undefined, .loc = expr.loc };
    },
    .import_meta_glob => {
        return p.handleImportMetaGlobCall(e_, expr.loc);
    },
    else => {},
};
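
Taken together: the third file tags the `import.meta.glob` property access as `.e_special = .import_meta_glob`, and this final hunk lowers the call while visiting expressions. Invalid arguments are rejected at that point with the errors defined in `handleImportMetaGlobCall`; a few inputs that would trip them (`somePattern` is a hypothetical non-literal argument):

import.meta.glob();                // error: requires at least one argument
import.meta.glob(somePattern);     // error: patterns must be string literals
import.meta.glob("pages/*.tsx");   // error: pattern must start with ./ or ../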