mirror of https://github.com/oven-sh/bun (synced 2026-02-12 03:48:56 +00:00)
Split JS parser into multiple files (#20880)
Splits up js_parser.zig into multiple files. Also changes visitExprInOut to use function calls rather than a switch.

Not ready:

- [ ] P.zig is ~70,000 tokens, still needs to get smaller
- [x] ~~measure zig build time before & after (is it slower?)~~ no significant impact

---------

Co-authored-by: pfgithub <6010774+pfgithub@users.noreply.github.com>
src/ast/ConvertESMExportsForHmr.zig (new file, 526 lines)
@@ -0,0 +1,526 @@
last_part: *js_ast.Part,
// files in node modules will not get hot updates, so the code generation
// can be a bit more concise for re-exports
is_in_node_modules: bool,
imports_seen: bun.StringArrayHashMapUnmanaged(ImportRef) = .{},
export_star_props: std.ArrayListUnmanaged(G.Property) = .{},
export_props: std.ArrayListUnmanaged(G.Property) = .{},
stmts: std.ArrayListUnmanaged(Stmt) = .{},

const ImportRef = struct {
    /// Index into ConvertESMExportsForHmr.stmts
    stmt_index: u32,
};
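
// Illustrative sketch (not emitted verbatim): a module like
//
//   export const a = 1;
//   export default function App() {}
//
// has its export statements rewritten into plain declarations while the
// exported names are collected into `export_props`; `finalize` then emits
// a single `hmr.exports = { ... }` object, using getter properties where
// live bindings must be observed.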

pub fn convertStmt(ctx: *ConvertESMExportsForHmr, p: anytype, stmt: Stmt) !void {
    const new_stmt = switch (stmt.data) {
        else => brk: {
            break :brk stmt;
        },
        .s_local => |st| stmt: {
            if (!st.is_export) {
                break :stmt stmt;
            }

            st.is_export = false;

            var new_len: usize = 0;
            for (st.decls.slice()) |*decl_ptr| {
                const decl = decl_ptr.*; // explicit copy to avoid aliasing
                const value = decl.value orelse {
                    st.decls.mut(new_len).* = decl;
                    new_len += 1;
                    try ctx.visitBindingToExport(p, decl.binding);
                    continue;
                };

                switch (decl.binding.data) {
                    .b_missing => {},

                    .b_identifier => |id| {
                        const symbol = p.symbols.items[id.ref.inner_index];

                        // if the symbol is not used, we don't need to preserve
                        // a binding in this scope. we can move it to the exports object.
                        if (symbol.use_count_estimate == 0 and value.canBeMoved()) {
                            try ctx.export_props.append(p.allocator, .{
                                .key = Expr.init(E.String, .{ .data = symbol.original_name }, decl.binding.loc),
                                .value = value,
                            });
                        } else {
                            st.decls.mut(new_len).* = decl;
                            new_len += 1;
                            try ctx.visitBindingToExport(p, decl.binding);
                        }
                    },

                    else => {
                        st.decls.mut(new_len).* = decl;
                        new_len += 1;
                        try ctx.visitBindingToExport(p, decl.binding);
                    },
                }
            }
            if (new_len == 0) {
                return;
            }
            st.decls.len = @intCast(new_len);

            break :stmt stmt;
        },
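        // The `.s_local` case above: e.g. `export const x = 1;` is reduced to
        // `const x = 1;`; when `x` is otherwise unused and its value is
        // movable, the declaration is dropped entirely and `x` is recorded
        // directly in `export_props`.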
        .s_export_default => |st| stmt: {
            // When React Fast Refresh needs to tag the default export, the statement
            // cannot be moved, since a local reference is required.
            if (p.options.features.react_fast_refresh and
                st.value == .stmt and st.value.stmt.data == .s_function)
            fast_refresh_edge_case: {
                const symbol = st.value.stmt.data.s_function.func.name orelse
                    break :fast_refresh_edge_case;
                const name = p.symbols.items[symbol.ref.?.inner_index].original_name;
                if (ReactRefresh.isComponentishName(name)) {
                    // Lower to a function statement, and reference the function in the export list.
                    try ctx.export_props.append(p.allocator, .{
                        .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc),
                        .value = Expr.initIdentifier(symbol.ref.?, stmt.loc),
                    });
                    break :stmt st.value.stmt;
                }
                // All other functions can be properly moved.
            }

            // Try to move the export default expression to the end.
            const can_be_moved_to_inner_scope = switch (st.value) {
                .stmt => |s| switch (s.data) {
                    .s_class => |c| c.class.canBeMoved() and (if (c.class.class_name) |name|
                        p.symbols.items[name.ref.?.inner_index].use_count_estimate == 0
                    else
                        true),
                    .s_function => |f| if (f.func.name) |name|
                        p.symbols.items[name.ref.?.inner_index].use_count_estimate == 0
                    else
                        true,
                    else => unreachable,
                },
                .expr => |e| switch (e.data) {
                    .e_identifier => true,
                    else => e.canBeMoved(),
                },
            };
            if (can_be_moved_to_inner_scope) {
                try ctx.export_props.append(p.allocator, .{
                    .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc),
                    .value = st.value.toExpr(),
                });
                // no statement emitted
                return;
            }

            // Otherwise, an identifier must be exported
            switch (st.value) {
                .expr => {
                    const temp_id = p.generateTempRef("default_export");
                    try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = temp_id, .is_top_level = true });
                    try ctx.last_part.symbol_uses.putNoClobber(p.allocator, temp_id, .{ .count_estimate = 1 });
                    try p.current_scope.generated.push(p.allocator, temp_id);

                    try ctx.export_props.append(p.allocator, .{
                        .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc),
                        .value = Expr.initIdentifier(temp_id, stmt.loc),
                    });

                    break :stmt Stmt.alloc(S.Local, .{
                        .kind = .k_const,
                        .decls = try G.Decl.List.fromSlice(p.allocator, &.{
                            .{
                                .binding = Binding.alloc(p.allocator, B.Identifier{ .ref = temp_id }, stmt.loc),
                                .value = st.value.toExpr(),
                            },
                        }),
                    }, stmt.loc);
                },
                .stmt => |s| {
                    try ctx.export_props.append(p.allocator, .{
                        .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc),
                        .value = Expr.initIdentifier(switch (s.data) {
                            .s_class => |class| class.class.class_name.?.ref.?,
                            .s_function => |func| func.func.name.?.ref.?,
                            else => unreachable,
                        }, stmt.loc),
                    });
                    break :stmt s;
                },
            }
        },
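        // The `.s_export_default` case above: e.g. `export default 1 + 1`
        // cannot be moved as-is, so it becomes roughly
        // `const default_export = 1 + 1;` (using a generated temporary) plus
        // a `default` entry in `export_props` that references the temporary.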
        .s_class => |st| stmt: {
            // Strip the "export" keyword
            if (!st.is_export) {
                break :stmt stmt;
            }

            // Export as CommonJS
            try ctx.export_props.append(p.allocator, .{
                .key = Expr.init(E.String, .{
                    .data = p.symbols.items[st.class.class_name.?.ref.?.inner_index].original_name,
                }, stmt.loc),
                .value = Expr.initIdentifier(st.class.class_name.?.ref.?, stmt.loc),
            });

            st.is_export = false;

            break :stmt stmt;
        },
        .s_function => |st| stmt: {
            // Strip the "export" keyword
            if (!st.func.flags.contains(.is_export)) break :stmt stmt;

            st.func.flags.remove(.is_export);

            try ctx.visitRefToExport(
                p,
                st.func.name.?.ref.?,
                null,
                stmt.loc,
                false,
            );

            break :stmt stmt;
        },
        .s_export_clause => |st| {
            for (st.items) |item| {
                const ref = item.name.ref.?;
                try ctx.visitRefToExport(p, ref, item.alias, item.name.loc, false);
            }

            return; // do not emit a statement here
        },
        .s_export_from => |st| {
            const namespace_ref = try ctx.deduplicatedImport(
                p,
                st.import_record_index,
                st.namespace_ref,
                st.items,
                stmt.loc,
                null,
                stmt.loc,
            );
            for (st.items) |*item| {
                const ref = item.name.ref.?;
                const symbol = &p.symbols.items[ref.innerIndex()];
                if (symbol.namespace_alias == null) {
                    symbol.namespace_alias = .{
                        .namespace_ref = namespace_ref,
                        .alias = item.original_name,
                        .import_record_index = st.import_record_index,
                    };
                }
                try ctx.visitRefToExport(
                    p,
                    ref,
                    item.alias,
                    item.name.loc,
                    !ctx.is_in_node_modules, // live binding when this may be replaced
                );

                // imports and export statements have their alias +
                // original_name swapped. this is likely a design bug in
                // the parser but since everything uses these
                // assumptions, this hack is simpler than making it
                // proper
                const alias = item.alias;
                item.alias = item.original_name;
                item.original_name = alias;
            }
            return;
        },
        .s_export_star => |st| {
            const namespace_ref = try ctx.deduplicatedImport(
                p,
                st.import_record_index,
                st.namespace_ref,
                &.{},
                stmt.loc,
                null,
                stmt.loc,
            );

            if (st.alias) |alias| {
                // 'export * as ns from' creates one named property.
                try ctx.export_props.append(p.allocator, .{
                    .key = Expr.init(E.String, .{ .data = alias.original_name }, stmt.loc),
                    .value = Expr.initIdentifier(namespace_ref, stmt.loc),
                });
            } else {
                // 'export * from' creates a spread, hoisted at the top.
                try ctx.export_star_props.append(p.allocator, .{
                    .kind = .spread,
                    .value = Expr.initIdentifier(namespace_ref, stmt.loc),
                });
            }
            return;
        },
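        // The `.s_export_star` case above: e.g. `export * as ns from "./dep"`
        // adds a single `ns` property pointing at the namespace object, while
        // `export * from "./dep"` adds a spread that `finalize` hoists to the
        // front of the exports object so explicitly named exports can
        // override it.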
        // De-duplicate import statements. It is okay to disregard
        // named/default imports here as we always rewrite them as
        // fully qualified property accesses (needed for live-bindings)
        .s_import => |st| {
            _ = try ctx.deduplicatedImport(
                p,
                st.import_record_index,
                st.namespace_ref,
                st.items,
                st.star_name_loc,
                st.default_name,
                stmt.loc,
            );
            return;
        },
    };

    try ctx.stmts.append(p.allocator, new_stmt);
}

/// Deduplicates imports, returning a previously used Ref if present.
fn deduplicatedImport(
    ctx: *ConvertESMExportsForHmr,
    p: anytype,
    import_record_index: u32,
    namespace_ref: Ref,
    items: []js_ast.ClauseItem,
    star_name_loc: ?logger.Loc,
    default_name: ?js_ast.LocRef,
    loc: logger.Loc,
) !Ref {
    const ir = &p.import_records.items[import_record_index];
    const gop = try ctx.imports_seen.getOrPut(p.allocator, ir.path.text);
    if (gop.found_existing) {
        // Disable this one since an older record is getting used. It isn't
        // practical to delete this import record entry since an import or
        // require expression can exist.
        ir.is_unused = true;

        const stmt = ctx.stmts.items[gop.value_ptr.stmt_index].data.s_import;
        if (items.len > 0) {
            if (stmt.items.len == 0) {
                stmt.items = items;
            } else {
                stmt.items = try std.mem.concat(p.allocator, js_ast.ClauseItem, &.{ stmt.items, items });
            }
        }
        if (namespace_ref.isValid()) {
            if (!stmt.namespace_ref.isValid()) {
                stmt.namespace_ref = namespace_ref;
                return namespace_ref;
            } else {
                // Erase this namespace ref, but since it may be used in
                // existing AST trees, a link must be established.
                const symbol = &p.symbols.items[namespace_ref.innerIndex()];
                symbol.use_count_estimate = 0;
                symbol.link = stmt.namespace_ref;
                if (@hasField(@typeInfo(@TypeOf(p)).pointer.child, "symbol_uses")) {
                    _ = p.symbol_uses.swapRemove(namespace_ref);
                }
            }
        }
        if (stmt.star_name_loc == null) if (star_name_loc) |stl| {
            stmt.star_name_loc = stl;
        };
        if (stmt.default_name == null) if (default_name) |dn| {
            stmt.default_name = dn;
        };
        return stmt.namespace_ref;
    }

    try ctx.stmts.append(p.allocator, Stmt.alloc(S.Import, .{
        .import_record_index = import_record_index,
        .is_single_line = true,
        .default_name = default_name,
        .items = items,
        .namespace_ref = namespace_ref,
        .star_name_loc = star_name_loc,
    }, loc));

    gop.value_ptr.* = .{ .stmt_index = @intCast(ctx.stmts.items.len - 1) };
    return namespace_ref;
}
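
// e.g. `import { a } from "m"; import { b } from "m";` collapses into one
// import statement whose clause list becomes `{ a, b }`; the second record
// is marked unused and its namespace symbol is linked to the surviving one.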

fn visitBindingToExport(ctx: *ConvertESMExportsForHmr, p: anytype, binding: Binding) !void {
    switch (binding.data) {
        .b_missing => {},
        .b_identifier => |id| {
            try ctx.visitRefToExport(p, id.ref, null, binding.loc, false);
        },
        .b_array => |array| {
            for (array.items) |item| {
                try ctx.visitBindingToExport(p, item.binding);
            }
        },
        .b_object => |object| {
            for (object.properties) |item| {
                try ctx.visitBindingToExport(p, item.value);
            }
        },
    }
}

fn visitRefToExport(
    ctx: *ConvertESMExportsForHmr,
    p: anytype,
    ref: Ref,
    export_symbol_name: ?[]const u8,
    loc: logger.Loc,
    is_live_binding_source: bool,
) !void {
    const symbol = p.symbols.items[ref.inner_index];
    const id = if (symbol.kind == .import)
        Expr.init(E.ImportIdentifier, .{ .ref = ref }, loc)
    else
        Expr.initIdentifier(ref, loc);
    if (is_live_binding_source or (symbol.kind == .import and !ctx.is_in_node_modules) or symbol.has_been_assigned_to) {
        // TODO (2024-11-24) instead of requiring getters for live-bindings,
        // a callback propagation system should be considered. mostly
        // because here, these might not even be live bindings, and
        // re-exports are so, so common.
        //
        // update(2025-03-05): HMRModule in ts now contains an exhaustive map
        // of importers. For local live bindings, these can just remember to
        // mutate the field in the exports object. Re-exports can just be
        // encoded into the module format, propagated in `replaceModules`
        const key = Expr.init(E.String, .{
            .data = export_symbol_name orelse symbol.original_name,
        }, loc);

        // This is technically incorrect in that we've marked this as a
        // top level symbol. but all we care about is preventing name
        // collisions, not necessarily the best minification (dev only)
        const arg1 = p.generateTempRef(symbol.original_name);
        try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = arg1, .is_top_level = true });
        try ctx.last_part.symbol_uses.putNoClobber(p.allocator, arg1, .{ .count_estimate = 1 });
        try p.current_scope.generated.push(p.allocator, arg1);

        // 'get abc() { return abc }'
        try ctx.export_props.append(p.allocator, .{
            .kind = .get,
            .key = key,
            .value = Expr.init(E.Function, .{ .func = .{
                .body = .{
                    .stmts = try p.allocator.dupe(Stmt, &.{
                        Stmt.alloc(S.Return, .{ .value = id }, loc),
                    }),
                    .loc = loc,
                },
            } }, loc),
        });
        // no setter is added since live bindings are read-only
    } else {
        // 'abc,'
        try ctx.export_props.append(p.allocator, .{
            .key = Expr.init(E.String, .{
                .data = export_symbol_name orelse symbol.original_name,
            }, loc),
            .value = id,
        });
    }
}
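
// e.g. an imported or assigned-to binding `x` is exported as
// `get x() { return x }` so readers always observe its current value,
// while a plain immutable local is exported as an ordinary `x` property.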

pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.Part) !void {
    if (ctx.export_star_props.items.len > 0) {
        if (ctx.export_props.items.len == 0) {
            ctx.export_props = ctx.export_star_props;
        } else {
            const export_star_len = ctx.export_star_props.items.len;
            try ctx.export_props.ensureUnusedCapacity(p.allocator, export_star_len);
            const len = ctx.export_props.items.len;
            ctx.export_props.items.len += export_star_len;
            bun.copy(G.Property, ctx.export_props.items[export_star_len..], ctx.export_props.items[0..len]);
            @memcpy(ctx.export_props.items[0..export_star_len], ctx.export_star_props.items);
        }
    }

    if (ctx.export_props.items.len > 0) {
        const obj = Expr.init(E.Object, .{
            .properties = G.Property.List.fromList(ctx.export_props),
        }, logger.Loc.Empty);

        // `hmr.exports = ...`
        try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{
            .value = Expr.assign(
                Expr.init(E.Dot, .{
                    .target = Expr.initIdentifier(p.hmr_api_ref, logger.Loc.Empty),
                    .name = "exports",
                    .name_loc = logger.Loc.Empty,
                }, logger.Loc.Empty),
                obj,
            ),
        }, logger.Loc.Empty));

        // mark a dependency on module_ref so it is renamed
        try ctx.last_part.symbol_uses.put(p.allocator, p.module_ref, .{ .count_estimate = 1 });
        try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = p.module_ref, .is_top_level = true });
    }

    if (p.options.features.react_fast_refresh and p.react_refresh.register_used) {
        try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{
            .value = Expr.init(E.Call, .{
                .target = Expr.init(E.Dot, .{
                    .target = Expr.initIdentifier(p.hmr_api_ref, .Empty),
                    .name = "reactRefreshAccept",
                    .name_loc = .Empty,
                }, .Empty),
                .args = .init(&.{}),
            }, .Empty),
        }, .Empty));
    }

    // Merge all part metadata into the last part.
    for (all_parts[0 .. all_parts.len - 1]) |*part| {
        try ctx.last_part.declared_symbols.appendList(p.allocator, part.declared_symbols);
        try ctx.last_part.import_record_indices.append(p.allocator, part.import_record_indices.slice());
        for (part.symbol_uses.keys(), part.symbol_uses.values()) |k, v| {
            const gop = try ctx.last_part.symbol_uses.getOrPut(p.allocator, k);
            if (!gop.found_existing) {
                gop.value_ptr.* = v;
            } else {
                gop.value_ptr.count_estimate += v.count_estimate;
            }
        }
        part.stmts = &.{};
        part.declared_symbols.entries.len = 0;
        part.tag = .dead_due_to_inlining;
        part.dependencies.clearRetainingCapacity();
        try part.dependencies.push(p.allocator, .{
            .part_index = @intCast(all_parts.len - 1),
            .source_index = p.source.index,
        });
    }

    try ctx.last_part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items);
    try ctx.last_part.declared_symbols.appendList(p.allocator, p.declared_symbols);

    ctx.last_part.stmts = ctx.stmts.items;
    ctx.last_part.tag = .none;
}

const bun = @import("bun");
const logger = bun.logger;

const js_ast = bun.ast;
const B = js_ast.B;
const Binding = js_ast.Binding;
const E = js_ast.E;
const Expr = js_ast.Expr;
const LocRef = js_ast.LocRef;
const S = js_ast.S;
const Stmt = js_ast.Stmt;

const G = js_ast.G;
const Decl = G.Decl;
const Property = G.Property;

const js_parser = bun.js_parser;
const ConvertESMExportsForHmr = js_parser.ConvertESMExportsForHmr;
const ReactRefresh = js_parser.ReactRefresh;
const Ref = js_parser.Ref;
const options = js_parser.options;

const std = @import("std");
const List = std.ArrayListUnmanaged;
src/ast/ImportScanner.zig (new file, 530 lines)
@@ -0,0 +1,530 @@
stmts: []Stmt = &.{},
kept_import_equals: bool = false,
removed_import_equals: bool = false,

pub fn scan(
    comptime P: type,
    p: *P,
    stmts: []Stmt,
    will_transform_to_common_js: bool,
    comptime hot_module_reloading_transformations: bool,
    hot_module_reloading_context: if (hot_module_reloading_transformations) *ConvertESMExportsForHmr else void,
) !ImportScanner {
    var scanner = ImportScanner{};
    var stmts_end: usize = 0;
    const allocator = p.allocator;
    const is_typescript_enabled: bool = comptime P.parser_features.typescript;

    for (stmts) |_stmt| {
        var stmt = _stmt; // copy
        switch (stmt.data) {
            .s_import => |import_ptr| {
                var st = import_ptr.*;
                defer import_ptr.* = st;

                const record: *ImportRecord = &p.import_records.items[st.import_record_index];

                if (record.path.isMacro()) {
                    record.is_unused = true;
                    record.path.is_disabled = true;
                    continue;
                }

                // The official TypeScript compiler always removes unused imported
                // symbols. However, we deliberately deviate from the official
                // TypeScript compiler's behavior of doing this in a specific scenario:
                // we are not bundling, symbol renaming is off, and the tsconfig.json
                // "importsNotUsedAsValues" setting is present and is not set to
                // "remove".
                //
                // This exists to support the use case of compiling partial modules for
                // compile-to-JavaScript languages such as Svelte. These languages try
                // to reference imports in ways that are impossible for esbuild to know
                // about when esbuild is only given a partial module to compile. Here
                // is an example of some Svelte code that might use esbuild to convert
                // TypeScript to JavaScript:
                //
                //   <script lang="ts">
                //     import Counter from './Counter.svelte';
                //     export let name: string = 'world';
                //   </script>
                //   <main>
                //     <h1>Hello {name}!</h1>
                //     <Counter />
                //   </main>
                //
                // Tools that use esbuild to compile TypeScript code inside a Svelte
                // file like this only give esbuild the contents of the <script> tag.
                // These tools work around this missing import problem when using the
                // official TypeScript compiler by hacking the TypeScript AST to
                // remove the "unused import" flags. This isn't possible in esbuild
                // because esbuild deliberately does not expose an AST manipulation
                // API for performance reasons.
                //
                // We deviate from the TypeScript compiler's behavior in this specific
                // case because doing so is useful for these compile-to-JavaScript
                // languages and is benign in other cases. The rationale is as follows:
                //
                //   * If "importsNotUsedAsValues" is absent or set to "remove", then
                //     we don't know if these imports are values or types. It's not
                //     safe to keep them because if they are types, the missing imports
                //     will cause run-time failures because there will be no matching
                //     exports. It's only safe to keep imports if "importsNotUsedAsValues"
                //     is set to "preserve" or "error" because then we can assume that
                //     none of the imports are types (since the TypeScript compiler
                //     would generate an error in that case).
                //
                //   * If we're bundling, then we know we aren't being used to compile
                //     a partial module. The parser is seeing the entire code for the
                //     module so it's safe to remove unused imports. And also we don't
                //     want the linker to generate errors about missing imports if the
                //     imported file is also in the bundle.
                //
                //   * If identifier minification is enabled, then using esbuild as a
                //     partial-module transform library wouldn't work anyway because
                //     the names wouldn't match. And that means we're minifying so the
                //     user is expecting the output to be as small as possible. So we
                //     should omit unused imports.
                //
                var did_remove_star_loc = false;
                const keep_unused_imports = !p.options.features.trim_unused_imports;
                // TypeScript always trims unused imports. This is important for
                // correctness, since some imports might be fake (they exist only in
                // the type system, as with type-only imports).
                if (!keep_unused_imports) {
                    var found_imports = false;
                    var is_unused_in_typescript = true;

                    if (st.default_name) |default_name| {
                        found_imports = true;
                        const symbol = p.symbols.items[default_name.ref.?.innerIndex()];

                        // TypeScript has a separate definition of unused
                        if (is_typescript_enabled and p.ts_use_counts.items[default_name.ref.?.innerIndex()] != 0) {
                            is_unused_in_typescript = false;
                        }

                        // Remove the symbol if it's never used outside a dead code region
                        if (symbol.use_count_estimate == 0) {
                            st.default_name = null;
                        }
                    }

                    // Remove the star import if it's unused
                    if (st.star_name_loc) |_| {
                        found_imports = true;
                        const symbol = p.symbols.items[st.namespace_ref.innerIndex()];

                        // TypeScript has a separate definition of unused
                        if (is_typescript_enabled and p.ts_use_counts.items[st.namespace_ref.innerIndex()] != 0) {
                            is_unused_in_typescript = false;
                        }

                        // Remove the symbol if it's never used outside a dead code region
                        if (symbol.use_count_estimate == 0) {
                            // Make sure we don't remove this if it was used for a property
                            // access while bundling
                            var has_any = false;

                            if (p.import_items_for_namespace.get(st.namespace_ref)) |entry| {
                                if (entry.count() > 0) {
                                    has_any = true;
                                }
                            }

                            if (!has_any) {
                                st.star_name_loc = null;
                                did_remove_star_loc = true;
                            }
                        }
                    }

                    // Remove items if they are unused
                    if (st.items.len > 0) {
                        found_imports = true;
                        var items_end: usize = 0;
                        for (st.items) |item| {
                            const ref = item.name.ref.?;
                            const symbol: Symbol = p.symbols.items[ref.innerIndex()];

                            // TypeScript has a separate definition of unused
                            if (is_typescript_enabled and p.ts_use_counts.items[ref.innerIndex()] != 0) {
                                is_unused_in_typescript = false;
                            }

                            // Keep the item only if it's used outside a dead code region
                            if (symbol.use_count_estimate != 0) {
                                st.items[items_end] = item;
                                items_end += 1;
                            }
                        }

                        st.items = st.items[0..items_end];
                    }

                    // -- Original Comment --
                    // Omit this statement if we're parsing TypeScript and all imports are
                    // unused. Note that this is distinct from the case where there were
                    // no imports at all (e.g. "import 'foo'"). In that case we want to keep
                    // the statement because the user is clearly trying to import the module
                    // for side effects.
                    //
                    // This culling is important for correctness when parsing TypeScript
                    // because a) the TypeScript compiler does this and we want to match it
                    // and b) this may be a fake module that only exists in the type system
                    // and doesn't actually exist in reality.
                    //
                    // We do not want to do this culling in JavaScript though because the
                    // module may have side effects even if all imports are unused.
                    // -- Original Comment --

                    // jarred: I think, in this project, we want this behavior, even in JavaScript.
                    // I think this would be a big performance improvement.
                    // The less you import, the less code you transpile.
                    // Side-effect imports are nearly always done through identifier-less imports
                    // e.g. `import 'fancy-stylesheet-thing/style.css';`
                    // This is a breaking change though. We can make it an option with some guardrail
                    // so maybe if it errors, it shows a suggestion "retry without trimming unused imports"
                    if ((is_typescript_enabled and found_imports and is_unused_in_typescript and !p.options.preserve_unused_imports_ts) or
                        (!is_typescript_enabled and p.options.features.trim_unused_imports and found_imports and st.star_name_loc == null and st.items.len == 0 and st.default_name == null))
                    {
                        // internal imports are presumed to be always used
                        // require statements cannot be stripped
                        if (!record.is_internal and !record.was_originally_require) {
                            record.is_unused = true;
                            continue;
                        }
                    }
                }

                const namespace_ref = st.namespace_ref;
                const convert_star_to_clause = !p.options.bundle and (p.symbols.items[namespace_ref.innerIndex()].use_count_estimate == 0);

                if (convert_star_to_clause and !keep_unused_imports) {
                    st.star_name_loc = null;
                }

                record.contains_default_alias = record.contains_default_alias or st.default_name != null;

                const existing_items: ImportItemForNamespaceMap = p.import_items_for_namespace.get(namespace_ref) orelse
                    ImportItemForNamespaceMap.init(allocator);

                if (p.options.bundle) {
                    if (st.star_name_loc != null and existing_items.count() > 0) {
                        const sorted = try allocator.alloc(string, existing_items.count());
                        defer allocator.free(sorted);
                        for (sorted, existing_items.keys()) |*result, alias| {
                            result.* = alias;
                        }
                        strings.sortDesc(sorted);
                        p.named_imports.ensureUnusedCapacity(p.allocator, sorted.len) catch bun.outOfMemory();

                        // Create named imports for these property accesses. This will
                        // cause missing imports to generate useful warnings.
                        //
                        // It will also improve bundling efficiency for internal imports
                        // by still converting property accesses off the namespace into
                        // bare identifiers even if the namespace is still needed.
                        for (sorted) |alias| {
                            const item = existing_items.get(alias).?;
                            p.named_imports.put(
                                p.allocator,
                                item.ref.?,
                                js_ast.NamedImport{
                                    .alias = alias,
                                    .alias_loc = item.loc,
                                    .namespace_ref = namespace_ref,
                                    .import_record_index = st.import_record_index,
                                },
                            ) catch bun.outOfMemory();

                            const name: LocRef = item;
                            const name_ref = name.ref.?;

                            // Make sure the printer prints this as a property access
                            var symbol: *Symbol = &p.symbols.items[name_ref.innerIndex()];

                            symbol.namespace_alias = G.NamespaceAlias{
                                .namespace_ref = namespace_ref,
                                .alias = alias,
                                .import_record_index = st.import_record_index,
                                .was_originally_property_access = st.star_name_loc != null and existing_items.contains(symbol.original_name),
                            };

                            // Also record these automatically-generated top-level namespace alias symbols
                            p.declared_symbols.append(p.allocator, .{
                                .ref = name_ref,
                                .is_top_level = true,
                            }) catch unreachable;
                        }
                    }
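                    // e.g. for `import * as ns from "./m"` where only `ns.x`
                    // is referenced, a named import for `x` is synthesized
                    // above so the printer can emit a bare identifier instead
                    // of the namespace access, and missing-export warnings
                    // still work.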

                    p.named_imports.ensureUnusedCapacity(
                        p.allocator,
                        st.items.len + @as(usize, @intFromBool(st.default_name != null)) + @as(usize, @intFromBool(st.star_name_loc != null)),
                    ) catch bun.outOfMemory();

                    if (st.star_name_loc) |loc| {
                        record.contains_import_star = true;
                        p.named_imports.putAssumeCapacity(
                            namespace_ref,
                            js_ast.NamedImport{
                                .alias_is_star = true,
                                .alias = "",
                                .alias_loc = loc,
                                .namespace_ref = Ref.None,
                                .import_record_index = st.import_record_index,
                            },
                        );
                    }

                    if (st.default_name) |default| {
                        record.contains_default_alias = true;
                        p.named_imports.putAssumeCapacity(
                            default.ref.?,
                            .{
                                .alias = "default",
                                .alias_loc = default.loc,
                                .namespace_ref = namespace_ref,
                                .import_record_index = st.import_record_index,
                            },
                        );
                    }

                    for (st.items) |item| {
                        const name: LocRef = item.name;
                        const name_ref = name.ref.?;

                        p.named_imports.putAssumeCapacity(
                            name_ref,
                            js_ast.NamedImport{
                                .alias = item.alias,
                                .alias_loc = name.loc,
                                .namespace_ref = namespace_ref,
                                .import_record_index = st.import_record_index,
                            },
                        );
                    }
                } else {
                    // ESM requires live bindings
                    // CommonJS does not require live bindings
                    // We load ESM in browsers & in Bun.js
                    // We have to simulate live bindings for cases where the code is bundled
                    // We do not know at this stage whether or not the import statement is bundled
                    // This keeps track of the `namespace_alias` in case, at printing time, we determine that we should print it with the namespace
                    for (st.items) |item| {
                        record.contains_default_alias = record.contains_default_alias or strings.eqlComptime(item.alias, "default");

                        const name: LocRef = item.name;
                        const name_ref = name.ref.?;

                        try p.named_imports.put(p.allocator, name_ref, js_ast.NamedImport{
                            .alias = item.alias,
                            .alias_loc = name.loc,
                            .namespace_ref = namespace_ref,
                            .import_record_index = st.import_record_index,
                        });

                        // Make sure the printer prints this as a property access
                        var symbol: *Symbol = &p.symbols.items[name_ref.innerIndex()];
                        if (record.contains_import_star or st.star_name_loc != null)
                            symbol.namespace_alias = G.NamespaceAlias{
                                .namespace_ref = namespace_ref,
                                .alias = item.alias,
                                .import_record_index = st.import_record_index,
                                .was_originally_property_access = st.star_name_loc != null and existing_items.contains(symbol.original_name),
                            };
                    }

                    if (record.was_originally_require) {
                        var symbol = &p.symbols.items[namespace_ref.innerIndex()];
                        symbol.namespace_alias = G.NamespaceAlias{
                            .namespace_ref = namespace_ref,
                            .alias = "",
                            .import_record_index = st.import_record_index,
                            .was_originally_property_access = false,
                        };
                    }
                }

                try p.import_records_for_current_part.append(allocator, st.import_record_index);

                record.contains_import_star = record.contains_import_star or st.star_name_loc != null;
                record.contains_default_alias = record.contains_default_alias or st.default_name != null;

                for (st.items) |*item| {
                    record.contains_default_alias = record.contains_default_alias or strings.eqlComptime(item.alias, "default");
                    record.contains_es_module_alias = record.contains_es_module_alias or strings.eqlComptime(item.alias, "__esModule");
                }
            },

            .s_function => |st| {
                if (st.func.flags.contains(.is_export)) {
                    if (st.func.name) |name| {
                        const original_name = p.symbols.items[name.ref.?.innerIndex()].original_name;
                        try p.recordExport(name.loc, original_name, name.ref.?);
                    } else {
                        try p.log.addRangeError(p.source, logger.Range{ .loc = st.func.open_parens_loc, .len = 2 }, "Exported functions must have a name");
                    }
                }
            },
            .s_class => |st| {
                if (st.is_export) {
                    if (st.class.class_name) |name| {
                        try p.recordExport(name.loc, p.symbols.items[name.ref.?.innerIndex()].original_name, name.ref.?);
                    } else {
                        try p.log.addRangeError(p.source, logger.Range{ .loc = st.class.body_loc, .len = 0 }, "Exported classes must have a name");
                    }
                }
            },
            .s_local => |st| {
                if (st.is_export) {
                    for (st.decls.slice()) |decl| {
                        p.recordExportedBinding(decl.binding);
                    }
                }

                // Remove unused import-equals statements, since those likely
                // correspond to types instead of values
                if (st.was_ts_import_equals and !st.is_export and st.decls.len > 0) {
                    var decl = st.decls.ptr[0];

                    // Skip to the underlying reference
                    var value = decl.value;
                    if (decl.value != null) {
                        while (true) {
                            if (@as(Expr.Tag, value.?.data) == .e_dot) {
                                value = value.?.data.e_dot.target;
                            } else {
                                break;
                            }
                        }
                    }

                    // Is this an identifier reference and not a require() call?
                    if (value) |val| {
                        if (@as(Expr.Tag, val.data) == .e_identifier) {
                            // Is this import statement unused?
                            if (@as(Binding.Tag, decl.binding.data) == .b_identifier and p.symbols.items[decl.binding.data.b_identifier.ref.innerIndex()].use_count_estimate == 0) {
                                p.ignoreUsage(val.data.e_identifier.ref);

                                scanner.removed_import_equals = true;
                                continue;
                            } else {
                                scanner.kept_import_equals = true;
                            }
                        }
                    }
                }
            },
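            // The `.s_local` case above also covers TS import-equals: e.g.
            // `import A = Namespace.Type;` parses as a local with
            // `was_ts_import_equals` set; when `A` is unused, the statement
            // is dropped and the use of `Namespace` is un-counted via
            // `ignoreUsage`.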
            .s_export_default => |st| {
                // This is defer'd so that we still record export default for identifiers
                defer {
                    if (st.default_name.ref) |ref| {
                        p.recordExport(st.default_name.loc, "default", ref) catch {};
                    }
                }

                // Rewrite this export to be:
                //   exports.default =
                // But only if it's anonymous
                if (!hot_module_reloading_transformations and will_transform_to_common_js and P != bun.bundle_v2.AstBuilder) {
                    const expr = st.value.toExpr();
                    var export_default_args = try p.allocator.alloc(Expr, 2);
                    export_default_args[0] = p.@"module.exports"(expr.loc);
                    export_default_args[1] = expr;
                    stmt = p.s(S.SExpr{ .value = p.callRuntime(expr.loc, "__exportDefault", export_default_args) }, expr.loc);
                }
            },
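            // e.g. under the CommonJS transform above, `export default foo()`
            // becomes roughly `__exportDefault(module.exports, foo())` via
            // the runtime helper.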
            .s_export_clause => |st| {
                for (st.items) |item| {
                    try p.recordExport(item.alias_loc, item.alias, item.name.ref.?);
                }
            },
            .s_export_star => |st| {
                try p.import_records_for_current_part.append(allocator, st.import_record_index);

                if (st.alias) |alias| {
                    // "export * as ns from 'path'"
                    try p.named_imports.put(p.allocator, st.namespace_ref, js_ast.NamedImport{
                        .alias = null,
                        .alias_is_star = true,
                        .alias_loc = alias.loc,
                        .namespace_ref = Ref.None,
                        .import_record_index = st.import_record_index,
                        .is_exported = true,
                    });
                    try p.recordExport(alias.loc, alias.original_name, st.namespace_ref);
                    var record = &p.import_records.items[st.import_record_index];
                    record.contains_import_star = true;
                } else {
                    // "export * from 'path'"
                    try p.export_star_import_records.append(allocator, st.import_record_index);
                }
            },
            .s_export_from => |st| {
                try p.import_records_for_current_part.append(allocator, st.import_record_index);
                p.named_imports.ensureUnusedCapacity(p.allocator, st.items.len) catch unreachable;
                for (st.items) |item| {
                    const ref = item.name.ref orelse p.panic("Expected export from item to have a name {any}", .{st});
                    // Note that the imported alias is not item.alias, which is the
                    // exported alias. This is somewhat confusing because each
                    // SExportFrom statement is basically SImport + SExportClause in one.
                    try p.named_imports.put(p.allocator, ref, js_ast.NamedImport{
                        .alias_is_star = false,
                        .alias = item.original_name,
                        .alias_loc = item.name.loc,
                        .namespace_ref = st.namespace_ref,
                        .import_record_index = st.import_record_index,
                        .is_exported = true,
                    });
                    try p.recordExport(item.name.loc, item.alias, ref);

                    var record = &p.import_records.items[st.import_record_index];
                    if (strings.eqlComptime(item.original_name, "default")) {
                        record.contains_default_alias = true;
                    } else if (strings.eqlComptime(item.original_name, "__esModule")) {
                        record.contains_es_module_alias = true;
                    }
                }
            },
            else => {},
        }

        if (hot_module_reloading_transformations) {
            try hot_module_reloading_context.convertStmt(p, stmt);
        } else {
            stmts[stmts_end] = stmt;
            stmts_end += 1;
        }
    }

    if (!hot_module_reloading_transformations)
        scanner.stmts = stmts[0..stmts_end];

    return scanner;
}

const string = []const u8;

const bun = @import("bun");
const ImportRecord = bun.ImportRecord;
const logger = bun.logger;
const strings = bun.strings;

const js_ast = bun.ast;
const Binding = js_ast.Binding;
const Expr = js_ast.Expr;
const G = js_ast.G;
const LocRef = js_ast.LocRef;
const S = js_ast.S;
const Stmt = js_ast.Stmt;
const Symbol = js_ast.Symbol;

const js_parser = bun.js_parser;
const ConvertESMExportsForHmr = js_parser.ConvertESMExportsForHmr;
const ImportItemForNamespaceMap = js_parser.ImportItemForNamespaceMap;
const ImportScanner = js_parser.ImportScanner;
const Ref = js_parser.Ref;
const TypeScript = js_parser.TypeScript;
const options = js_parser.options;
src/ast/KnownGlobal.zig (new file, 210 lines)
@@ -0,0 +1,210 @@
pub const KnownGlobal = enum {
    WeakSet,
    WeakMap,
    Date,
    Set,
    Map,
    Headers,
    Response,
    TextEncoder,
    TextDecoder,

    pub const map = bun.ComptimeEnumMap(KnownGlobal);

    pub noinline fn maybeMarkConstructorAsPure(noalias e: *E.New, symbols: []const Symbol) void {
        const id = if (e.target.data == .e_identifier) e.target.data.e_identifier.ref else return;
        const symbol = &symbols[id.innerIndex()];
        if (symbol.kind != .unbound)
            return;

        const constructor = map.get(symbol.original_name) orelse return;

        switch (constructor) {
            .WeakSet, .WeakMap => {
                const n = e.args.len;

                if (n == 0) {
                    // "new WeakSet()" is pure
                    e.can_be_unwrapped_if_unused = .if_unused;

                    return;
                }

                if (n == 1) {
                    switch (e.args.ptr[0].data) {
                        .e_null, .e_undefined => {
                            // "new WeakSet(null)" is pure
                            // "new WeakSet(void 0)" is pure
                            e.can_be_unwrapped_if_unused = .if_unused;
                        },
                        .e_array => |array| {
                            if (array.items.len == 0) {
                                // "new WeakSet([])" is pure
                                e.can_be_unwrapped_if_unused = .if_unused;
                            } else {
                                // "new WeakSet([x])" is impure because an exception is thrown if "x" is not an object
                            }
                        },
                        else => {
                            // "new WeakSet(x)" is impure because the iterator for "x" could have side effects
                        },
                    }
                }
            },
            .Date => {
                const n = e.args.len;

                if (n == 0) {
                    // "new Date()" is pure
                    e.can_be_unwrapped_if_unused = .if_unused;

                    return;
                }

                if (n == 1) {
                    switch (e.args.ptr[0].knownPrimitive()) {
                        .null, .undefined, .boolean, .number, .string => {
                            // "new Date('')" is pure
                            // "new Date(0)" is pure
                            // "new Date(null)" is pure
                            // "new Date(true)" is pure
                            // "new Date(false)" is pure
                            // "new Date(undefined)" is pure
                            e.can_be_unwrapped_if_unused = .if_unused;
                        },
                        else => {
                            // "new Date(x)" is impure because the argument could be a string with side effects
                        },
                    }
                }
            },

            .Set => {
                const n = e.args.len;

                if (n == 0) {
                    // "new Set()" is pure
                    e.can_be_unwrapped_if_unused = .if_unused;
                    return;
                }

                if (n == 1) {
                    switch (e.args.ptr[0].data) {
                        .e_array, .e_null, .e_undefined => {
                            // "new Set([a, b, c])" is pure
                            // "new Set(null)" is pure
                            // "new Set(void 0)" is pure
                            e.can_be_unwrapped_if_unused = .if_unused;
                        },
                        else => {
                            // "new Set(x)" is impure because the iterator for "x" could have side effects
                        },
                    }
                }
            },

            .Headers => {
                const n = e.args.len;

                if (n == 0) {
                    // "new Headers()" is pure
                    e.can_be_unwrapped_if_unused = .if_unused;

                    return;
                }
            },

            .Response => {
                const n = e.args.len;

                if (n == 0) {
                    // "new Response()" is pure
                    e.can_be_unwrapped_if_unused = .if_unused;

                    return;
                }

                if (n == 1) {
                    switch (e.args.ptr[0].knownPrimitive()) {
                        .null, .undefined, .boolean, .number, .string => {
                            // "new Response('')" is pure
                            // "new Response(0)" is pure
                            // "new Response(null)" is pure
                            // "new Response(true)" is pure
                            // "new Response(false)" is pure
                            // "new Response(undefined)" is pure
                            e.can_be_unwrapped_if_unused = .if_unused;
                        },
                        else => {
                            // "new Response(x)" is impure
                        },
                    }
                }
            },
            .TextDecoder, .TextEncoder => {
                const n = e.args.len;

                if (n == 0) {
                    // "new TextEncoder()" is pure
                    // "new TextDecoder()" is pure
                    e.can_be_unwrapped_if_unused = .if_unused;

                    return;
                }

                // We _could_ validate the encoding argument
                // But let's not bother
            },

            .Map => {
                const n = e.args.len;

                if (n == 0) {
                    // "new Map()" is pure
                    e.can_be_unwrapped_if_unused = .if_unused;
                    return;
                }

                if (n == 1) {
                    switch (e.args.ptr[0].data) {
                        .e_null, .e_undefined => {
                            // "new Map(null)" is pure
                            // "new Map(void 0)" is pure
                            e.can_be_unwrapped_if_unused = .if_unused;
                        },
                        .e_array => |array| {
                            var all_items_are_arrays = true;
                            for (array.items.slice()) |item| {
                                if (item.data != .e_array) {
                                    all_items_are_arrays = false;
                                    break;
                                }
                            }

                            if (all_items_are_arrays) {
                                // "new Map([[a, b], [c, d]])" is pure
                                e.can_be_unwrapped_if_unused = .if_unused;
                            }
                        },
                        else => {
                            // "new Map(x)" is impure because the iterator for "x" could have side effects
                        },
                    }
                }
            },
        }
    }
};
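
// Illustrative effect (assuming dead-code elimination is enabled): an unused
// `new Map([[a, b]])` can be dropped entirely, whereas an unused
// `new Map(someIterable)` must be kept because iterating `someIterable`
// could run arbitrary user code.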

const string = []const u8;

const bun = @import("bun");

const js_ast = bun.ast;
const E = js_ast.E;
const Symbol = js_ast.Symbol;

const std = @import("std");
const Map = std.AutoHashMapUnmanaged;
src/ast/P.zig (new file, 6666 lines)
File diff suppressed because it is too large.
src/ast/Parser.zig (new file, 1528 lines)
File diff suppressed because it is too large.
src/ast/SideEffects.zig (new file, 887 lines)
@@ -0,0 +1,887 @@
pub const SideEffects = enum(u1) {
    could_have_side_effects,
    no_side_effects,

    pub const Result = struct {
        side_effects: SideEffects,
        ok: bool = false,
        value: bool = false,
    };

    pub fn canChangeStrictToLoose(lhs: Expr.Data, rhs: Expr.Data) bool {
        const left = lhs.knownPrimitive();
        const right = rhs.knownPrimitive();
        return left == right and left != .unknown and left != .mixed;
    }

    pub fn simplifyBoolean(p: anytype, expr: Expr) Expr {
        if (!p.options.features.dead_code_elimination) return expr;

        var result: Expr = expr;
        _simplifyBoolean(p, &result);
        return result;
    }

    fn _simplifyBoolean(p: anytype, expr: *Expr) void {
        while (true) {
            switch (expr.data) {
                .e_unary => |e| {
                    if (e.op == .un_not) {
                        // "!!a" => "a"
                        if (e.value.data == .e_unary and e.value.data.e_unary.op == .un_not) {
                            expr.* = e.value.data.e_unary.value;
                            continue;
                        }

                        _simplifyBoolean(p, &e.value);
                    }
                },
                .e_binary => |e| {
                    switch (e.op) {
                        .bin_logical_and => {
                            const effects = SideEffects.toBoolean(p, e.right.data);
                            if (effects.ok and effects.value and effects.side_effects == .no_side_effects) {
                                // "if (anything && truthyNoSideEffects)" => "if (anything)"
                                expr.* = e.left;
                                continue;
                            }
                        },
                        .bin_logical_or => {
                            const effects = SideEffects.toBoolean(p, e.right.data);
                            if (effects.ok and !effects.value and effects.side_effects == .no_side_effects) {
                                // "if (anything || falsyNoSideEffects)" => "if (anything)"
                                expr.* = e.left;
                                continue;
                            }
                        },
                        else => {},
                    }
                },
                else => {},
            }
            break;
        }
    }
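
    // e.g. a condition like `cond && true` simplifies to `cond`, and `!!x`
    // to `x`; both rewrites run only when dead-code elimination is enabled.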

    pub const toNumber = Expr.Data.toNumber;
    pub const typeof = Expr.Data.toTypeof;

    pub fn isPrimitiveToReorder(data: Expr.Data) bool {
        return switch (data) {
            .e_null,
            .e_undefined,
            .e_string,
            .e_boolean,
            .e_number,
            .e_big_int,
            .e_inlined_enum,
            .e_require_main,
            => true,
            else => false,
        };
    }

    pub fn simplifyUnusedExpr(p: anytype, expr: Expr) ?Expr {
        if (!p.options.features.dead_code_elimination) return expr;
        switch (expr.data) {
            .e_null,
            .e_undefined,
            .e_missing,
            .e_boolean,
            .e_number,
            .e_big_int,
            .e_string,
            .e_this,
            .e_reg_exp,
            .e_function,
            .e_arrow,
            .e_import_meta,
            .e_inlined_enum,
            => return null,

            .e_dot => |dot| {
                if (dot.can_be_removed_if_unused) {
                    return null;
                }
            },
            .e_identifier => |ident| {
                if (ident.must_keep_due_to_with_stmt) {
                    return expr;
                }

                if (ident.can_be_removed_if_unused or p.symbols.items[ident.ref.innerIndex()].kind != .unbound) {
                    return null;
                }
            },
            .e_if => |ternary| {
                ternary.yes = simplifyUnusedExpr(p, ternary.yes) orelse ternary.yes.toEmpty();
                ternary.no = simplifyUnusedExpr(p, ternary.no) orelse ternary.no.toEmpty();

                // "foo() ? 1 : 2" => "foo()"
                if (ternary.yes.isEmpty() and ternary.no.isEmpty()) {
                    return simplifyUnusedExpr(p, ternary.test_);
                }

                // "foo() ? 1 : bar()" => "foo() || bar()"
                if (ternary.yes.isEmpty()) {
                    return Expr.joinWithLeftAssociativeOp(
                        .bin_logical_or,
                        ternary.test_,
                        ternary.no,
                        p.allocator,
                    );
                }

                // "foo() ? bar() : 2" => "foo() && bar()"
                if (ternary.no.isEmpty()) {
                    return Expr.joinWithLeftAssociativeOp(
                        .bin_logical_and,
                        ternary.test_,
                        ternary.yes,
                        p.allocator,
                    );
                }
            },
            .e_unary => |un| {
                // These operators must not have any type conversions that can execute code
                // such as "toString" or "valueOf". They must also never throw any exceptions.
                switch (un.op) {
                    .un_void, .un_not => {
                        return simplifyUnusedExpr(p, un.value);
                    },
                    .un_typeof => {
                        // "typeof x" must not be transformed into "x" since doing so could
                        // cause an exception to be thrown. Instead we can just remove it since
                        // "typeof x" is special-cased in the standard to never throw.
                        if (std.meta.activeTag(un.value.data) == .e_identifier) {
                            return null;
                        }

                        return simplifyUnusedExpr(p, un.value);
                    },

                    else => {},
                }
            },

            inline .e_call, .e_new => |call| {
                // A call that has been marked "__PURE__" can be removed if all arguments
                // can be removed. The annotation causes us to ignore the target.
                if (call.can_be_unwrapped_if_unused != .never) {
                    if (call.args.len > 0) {
                        const joined = Expr.joinAllWithCommaCallback(call.args.slice(), @TypeOf(p), p, comptime simplifyUnusedExpr, p.allocator);
                        if (joined != null and call.can_be_unwrapped_if_unused == .if_unused_and_toString_safe) {
                            @branchHint(.unlikely);
                            // For now, only support this for 1 argument.
                            if (joined.?.data.isSafeToString()) {
                                return null;
                            }
                        }
                        return joined;
                    } else {
                        return null;
                    }
                }
            },

            .e_binary => |bin| {
                switch (bin.op) {
                    // These operators must not have any type conversions that can execute code
                    // such as "toString" or "valueOf". They must also never throw any exceptions.
                    .bin_strict_eq,
                    .bin_strict_ne,
                    .bin_comma,
                    => return simplifyUnusedBinaryCommaExpr(p, expr),

                    // We can simplify "==" and "!=" even though they can call "toString" and/or
                    // "valueOf" if we can statically determine that the types of both sides are
                    // primitives. In that case there won't be any chance for user-defined
                    // "toString" and/or "valueOf" to be called.
                    .bin_loose_eq,
                    .bin_loose_ne,
                    => {
                        if (isPrimitiveWithSideEffects(bin.left.data) and isPrimitiveWithSideEffects(bin.right.data)) {
                            return Expr.joinWithComma(
                                simplifyUnusedExpr(p, bin.left) orelse bin.left.toEmpty(),
                                simplifyUnusedExpr(p, bin.right) orelse bin.right.toEmpty(),
                                p.allocator,
                            );
                        }
                        // If one side is a number, the number can be printed as
                        // `0` since the result being unused doesn't matter, we
                        // only care to invoke the coercion.
                        if (bin.left.data == .e_number) {
                            bin.left.data = .{ .e_number = .{ .value = 0.0 } };
                        } else if (bin.right.data == .e_number) {
                            bin.right.data = .{ .e_number = .{ .value = 0.0 } };
                        }
                    },

                    .bin_logical_and, .bin_logical_or, .bin_nullish_coalescing => {
                        bin.right = simplifyUnusedExpr(p, bin.right) orelse bin.right.toEmpty();
                        // Preserve short-circuit behavior: the left expression is only unused if
                        // the right expression can be completely removed. Otherwise, the left
                        // expression is important for the branch.

                        if (bin.right.isEmpty())
                            return simplifyUnusedExpr(p, bin.left);
                    },

                    else => {},
                }
            },

            .e_object => {
                // Objects with "..." spread expressions can't be unwrapped because the
                // "..." triggers code evaluation via getters. In that case, just trim
                // the other items instead and leave the object expression there.
                var properties_slice = expr.data.e_object.properties.slice();
                var end: usize = 0;
                for (properties_slice) |spread| {
                    end = 0;
                    if (spread.kind == .spread) {
                        // Spread properties must always be evaluated
                        for (properties_slice) |prop_| {
                            var prop = prop_;
                            if (prop_.kind != .spread) {
                                const value = simplifyUnusedExpr(p, prop.value.?);
                                if (value != null) {
                                    prop.value = value;
                                } else if (!prop.flags.contains(.is_computed)) {
                                    continue;
                                } else {
                                    prop.value = p.newExpr(E.Number{ .value = 0.0 }, prop.value.?.loc);
                                }
                            }

                            // store the (possibly simplified) copy, not the original
                            properties_slice[end] = prop;
                            end += 1;
                        }

                        properties_slice = properties_slice[0..end];
                        expr.data.e_object.properties = G.Property.List.init(properties_slice);
                        return expr;
                    }
                }

                var result = Expr.init(E.Missing, E.Missing{}, expr.loc);

                // Otherwise, the object can be completely removed. We only need to keep any
                // object properties with side effects. Apply this simplification recursively.
                for (properties_slice) |prop| {
                    if (prop.flags.contains(.is_computed)) {
                        // Make sure "ToString" is still evaluated on the key
                        result = result.joinWithComma(
                            p.newExpr(
                                E.Binary{
                                    .op = .bin_add,
                                    .left = prop.key.?,
                                    .right = p.newExpr(E.String{}, prop.key.?.loc),
                                },
                                prop.key.?.loc,
                            ),
                            p.allocator,
                        );
                    }
                    result = result.joinWithComma(
                        simplifyUnusedExpr(p, prop.value.?) orelse prop.value.?.toEmpty(),
                        p.allocator,
                    );
                }

                return result;
            },
            .e_array => {
                var items = expr.data.e_array.items.slice();

                for (items) |item| {
                    if (item.data == .e_spread) {
                        var end: usize = 0;
                        for (items) |item__| {
                            const item_ = item__;
                            if (item_.data != .e_missing) {
                                items[end] = item_;
                                end += 1;
                            }
                        }

                        expr.data.e_array.items = ExprNodeList.init(items[0..end]);
                        return expr;
                    }
                }

                // Otherwise, the array can be completely removed. We only need to keep any
                // array items with side effects. Apply this simplification recursively.
                return Expr.joinAllWithCommaCallback(
                    items,
                    @TypeOf(p),
                    p,
                    comptime simplifyUnusedExpr,
                    p.allocator,
                );
            },

            else => {},
        }

        return expr;
    }
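
    // e.g. for an unused `(foo(), 123, bar.baz)`, only the parts that may
    // have effects survive: `foo(), bar.baz` (the property access could hit
    // a getter, so it stays; the bare number literal is dropped).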
|
||||
|
||||
pub const BinaryExpressionSimplifyVisitor = struct {
|
||||
bin: *E.Binary,
|
||||
};
|
||||
|
||||
///
|
||||
fn simplifyUnusedBinaryCommaExpr(p: anytype, expr: Expr) ?Expr {
        if (Environment.allow_assert) {
            assert(expr.data == .e_binary);
            assert(switch (expr.data.e_binary.op) {
                .bin_strict_eq,
                .bin_strict_ne,
                .bin_comma,
                => true,
                else => false,
            });
        }
        const stack: *std.ArrayList(BinaryExpressionSimplifyVisitor) = &p.binary_expression_simplify_stack;
        const stack_bottom = stack.items.len;
        defer stack.shrinkRetainingCapacity(stack_bottom);

        stack.append(.{ .bin = expr.data.e_binary }) catch bun.outOfMemory();

        // Build stack up of expressions
        var left: Expr = expr.data.e_binary.left;
        while (left.data.as(.e_binary)) |left_bin| {
            switch (left_bin.op) {
                .bin_strict_eq,
                .bin_strict_ne,
                .bin_comma,
                => {
                    stack.append(.{ .bin = left_bin }) catch bun.outOfMemory();
                    left = left_bin.left;
                },
                else => break,
            }
        }

        // Ride the stack downwards
        var i = stack.items.len;
        var result = simplifyUnusedExpr(p, left) orelse Expr.empty;
        while (i > stack_bottom) {
            i -= 1;
            const top = stack.items[i];
            const visited_right = simplifyUnusedExpr(p, top.bin.right) orelse Expr.empty;
            result = result.joinWithComma(visited_right, p.allocator);
        }

        return if (result.isMissing()) null else result;
    }

    fn findIdentifiers(binding: Binding, decls: *std.ArrayList(G.Decl)) void {
        switch (binding.data) {
            .b_identifier => {
                decls.append(.{ .binding = binding }) catch unreachable;
            },
            .b_array => |array| {
                for (array.items) |item| {
                    findIdentifiers(item.binding, decls);
                }
            },
            .b_object => |obj| {
                for (obj.properties) |item| {
                    findIdentifiers(item.value, decls);
                }
            },
            else => {},
        }
    }

    fn shouldKeepStmtsInDeadControlFlow(stmts: []Stmt, allocator: Allocator) bool {
        for (stmts) |child| {
            if (shouldKeepStmtInDeadControlFlow(child, allocator)) {
                return true;
            }
        }
        return false;
    }

    /// If this is in a dead branch, then we want to trim as much dead code as we
    /// can. Everything can be trimmed except for hoisted declarations ("var" and
    /// "function"), which affect the parent scope. For example:
    ///
    /// function foo() {
    ///   if (false) { var x; }
    ///   x = 1;
    /// }
    ///
    /// We can't trim the entire branch as dead or calling foo() will incorrectly
    /// assign to a global variable instead.
    ///
    /// Caller is expected to first check `p.options.dead_code_elimination` so we only check it once.
    pub fn shouldKeepStmtInDeadControlFlow(stmt: Stmt, allocator: Allocator) bool {
        switch (stmt.data) {
            // Omit these statements entirely
            .s_empty, .s_expr, .s_throw, .s_return, .s_break, .s_continue, .s_class, .s_debugger => return false,

            .s_local => |local| {
                if (local.kind != .k_var) {
                    // Omit these statements entirely
                    return false;
                }

                // Omit everything except the identifiers

                // common case: single var foo = blah, don't need to allocate
                if (local.decls.len == 1 and local.decls.ptr[0].binding.data == .b_identifier) {
                    const prev = local.decls.ptr[0];
                    stmt.data.s_local.decls.ptr[0] = G.Decl{ .binding = prev.binding };
                    return true;
                }

                var decls = std.ArrayList(G.Decl).initCapacity(allocator, local.decls.len) catch unreachable;
                for (local.decls.slice()) |decl| {
                    findIdentifiers(decl.binding, &decls);
                }

                local.decls.update(decls);
                return true;
            },

            .s_block => |block| {
                return shouldKeepStmtsInDeadControlFlow(block.stmts, allocator);
            },

            .s_try => |try_stmt| {
                if (shouldKeepStmtsInDeadControlFlow(try_stmt.body, allocator)) {
                    return true;
                }

                if (try_stmt.catch_) |*catch_stmt| {
                    if (shouldKeepStmtsInDeadControlFlow(catch_stmt.body, allocator)) {
                        return true;
                    }
                }

                if (try_stmt.finally) |*finally_stmt| {
                    if (shouldKeepStmtsInDeadControlFlow(finally_stmt.stmts, allocator)) {
                        return true;
                    }
                }

                return false;
            },

            .s_if => |_if_| {
                if (shouldKeepStmtInDeadControlFlow(_if_.yes, allocator)) {
                    return true;
                }

                const no = _if_.no orelse return false;

                return shouldKeepStmtInDeadControlFlow(no, allocator);
            },

            .s_while => {
                return shouldKeepStmtInDeadControlFlow(stmt.data.s_while.body, allocator);
            },

            .s_do_while => {
                return shouldKeepStmtInDeadControlFlow(stmt.data.s_do_while.body, allocator);
            },

            .s_for => |__for__| {
                if (__for__.init) |init_| {
                    if (shouldKeepStmtInDeadControlFlow(init_, allocator)) {
                        return true;
                    }
                }

                return shouldKeepStmtInDeadControlFlow(__for__.body, allocator);
            },

            .s_for_in => |__for__| {
                return shouldKeepStmtInDeadControlFlow(__for__.init, allocator) or shouldKeepStmtInDeadControlFlow(__for__.body, allocator);
            },

            .s_for_of => |__for__| {
                return shouldKeepStmtInDeadControlFlow(__for__.init, allocator) or shouldKeepStmtInDeadControlFlow(__for__.body, allocator);
            },

            .s_label => |label| {
                return shouldKeepStmtInDeadControlFlow(label.stmt, allocator);
            },

            else => return true,
        }
    }

    // Returns true if this expression is known to result in a primitive value (i.e.
    // null, undefined, boolean, number, bigint, or string), even if the expression
    // cannot be removed due to side effects.
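    // e.g. "a() + b()" always yields a primitive value, even though evaluating
    // it may run arbitrary code.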
    pub fn isPrimitiveWithSideEffects(data: Expr.Data) bool {
        switch (data) {
            .e_null,
            .e_undefined,
            .e_boolean,
            .e_number,
            .e_big_int,
            .e_string,
            .e_inlined_enum,
            => {
                return true;
            },
            .e_unary => |e| {
                switch (e.op) {
                    // number or bigint
                    .un_pos,
                    .un_neg,
                    .un_cpl,
                    .un_pre_dec,
                    .un_pre_inc,
                    .un_post_dec,
                    .un_post_inc,
                    // boolean
                    .un_not,
                    .un_delete,
                    // undefined
                    .un_void,
                    // string
                    .un_typeof,
                    => {
                        return true;
                    },
                    else => {},
                }
            },
            .e_binary => |e| {
                switch (e.op) {
                    // boolean
                    .bin_lt,
                    .bin_le,
                    .bin_gt,
                    .bin_ge,
                    .bin_in,
                    .bin_instanceof,
                    .bin_loose_eq,
                    .bin_loose_ne,
                    .bin_strict_eq,
                    .bin_strict_ne,
                    // string, number, or bigint
                    .bin_add,
                    .bin_add_assign,
                    // number or bigint
                    .bin_sub,
                    .bin_mul,
                    .bin_div,
                    .bin_rem,
                    .bin_pow,
                    .bin_sub_assign,
                    .bin_mul_assign,
                    .bin_div_assign,
                    .bin_rem_assign,
                    .bin_pow_assign,
                    .bin_shl,
                    .bin_shr,
                    .bin_u_shr,
                    .bin_shl_assign,
                    .bin_shr_assign,
                    .bin_u_shr_assign,
                    .bin_bitwise_or,
                    .bin_bitwise_and,
                    .bin_bitwise_xor,
                    .bin_bitwise_or_assign,
                    .bin_bitwise_and_assign,
                    .bin_bitwise_xor_assign,
                    => {
                        return true;
                    },

                    // These always return one of the arguments unmodified
                    .bin_logical_and,
                    .bin_logical_or,
                    .bin_nullish_coalescing,
                    .bin_logical_and_assign,
                    .bin_logical_or_assign,
                    .bin_nullish_coalescing_assign,
                    => {
                        return isPrimitiveWithSideEffects(e.left.data) and isPrimitiveWithSideEffects(e.right.data);
                    },
                    .bin_comma => {
                        return isPrimitiveWithSideEffects(e.right.data);
                    },
                    else => {},
                }
            },
            .e_if => |e| {
                return isPrimitiveWithSideEffects(e.yes.data) and isPrimitiveWithSideEffects(e.no.data);
            },
            else => {},
        }
        return false;
    }

    pub const toTypeOf = Expr.Data.typeof;

    pub fn toNullOrUndefined(p: anytype, exp: Expr.Data) Result {
        if (!p.options.features.dead_code_elimination) {
            // value should not be read if ok is false; all existing calls to this function already adhere to this
            return Result{ .ok = false, .value = undefined, .side_effects = .could_have_side_effects };
        }
        switch (exp) {
            // Never null or undefined
            .e_boolean, .e_number, .e_string, .e_reg_exp, .e_function, .e_arrow, .e_big_int => {
                return Result{ .value = false, .side_effects = .no_side_effects, .ok = true };
            },

            .e_object, .e_array, .e_class => {
                return Result{ .value = false, .side_effects = .could_have_side_effects, .ok = true };
            },

            // always null or undefined
            .e_null, .e_undefined => {
                return Result{ .value = true, .side_effects = .no_side_effects, .ok = true };
            },

            .e_unary => |e| {
                switch (e.op) {
                    // Always number or bigint
                    .un_pos,
                    .un_neg,
                    .un_cpl,
                    .un_pre_dec,
                    .un_pre_inc,
                    .un_post_dec,
                    .un_post_inc,

                    // Always boolean or string
                    .un_not,
                    .un_typeof,
                    .un_delete,
                    => {
                        return Result{ .ok = true, .value = false, .side_effects = SideEffects.could_have_side_effects };
                    },

                    // Always undefined
                    .un_void => {
                        return Result{ .value = true, .side_effects = .could_have_side_effects, .ok = true };
                    },

                    else => {},
                }
            },

            .e_binary => |e| {
                switch (e.op) {
                    // always string or number or bigint
                    .bin_add,
                    .bin_add_assign,
                    // always number or bigint
                    .bin_sub,
                    .bin_mul,
                    .bin_div,
                    .bin_rem,
                    .bin_pow,
                    .bin_sub_assign,
                    .bin_mul_assign,
                    .bin_div_assign,
                    .bin_rem_assign,
                    .bin_pow_assign,
                    .bin_shl,
                    .bin_shr,
                    .bin_u_shr,
                    .bin_shl_assign,
                    .bin_shr_assign,
                    .bin_u_shr_assign,
                    .bin_bitwise_or,
                    .bin_bitwise_and,
                    .bin_bitwise_xor,
                    .bin_bitwise_or_assign,
                    .bin_bitwise_and_assign,
                    .bin_bitwise_xor_assign,
                    // always boolean
                    .bin_lt,
                    .bin_le,
                    .bin_gt,
                    .bin_ge,
                    .bin_in,
                    .bin_instanceof,
                    .bin_loose_eq,
                    .bin_loose_ne,
                    .bin_strict_eq,
                    .bin_strict_ne,
                    => {
                        return Result{ .ok = true, .value = false, .side_effects = SideEffects.could_have_side_effects };
                    },

                    .bin_comma => {
                        const res = toNullOrUndefined(p, e.right.data);
                        if (res.ok) {
                            return Result{ .ok = true, .value = res.value, .side_effects = SideEffects.could_have_side_effects };
                        }
                    },
                    else => {},
                }
            },
            .e_inlined_enum => |inlined| {
                return toNullOrUndefined(p, inlined.value.data);
            },
            else => {},
        }

        return Result{ .ok = false, .value = false, .side_effects = SideEffects.could_have_side_effects };
    }

    pub fn toBoolean(p: anytype, exp: Expr.Data) Result {
        // Only do this check once.
        if (!p.options.features.dead_code_elimination) {
            // value should not be read if ok is false; all existing calls to this function already adhere to this
            return Result{ .ok = false, .value = undefined, .side_effects = .could_have_side_effects };
        }

        return toBooleanWithoutDCECheck(exp);
    }

    // Avoid passing through *P
    // This is a very recursive function.
    fn toBooleanWithoutDCECheck(exp: Expr.Data) Result {
        switch (exp) {
            .e_null, .e_undefined => {
                return Result{ .ok = true, .value = false, .side_effects = .no_side_effects };
            },
            .e_boolean => |e| {
                return Result{ .ok = true, .value = e.value, .side_effects = .no_side_effects };
            },
            .e_number => |e| {
                return Result{ .ok = true, .value = e.value != 0.0 and !std.math.isNan(e.value), .side_effects = .no_side_effects };
            },
            .e_big_int => |e| {
                return Result{ .ok = true, .value = !strings.eqlComptime(e.value, "0"), .side_effects = .no_side_effects };
            },
            .e_string => |e| {
                return Result{ .ok = true, .value = e.isPresent(), .side_effects = .no_side_effects };
            },
            .e_function, .e_arrow, .e_reg_exp => {
                return Result{ .ok = true, .value = true, .side_effects = .no_side_effects };
            },
            .e_object, .e_array, .e_class => {
                return Result{ .ok = true, .value = true, .side_effects = .could_have_side_effects };
            },
            .e_unary => |e_| {
                switch (e_.op) {
                    .un_void => {
                        return Result{ .ok = true, .value = false, .side_effects = .could_have_side_effects };
                    },
                    .un_typeof => {
                        // Never an empty string

                        return Result{ .ok = true, .value = true, .side_effects = .could_have_side_effects };
                    },
                    .un_not => {
                        const result = toBooleanWithoutDCECheck(e_.value.data);
                        if (result.ok) {
                            return .{ .ok = true, .value = !result.value, .side_effects = result.side_effects };
                        }
                    },
                    else => {},
                }
            },
            .e_binary => |e_| {
                switch (e_.op) {
                    .bin_logical_or => {
                        // "anything || truthy" is truthy
                        const result = toBooleanWithoutDCECheck(e_.right.data);
                        if (result.value and result.ok) {
                            return Result{ .ok = true, .value = true, .side_effects = .could_have_side_effects };
                        }
                    },
                    .bin_logical_and => {
                        // "anything && falsy" is falsy
                        const result = toBooleanWithoutDCECheck(e_.right.data);
                        if (!result.value and result.ok) {
                            return Result{ .ok = true, .value = false, .side_effects = .could_have_side_effects };
                        }
                    },
                    .bin_comma => {
                        // "anything, truthy/falsy" is truthy/falsy
                        var result = toBooleanWithoutDCECheck(e_.right.data);
                        if (result.ok) {
                            result.side_effects = .could_have_side_effects;
                            return result;
                        }
                    },
                    .bin_gt => {
                        if (e_.left.data.toFiniteNumber()) |left_num| {
                            if (e_.right.data.toFiniteNumber()) |right_num| {
                                return Result{ .ok = true, .value = left_num > right_num, .side_effects = .no_side_effects };
                            }
                        }
                    },
                    .bin_lt => {
                        if (e_.left.data.toFiniteNumber()) |left_num| {
                            if (e_.right.data.toFiniteNumber()) |right_num| {
                                return Result{ .ok = true, .value = left_num < right_num, .side_effects = .no_side_effects };
                            }
                        }
                    },
                    .bin_le => {
                        if (e_.left.data.toFiniteNumber()) |left_num| {
                            if (e_.right.data.toFiniteNumber()) |right_num| {
                                return Result{ .ok = true, .value = left_num <= right_num, .side_effects = .no_side_effects };
                            }
                        }
                    },
                    .bin_ge => {
                        if (e_.left.data.toFiniteNumber()) |left_num| {
                            if (e_.right.data.toFiniteNumber()) |right_num| {
                                return Result{ .ok = true, .value = left_num >= right_num, .side_effects = .no_side_effects };
                            }
                        }
                    },
                    else => {},
                }
            },
            .e_inlined_enum => |inlined| {
                return toBooleanWithoutDCECheck(inlined.value.data);
            },
            .e_special => |special| switch (special) {
                .module_exports,
                .resolved_specifier_string,
                .hot_data,
                => {},
                .hot_accept,
                .hot_accept_visited,
                .hot_enabled,
                => return .{ .ok = true, .value = true, .side_effects = .no_side_effects },
                .hot_disabled,
                => return .{ .ok = true, .value = false, .side_effects = .no_side_effects },
            },
            else => {},
        }

        return Result{ .ok = false, .value = false, .side_effects = SideEffects.could_have_side_effects };
    }
};

const string = []const u8;

const options = @import("../options.zig");

const bun = @import("bun");
const Environment = bun.Environment;
const assert = bun.assert;
const strings = bun.strings;

const js_ast = bun.ast;
const Binding = js_ast.Binding;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeList = js_ast.ExprNodeList;
const Stmt = js_ast.Stmt;

const G = js_ast.G;
const Decl = G.Decl;
const Property = G.Property;

const std = @import("std");
const List = std.ArrayListUnmanaged;
const Allocator = std.mem.Allocator;
472
src/ast/TypeScript.zig
Normal file
@@ -0,0 +1,472 @@
// This function is taken from the official TypeScript compiler source code:
// https://github.com/microsoft/TypeScript/blob/master/src/compiler/parser.ts
pub fn canFollowTypeArgumentsInExpression(p: anytype) bool {
    return switch (p.lexer.token) {
        // These are the only tokens that can legally follow a type argument list, so we
        // definitely want to treat them as type arg lists.
        .t_open_paren, // foo<x>(
        .t_no_substitution_template_literal, // foo<T> `...`
        // foo<T> `...${100}...`
        .t_template_head,
        => true,

        // A type argument list followed by `<` never makes sense, and a type argument list followed
        // by `>` is ambiguous with a (re-scanned) `>>` operator, so we disqualify both. Also, in
        // this context, `+` and `-` are unary operators, not binary operators.
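        // e.g. "f<a, b>(c)" keeps the type-argument reading, while
        // "f < a > b" stays a comparison chain.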
        .t_less_than,
        .t_greater_than,
        .t_plus,
        .t_minus,
        // TypeScript always sees "t_greater_than" instead of these tokens since
        // their scanner works a little differently than our lexer. Since
        // "t_greater_than" is forbidden above, we forbid these as well.
        .t_greater_than_equals,
        .t_greater_than_greater_than,
        .t_greater_than_greater_than_equals,
        .t_greater_than_greater_than_greater_than,
        .t_greater_than_greater_than_greater_than_equals,
        => false,

        // We favor the type argument list interpretation when it is immediately followed by
        // a line break, a binary operator, or something that can't start an expression.
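        // e.g. "f<T>;" ends the expression, and "f<T> instanceof X" continues
        // with a binary operator, so both keep the type-argument reading.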
        else => p.lexer.has_newline_before or isBinaryOperator(p) or !isStartOfExpression(p),
    };
}

pub const Metadata = union(enum) {
    m_none: void,

    m_never: void,
    m_unknown: void,
    m_any: void,
    m_void: void,
    m_null: void,
    m_undefined: void,
    m_function: void,
    m_array: void,
    m_boolean: void,
    m_string: void,
    m_object: void,
    m_number: void,
    m_bigint: void,
    m_symbol: void,
    m_promise: void,
    m_identifier: Ref,
    m_dot: List(Ref),

    pub const default: @This() = .m_none;

    // the logic in finishUnion, mergeUnion, finishIntersection and mergeIntersection is
    // translated from:
    // https://github.com/microsoft/TypeScript/blob/e0a324b0503be479f2b33fd2e17c6e86c94d1297/src/compiler/transformers/typeSerializer.ts#L402
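    // e.g. serializing "string | number" merges to Object, while
    // "string | undefined" serializes as String.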
    /// Return the final union type if possible, or return null to continue merging.
    ///
    /// If the current type is m_never, m_null, or m_undefined, assign the current type
    /// to m_none and return null to ensure it's always replaced by the next type.
    pub fn finishUnion(current: *@This(), p: anytype) ?@This() {
        return switch (current.*) {
            .m_identifier => |ref| {
                if (strings.eqlComptime(p.loadNameFromRef(ref), "Object")) {
                    return .m_object;
                }
                return null;
            },

            .m_unknown,
            .m_any,
            .m_object,
            => .m_object,

            .m_never,
            .m_null,
            .m_undefined,
            => {
                current.* = .m_none;
                return null;
            },

            else => null,
        };
    }

    pub fn mergeUnion(result: *@This(), left: @This()) void {
        if (left != .m_none) {
            if (std.meta.activeTag(result.*) != std.meta.activeTag(left)) {
                result.* = switch (result.*) {
                    .m_never,
                    .m_undefined,
                    .m_null,
                    => left,

                    else => .m_object,
                };
            } else {
                switch (result.*) {
                    .m_identifier => |ref| {
                        if (!ref.eql(left.m_identifier)) {
                            result.* = .m_object;
                        }
                    },
                    else => {},
                }
            }
        } else {
            // always take the next value if left is m_none
        }
    }

    /// Return the final intersection type if possible, or return null to continue merging.
    ///
    /// If the current type is m_unknown, m_null, or m_undefined, assign the current type
    /// to m_none and return null to ensure it's always replaced by the next type.
    pub fn finishIntersection(current: *@This(), p: anytype) ?@This() {
        return switch (current.*) {
            .m_identifier => |ref| {
                if (strings.eqlComptime(p.loadNameFromRef(ref), "Object")) {
                    return .m_object;
                }
                return null;
            },

            // ensure m_never is the final type
            .m_never => .m_never,

            .m_any,
            .m_object,
            => .m_object,

            .m_unknown,
            .m_null,
            .m_undefined,
            => {
                current.* = .m_none;
                return null;
            },

            else => null,
        };
    }

    pub fn mergeIntersection(result: *@This(), left: @This()) void {
        if (left != .m_none) {
            if (std.meta.activeTag(result.*) != std.meta.activeTag(left)) {
                result.* = switch (result.*) {
                    .m_unknown,
                    .m_undefined,
                    .m_null,
                    => left,

                    // ensure m_never is the final type
                    .m_never => .m_never,

                    else => .m_object,
                };
            } else {
                switch (result.*) {
                    .m_identifier => |ref| {
                        if (!ref.eql(left.m_identifier)) {
                            result.* = .m_object;
                        }
                    },
                    else => {},
                }
            }
        } else {
            // make sure an intersection of only m_unknown serializes to "undefined"
            // instead of "Object"
            if (result.* == .m_unknown) {
                result.* = .m_undefined;
            }
        }
    }
};

pub fn isTSArrowFnJSX(p: anytype) !bool {
    const old_lexer = p.lexer;

    try p.lexer.next();
    // Look ahead to see if this should be an arrow function instead
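    // e.g. in a .tsx file, "<T,>(x) => x" and "<T extends U>(x) => x" are
    // arrow functions, not JSX elements.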
    var is_ts_arrow_fn = false;

    if (p.lexer.token == .t_const) {
        try p.lexer.next();
    }
    if (p.lexer.token == .t_identifier) {
        try p.lexer.next();
        if (p.lexer.token == .t_comma) {
            is_ts_arrow_fn = true;
        } else if (p.lexer.token == .t_extends) {
            try p.lexer.next();
            is_ts_arrow_fn = p.lexer.token != .t_equals and p.lexer.token != .t_greater_than;
        }
    }

    // Restore the lexer
    p.lexer.restore(&old_lexer);
    return is_ts_arrow_fn;
}

// This function is taken from the official TypeScript compiler source code:
// https://github.com/microsoft/TypeScript/blob/master/src/compiler/parser.ts
fn isBinaryOperator(p: anytype) bool {
    return switch (p.lexer.token) {
        .t_in => p.allow_in,

        .t_question_question,
        .t_bar_bar,
        .t_ampersand_ampersand,
        .t_bar,
        .t_caret,
        .t_ampersand,
        .t_equals_equals,
        .t_exclamation_equals,
        .t_equals_equals_equals,
        .t_exclamation_equals_equals,
        .t_less_than,
        .t_greater_than,
        .t_less_than_equals,
        .t_greater_than_equals,
        .t_instanceof,
        .t_less_than_less_than,
        .t_greater_than_greater_than,
        .t_greater_than_greater_than_greater_than,
        .t_plus,
        .t_minus,
        .t_asterisk,
        .t_slash,
        .t_percent,
        .t_asterisk_asterisk,
        => true,
        .t_identifier => p.lexer.isContextualKeyword("as") or p.lexer.isContextualKeyword("satisfies"),
        else => false,
    };
}

// This function is taken from the official TypeScript compiler source code:
// https://github.com/microsoft/TypeScript/blob/master/src/compiler/parser.ts
fn isStartOfLeftHandSideExpression(p: anytype) bool {
    return switch (p.lexer.token) {
        .t_this,
        .t_super,
        .t_null,
        .t_true,
        .t_false,
        .t_numeric_literal,
        .t_big_integer_literal,
        .t_string_literal,
        .t_no_substitution_template_literal,
        .t_template_head,
        .t_open_paren,
        .t_open_bracket,
        .t_open_brace,
        .t_function,
        .t_class,
        .t_new,
        .t_slash,
        .t_slash_equals,
        .t_identifier,
        => true,
        .t_import => lookAheadNextTokenIsOpenParenOrLessThanOrDot(p),
        else => isIdentifier(p),
    };
}

fn lookAheadNextTokenIsOpenParenOrLessThanOrDot(p: anytype) bool {
    const old_lexer = p.lexer;
    const old_log_disabled = p.lexer.is_log_disabled;
    p.lexer.is_log_disabled = true;
    defer {
        p.lexer.restore(&old_lexer);
        p.lexer.is_log_disabled = old_log_disabled;
    }
    p.lexer.next() catch {};

    return switch (p.lexer.token) {
        .t_open_paren, .t_less_than, .t_dot => true,
        else => false,
    };
}

// This function is taken from the official TypeScript compiler source code:
// https://github.com/microsoft/TypeScript/blob/master/src/compiler/parser.ts
fn isIdentifier(p: anytype) bool {
    if (p.lexer.token == .t_identifier) {
        // If we have a 'yield' keyword, and we're in the [yield] context, then 'yield' is
        // considered a keyword and is not an identifier.
        if (p.fn_or_arrow_data_parse.allow_yield != .allow_ident and strings.eqlComptime(p.lexer.identifier, "yield")) {
            return false;
        }

        // If we have an 'await' keyword, and we're in the [await] context, then 'await' is
        // considered a keyword and is not an identifier.
        if (p.fn_or_arrow_data_parse.allow_await != .allow_ident and strings.eqlComptime(p.lexer.identifier, "await")) {
            return false;
        }

        return true;
    }

    return false;
}

fn isStartOfExpression(p: anytype) bool {
    if (isStartOfLeftHandSideExpression(p))
        return true;

    switch (p.lexer.token) {
        .t_plus,
        .t_minus,
        .t_tilde,
        .t_exclamation,
        .t_delete,
        .t_typeof,
        .t_void,
        .t_plus_plus,
        .t_minus_minus,
        .t_less_than,
        .t_private_identifier,
        .t_at,
        => return true,
        else => {
            if (p.lexer.token == .t_identifier and (strings.eqlComptime(p.lexer.identifier, "await") or strings.eqlComptime(p.lexer.identifier, "yield"))) {
                // Yield/await always starts an expression: either it is an identifier, in
                // which case it is definitely an expression, or it's a keyword (because
                // we're in a generator or async function, or in strict mode, or both) and
                // it starts a yield or await expression.
                return true;
            }

            // Error tolerance. If we see the start of some binary operator, we consider
            // that the start of an expression. That way we'll parse out a missing identifier,
            // give a good message about an identifier being missing, and then consume the
            // rest of the binary expression.
            if (isBinaryOperator(p)) {
                return true;
            }

            return isIdentifier(p);
        },
    }

    unreachable;
}

pub const Identifier = struct {
    pub const StmtIdentifier = enum {
        s_type,

        s_namespace,

        s_abstract,

        s_module,

        s_interface,

        s_declare,
    };
    pub fn forStr(str: string) ?StmtIdentifier {
        switch (str.len) {
            "type".len => return if (strings.eqlComptimeIgnoreLen(str, "type"))
                .s_type
            else
                null,
            "interface".len => {
                if (strings.eqlComptime(str, "interface")) {
                    return .s_interface;
                } else if (strings.eqlComptime(str, "namespace")) {
                    return .s_namespace;
                } else {
                    return null;
                }
            },
            "abstract".len => {
                if (strings.eqlComptime(str, "abstract")) {
                    return .s_abstract;
                } else {
                    return null;
                }
            },
            "declare".len => {
                if (strings.eqlComptime(str, "declare")) {
                    return .s_declare;
                } else {
                    return null;
                }
            },
            "module".len => {
                if (strings.eqlComptime(str, "module")) {
                    return .s_module;
                } else {
                    return null;
                }
            },
            else => return null,
        }
    }
    pub const IMap = bun.ComptimeStringMap(Kind, .{
        .{ "unique", .unique },
        .{ "abstract", .abstract },
        .{ "asserts", .asserts },

        .{ "keyof", .prefix_keyof },
        .{ "readonly", .prefix_readonly },

        .{ "any", .primitive_any },
        .{ "never", .primitive_never },
        .{ "unknown", .primitive_unknown },
        .{ "undefined", .primitive_undefined },
        .{ "object", .primitive_object },
        .{ "number", .primitive_number },
        .{ "string", .primitive_string },
        .{ "boolean", .primitive_boolean },
        .{ "bigint", .primitive_bigint },
        .{ "symbol", .primitive_symbol },

        .{ "infer", .infer },
    });
    pub const Kind = enum {
        normal,
        unique,
        abstract,
        asserts,
        prefix_keyof,
        prefix_readonly,
        primitive_any,
        primitive_never,
        primitive_unknown,
        primitive_undefined,
        primitive_object,
        primitive_number,
        primitive_string,
        primitive_boolean,
        primitive_bigint,
        primitive_symbol,
        infer,
    };
};

pub const SkipTypeOptions = enum {
    is_return_type,
    is_index_signature,
    allow_tuple_labels,
    disallow_conditional_types,

    pub const Bitset = std.enums.EnumSet(@This());
    pub const empty = Bitset.initEmpty();
};

const string = []const u8;

const bun = @import("bun");
const strings = bun.strings;

const js_lexer = bun.js_lexer;
const T = js_lexer.T;

const js_parser = bun.js_parser;
const Ref = js_parser.Ref;
const TypeScript = js_parser.TypeScript;

const std = @import("std");
const List = std.ArrayListUnmanaged;
233
src/ast/foldStringAddition.zig
Normal file
@@ -0,0 +1,233 @@
/// Concatenate two `E.String`s, mutating BOTH inputs
/// unless `has_inlined_enum_poison` is set.
///
/// Currently, "inlined enum poison" refers to cases where mutation would cause
/// output bugs due to inlined enum values sharing `E.String`s. If a new use case
/// besides inlined enums comes up to set this to true, please rename the
/// variable and document it.
fn joinStrings(left: *const E.String, right: *const E.String, has_inlined_enum_poison: bool) E.String {
    var new = if (has_inlined_enum_poison)
        // Inlined enums can be shared by multiple call sites. In
        // this case, we need to ensure that the ENTIRE rope is
        // cloned. In other situations, the lhs doesn't have any
        // other owner, so it is fine to mutate `lhs.data.end.next`.
        //
        // Consider the following case:
        //   const enum A {
        //     B = "a" + "b",
        //     D = B + "d",
        //   };
        //   console.log(A.B, A.D);
        left.cloneRopeNodes()
    else
        left.*;

    // Similarly, the right side has to be cloned for an enum rope too.
    //
    // Consider the following case:
    //   const enum A {
    //     B = "1" + "2",
    //     C = ("3" + B) + "4",
    //   };
    //   console.log(A.B, A.C);
    const rhs_clone = Expr.Data.Store.append(E.String, if (has_inlined_enum_poison)
        right.cloneRopeNodes()
    else
        right.*);

    new.push(rhs_clone);
    new.prefer_template = new.prefer_template or rhs_clone.prefer_template;

    return new;
}

/// Transforming the left operand into a string is not safe if it comes from a
/// nested AST node.
const FoldStringAdditionKind = enum {
    // "x" + "y" -> "xy"
    // 1 + "y" -> "1y"
    normal,
    // a + "x" + "y" -> a + "xy"
    // a + 1 + "y" -> a + 1 + "y" (unchanged; the left operand is nested)
    nested_left,
};

/// NOTE: unlike esbuild's js_ast_helpers.FoldStringAddition, this does mutate
/// the input AST in the case of rope strings.
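/// e.g. "foo" + "bar" folds to "foobar", and "a" + `b${c}` folds to `ab${c}`.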
pub fn foldStringAddition(l: Expr, r: Expr, allocator: std.mem.Allocator, kind: FoldStringAdditionKind) ?Expr {
    // "See through" inline enum constants
    // TODO: implement foldAdditionPreProcess to fold some more things :)
    var lhs = l.unwrapInlined();
    var rhs = r.unwrapInlined();

    if (kind != .nested_left) {
        // See comment on `FoldStringAdditionKind` for examples
        switch (rhs.data) {
            .e_string, .e_template => {
                if (lhs.toStringExprWithoutSideEffects(allocator)) |str| {
                    lhs = str;
                }
            },
            else => {},
        }
    }

    switch (lhs.data) {
        .e_string => |left| {
            if (rhs.toStringExprWithoutSideEffects(allocator)) |str| {
                rhs = str;
            }

            if (left.isUTF8()) {
                switch (rhs.data) {
                    // "bar" + "baz" => "barbaz"
                    .e_string => |right| {
                        if (right.isUTF8()) {
                            const has_inlined_enum_poison =
                                l.data == .e_inlined_enum or
                                r.data == .e_inlined_enum;

                            return Expr.init(E.String, joinStrings(
                                left,
                                right,
                                has_inlined_enum_poison,
                            ), lhs.loc);
                        }
                    },
                    // "bar" + `baz${bar}` => `barbaz${bar}`
                    .e_template => |right| {
                        if (right.head.isUTF8()) {
                            return Expr.init(E.Template, E.Template{
                                .parts = right.parts,
                                .head = .{ .cooked = joinStrings(
                                    left,
                                    &right.head.cooked,
                                    l.data == .e_inlined_enum,
                                ) },
                            }, l.loc);
                        }
                    },
                    else => {
                        // other constant-foldable ast nodes would have been converted to .e_string
                    },
                }

                // "'x' + `y${z}`" => "`xy${z}`"
                if (rhs.data == .e_template and rhs.data.e_template.tag == null) {}
            }

            if (left.len() == 0 and rhs.knownPrimitive() == .string) {
                return rhs;
            }

            return null;
        },

        .e_template => |left| {
            // "`${x}` + 0" => "`${x}` + '0'"
            if (rhs.toStringExprWithoutSideEffects(allocator)) |str| {
                rhs = str;
            }

            if (left.tag == null) {
                switch (rhs.data) {
                    // `foo${bar}` + "baz" => `foo${bar}baz`
                    .e_string => |right| {
                        if (right.isUTF8()) {
                            // Mutation of this node is fine because it will not
                            // be shared by other places. Note that e_template will
                            // be treated by enums as strings, but will not be
                            // inlined unless they could be converted into
                            // .e_string.
                            if (left.parts.len > 0) {
                                const i = left.parts.len - 1;
                                const last = left.parts[i];
                                if (last.tail.isUTF8()) {
                                    left.parts[i].tail = .{ .cooked = joinStrings(
                                        &last.tail.cooked,
                                        right,
                                        r.data == .e_inlined_enum,
                                    ) };
                                    return lhs;
                                }
                            } else {
                                if (left.head.isUTF8()) {
                                    left.head = .{ .cooked = joinStrings(
                                        &left.head.cooked,
                                        right,
                                        r.data == .e_inlined_enum,
                                    ) };
                                    return lhs;
                                }
                            }
                        }
                    },
                    // `foo${bar}` + `a${hi}b` => `foo${bar}a${hi}b`
                    .e_template => |right| {
                        if (right.tag == null and right.head.isUTF8()) {
                            if (left.parts.len > 0) {
                                const i = left.parts.len - 1;
                                const last = left.parts[i];
                                if (last.tail.isUTF8() and right.head.isUTF8()) {
                                    left.parts[i].tail = .{ .cooked = joinStrings(
                                        &last.tail.cooked,
                                        &right.head.cooked,
                                        r.data == .e_inlined_enum,
                                    ) };

                                    left.parts = if (right.parts.len == 0)
                                        left.parts
                                    else
                                        std.mem.concat(
                                            allocator,
                                            E.TemplatePart,
                                            &.{ left.parts, right.parts },
                                        ) catch bun.outOfMemory();
                                    return lhs;
                                }
                            } else {
                                if (left.head.isUTF8() and right.head.isUTF8()) {
                                    left.head = .{ .cooked = joinStrings(
                                        &left.head.cooked,
                                        &right.head.cooked,
                                        r.data == .e_inlined_enum,
                                    ) };
                                    left.parts = right.parts;
                                    return lhs;
                                }
                            }
                        }
                    },
                    else => {
                        // other constant-foldable ast nodes would have been converted to .e_string
                    },
                }
            }
        },

        else => {
            // other constant-foldable ast nodes would have been converted to .e_string
        },
    }

    if (rhs.data.as(.e_string)) |right| {
        if (right.len() == 0 and lhs.knownPrimitive() == .string) {
            return lhs;
        }
    }

    return null;
}

const string = []const u8;

const std = @import("std");
const Allocator = std.mem.Allocator;

const bun = @import("bun");
const strings = bun.strings;

const js_ast = bun.ast;
const B = js_ast.B;
const E = js_ast.E;
const Expr = js_ast.Expr;
725
src/ast/maybe.zig
Normal file
@@ -0,0 +1,725 @@
pub fn AstMaybe(
    comptime parser_feature__typescript: bool,
    comptime parser_feature__jsx: JSXTransformType,
    comptime parser_feature__scan_only: bool,
) type {
    return struct {
        const P = js_parser.NewParser_(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only);

        pub fn maybeRelocateVarsToTopLevel(p: *P, decls: []const G.Decl, mode: RelocateVars.Mode) RelocateVars {
            // Only do this when the scope is not already top-level and when we're not inside a function.
            if (p.current_scope == p.module_scope) {
                return .{ .ok = false };
            }

            var scope = p.current_scope;
            while (!scope.kindStopsHoisting()) {
                if (comptime Environment.allow_assert) assert(scope.parent != null);
                scope = scope.parent.?;
            }

            if (scope != p.module_scope) {
                return .{ .ok = false };
            }

            var value: Expr = Expr{ .loc = logger.Loc.Empty, .data = Expr.Data{ .e_missing = E.Missing{} } };

            for (decls) |decl| {
                const binding = Binding.toExpr(
                    &decl.binding,
                    p.to_expr_wrapper_hoisted,
                );
                if (decl.value) |decl_value| {
                    value = value.joinWithComma(Expr.assign(binding, decl_value), p.allocator);
                } else if (mode == .for_in_or_for_of) {
                    value = value.joinWithComma(binding, p.allocator);
                }
            }

            if (value.data == .e_missing) {
                return .{ .ok = true };
            }

            return .{ .stmt = p.s(S.SExpr{ .value = value }, value.loc), .ok = true };
        }

        // EDot nodes represent a property access. This function may return an
        // expression to replace the property access with. It assumes that the
        // target of the EDot expression has already been visited.
        pub fn maybeRewritePropertyAccess(
            p: *P,
            loc: logger.Loc,
            target: js_ast.Expr,
            name: string,
            name_loc: logger.Loc,
            identifier_opts: IdentifierOpts,
        ) ?Expr {
            sw: switch (target.data) {
                .e_identifier => |id| {
                    // Rewrite property accesses on explicit namespace imports as an identifier.
                    // This lets us replace them easily in the printer to rebind them to
                    // something else without paying the cost of a whole-tree traversal during
                    // module linking just to rewrite these EDot expressions.
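                    // e.g. with "import * as ns from './mod'", a use of "ns.foo" is
                    // rewritten to a generated import item for "foo" (illustrative).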
                    if (p.options.bundle) {
                        if (p.import_items_for_namespace.getPtr(id.ref)) |import_items| {
                            const ref = (import_items.get(name) orelse brk: {
                                // Generate a new import item symbol in the module scope
                                const new_item = LocRef{
                                    .loc = name_loc,
                                    .ref = p.newSymbol(.import, name) catch unreachable,
                                };
                                p.module_scope.generated.push(p.allocator, new_item.ref.?) catch unreachable;

                                import_items.put(name, new_item) catch unreachable;
                                p.is_import_item.put(p.allocator, new_item.ref.?, {}) catch unreachable;

                                var symbol = &p.symbols.items[new_item.ref.?.innerIndex()];

                                // Mark this as generated in case it's missing. We don't want to
                                // generate errors for missing import items that are automatically
                                // generated.
                                symbol.import_item_status = .generated;

                                break :brk new_item;
                            }).ref.?;

                            // Undo the usage count for the namespace itself. This is used later
                            // to detect whether the namespace symbol has ever been "captured"
                            // or whether it has just been used to read properties off of.
                            //
                            // The benefit of doing this is that if both this module and the
                            // imported module end up in the same module group and the namespace
                            // symbol has never been captured, then we don't need to generate
                            // any code for the namespace at all.
                            p.ignoreUsage(id.ref);

                            // Track how many times we've referenced this symbol
                            p.recordUsage(ref);

                            return p.handleIdentifier(
                                name_loc,
                                E.Identifier{ .ref = ref },
                                name,
                                .{
                                    .assign_target = identifier_opts.assign_target,
                                    .is_call_target = identifier_opts.is_call_target,
                                    .is_delete_target = identifier_opts.is_delete_target,

                                    // If this expression is used as the target of a call expression, make
                                    // sure the value of "this" is preserved.
                                    .was_originally_identifier = false,
                                },
                            );
                        }
                    }

                    if (!p.is_control_flow_dead and id.ref.eql(p.module_ref)) {
                        // Rewrite "module.require()" to "require()" for Webpack compatibility.
                        // See https://github.com/webpack/webpack/pull/7750 for more info.
                        // This also makes correctness a little easier.
                        if (identifier_opts.is_call_target and strings.eqlComptime(name, "require")) {
                            p.ignoreUsage(p.module_ref);
                            return p.valueForRequire(name_loc);
                        } else if (!p.commonjs_named_exports_deoptimized and strings.eqlComptime(name, "exports")) {
                            if (identifier_opts.assign_target != .none) {
                                p.commonjs_module_exports_assigned_deoptimized = true;
                            }

                            // Detect if we are doing
                            //
                            //   module.exports = {
                            //     foo: "bar"
                            //   }
                            //
                            // Note that it cannot be any of these:
                            //
                            //   module.exports += { };
                            //   delete module.exports = {};
                            //   module.exports()
                            if (!(identifier_opts.is_call_target or identifier_opts.is_delete_target) and
                                identifier_opts.assign_target == .replace and
                                p.stmt_expr_value == .e_binary and
                                p.stmt_expr_value.e_binary.op == .bin_assign)
                            {
                                if (
                                // if it's not top-level, don't do this
                                p.module_scope != p.current_scope or
                                    // if you do
                                    //
                                    //   exports.foo = 123;
                                    //   module.exports = {};
                                    //
                                    // that's a de-opt.
                                    p.commonjs_named_exports.count() > 0 or

                                    // anything which is not module.exports = {} is a de-opt.
                                    p.stmt_expr_value.e_binary.right.data != .e_object or
                                    p.stmt_expr_value.e_binary.left.data != .e_dot or
                                    !strings.eqlComptime(p.stmt_expr_value.e_binary.left.data.e_dot.name, "exports") or
                                    p.stmt_expr_value.e_binary.left.data.e_dot.target.data != .e_identifier or
                                    !p.stmt_expr_value.e_binary.left.data.e_dot.target.data.e_identifier.ref.eql(p.module_ref))
                                {
                                    p.deoptimizeCommonJSNamedExports();
                                    return null;
                                }

                                const props: []const G.Property = p.stmt_expr_value.e_binary.right.data.e_object.properties.slice();
                                for (props) |prop| {
                                    // if it's not a trivial object literal, de-opt
                                    if (prop.kind != .normal or
                                        prop.key == null or
                                        prop.key.?.data != .e_string or
                                        prop.flags.contains(Flags.Property.is_method) or
                                        prop.flags.contains(Flags.Property.is_computed) or
                                        prop.flags.contains(Flags.Property.is_spread) or
                                        prop.flags.contains(Flags.Property.is_static) or
                                        // If it creates a new scope, we can't do this optimization right now
                                        // Our scope order verification stuff will get mad
                                        // But we should let you do module.exports = { bar: foo(), baz: 123 }
                                        // just not module.exports = { bar: function() {} }
                                        // just not module.exports = { bar() {} }
                                        switch (prop.value.?.data) {
                                            .e_commonjs_export_identifier, .e_import_identifier, .e_identifier => false,
                                            .e_call => |call| switch (call.target.data) {
                                                .e_commonjs_export_identifier, .e_import_identifier, .e_identifier => false,
                                                else => |call_target| !@as(Expr.Tag, call_target).isPrimitiveLiteral(),
                                            },
                                            else => !prop.value.?.isPrimitiveLiteral(),
                                        })
                                    {
                                        p.deoptimizeCommonJSNamedExports();
                                        return null;
                                    }
                                }

                                if (props.len == 0) {
                                    // an empty object de-opts because otherwise the statement becomes
                                    // <empty space> = {};
                                    p.deoptimizeCommonJSNamedExports();
                                    return null;
                                }

                                var stmts = std.ArrayList(Stmt).initCapacity(p.allocator, props.len * 2) catch unreachable;
                                var decls = p.allocator.alloc(Decl, props.len) catch unreachable;
                                var clause_items = p.allocator.alloc(js_ast.ClauseItem, props.len) catch unreachable;

                                for (props) |prop| {
                                    const key = prop.key.?.data.e_string.string(p.allocator) catch unreachable;
                                    const visited_value = p.visitExpr(prop.value.?);
                                    const value = SideEffects.simplifyUnusedExpr(p, visited_value) orelse visited_value;

                                    // We are doing `module.exports = { ... }`;
                                    // let's rewrite it to a series of what will become export assignments
                                    const named_export_entry = p.commonjs_named_exports.getOrPut(p.allocator, key) catch unreachable;
                                    if (!named_export_entry.found_existing) {
                                        const new_ref = p.newSymbol(
                                            .other,
                                            std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(key)}) catch unreachable,
                                        ) catch unreachable;
                                        p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
                                        named_export_entry.value_ptr.* = .{
                                            .loc_ref = LocRef{
                                                .loc = name_loc,
                                                .ref = new_ref,
                                            },
                                            .needs_decl = false,
                                        };
                                    }
                                    const ref = named_export_entry.value_ptr.loc_ref.ref.?;
                                    // module.exports = {
                                    //   foo: "bar",
                                    //   baz: "qux",
                                    // }
                                    // ->
                                    // exports.foo = "bar", exports.baz = "qux"
                                    // Which will become
                                    //   $foo = "bar";
                                    //   $baz = "qux";
                                    //   export { $foo as foo, $baz as baz }

                                    decls[0] = .{
                                        .binding = p.b(B.Identifier{ .ref = ref }, prop.key.?.loc),
                                        .value = value,
                                    };
                                    // we have to ensure these are known to be top-level
                                    p.declared_symbols.append(p.allocator, .{
                                        .ref = ref,
                                        .is_top_level = true,
                                    }) catch unreachable;
                                    p.had_commonjs_named_exports_this_visit = true;
                                    clause_items[0] = js_ast.ClauseItem{
                                        // We want the generated name to not conflict
                                        .alias = key,
                                        .alias_loc = prop.key.?.loc,
                                        .name = named_export_entry.value_ptr.loc_ref,
                                    };

                                    stmts.appendSlice(
                                        &[_]Stmt{
                                            p.s(
                                                S.Local{
                                                    .kind = .k_var,
                                                    .is_export = false,
                                                    .was_commonjs_export = true,
                                                    .decls = G.Decl.List.init(decls[0..1]),
                                                },
                                                prop.key.?.loc,
                                            ),
                                            p.s(
                                                S.ExportClause{
                                                    .items = clause_items[0..1],
                                                    .is_single_line = true,
                                                },
                                                prop.key.?.loc,
                                            ),
                                        },
                                    ) catch unreachable;
                                    decls = decls[1..];
                                    clause_items = clause_items[1..];
                                }

                                p.ignoreUsage(p.module_ref);
                                p.commonjs_replacement_stmts = stmts.items;
                                return p.newExpr(E.Missing{}, name_loc);
                            }

                            // Deoptimizations:
                            //   delete module.exports
                            //   module.exports();
                            if (identifier_opts.is_call_target or identifier_opts.is_delete_target or identifier_opts.assign_target != .none) {
                                p.deoptimizeCommonJSNamedExports();
                                return null;
                            }

                            // rewrite `module.exports` to `exports`
                            return .{ .data = .{ .e_special = .module_exports }, .loc = name_loc };
                        } else if (p.options.bundle and strings.eqlComptime(name, "id") and identifier_opts.assign_target == .none) {
                            // inline module.id
                            p.ignoreUsage(p.module_ref);
                            return p.newExpr(E.String.init(p.source.path.pretty), name_loc);
                        } else if (p.options.bundle and strings.eqlComptime(name, "filename") and identifier_opts.assign_target == .none) {
                            // inline module.filename
                            p.ignoreUsage(p.module_ref);
                            return p.newExpr(E.String.init(p.source.path.name.filename), name_loc);
                        } else if (p.options.bundle and strings.eqlComptime(name, "path") and identifier_opts.assign_target == .none) {
                            // inline module.path
                            p.ignoreUsage(p.module_ref);
                            return p.newExpr(E.String.init(p.source.path.pretty), name_loc);
                        }
                    }

                    if (p.shouldUnwrapCommonJSToESM()) {
                        if (!p.is_control_flow_dead and id.ref.eql(p.exports_ref)) {
                            if (!p.commonjs_named_exports_deoptimized) {
                                if (identifier_opts.is_delete_target) {
                                    p.deoptimizeCommonJSNamedExports();
                                    return null;
                                }

                                const named_export_entry = p.commonjs_named_exports.getOrPut(p.allocator, name) catch unreachable;
                                if (!named_export_entry.found_existing) {
                                    const new_ref = p.newSymbol(
                                        .other,
                                        std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(name)}) catch unreachable,
                                    ) catch unreachable;
                                    p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
                                    named_export_entry.value_ptr.* = .{
                                        .loc_ref = LocRef{
                                            .loc = name_loc,
                                            .ref = new_ref,
                                        },
                                        .needs_decl = true,
                                    };
                                    if (p.commonjs_named_exports_needs_conversion == std.math.maxInt(u32))
                                        p.commonjs_named_exports_needs_conversion = @as(u32, @truncate(p.commonjs_named_exports.count() - 1));
                                }

                                const ref = named_export_entry.value_ptr.*.loc_ref.ref.?;
                                p.ignoreUsage(id.ref);
                                p.recordUsage(ref);

                                return p.newExpr(
                                    E.CommonJSExportIdentifier{
                                        .ref = ref,
                                    },
                                    name_loc,
                                );
                            } else if (p.options.features.commonjs_at_runtime and identifier_opts.assign_target != .none) {
                                p.has_commonjs_export_names = true;
                            }
                        }
                    }

                    // Handle references to namespaces or namespace members
                    if (p.ts_namespace.expr == .e_identifier and
                        id.ref.eql(p.ts_namespace.expr.e_identifier.ref) and
                        identifier_opts.assign_target == .none and
                        !identifier_opts.is_delete_target)
                    {
                        return maybeRewritePropertyAccessForNamespace(p, name, &target, loc, name_loc);
                    }
                },
                .e_string => |str| {
                    if (p.options.features.minify_syntax) {
                        // minify "long-string".length to 11
                        if (strings.eqlComptime(name, "length")) {
                            if (str.javascriptLength()) |len| {
                                return p.newExpr(E.Number{ .value = @floatFromInt(len) }, loc);
                            }
                        }
                    }
                },
                .e_inlined_enum => |ie| {
                    continue :sw ie.value.data;
                },
                .e_object => |obj| {
                    if (comptime FeatureFlags.inline_properties_in_transpiler) {
                        if (p.options.features.minify_syntax) {
                            // Rewrite a property access like this:
                            //   { f: () => {} }.f
                            // To:
                            //   () => {}
                            //
                            // To avoid thinking too much about edge cases, only do this for:
                            //   1) Objects with a single property
                            //   2) Not a method, not a computed property
                            if (obj.properties.len == 1 and
                                !identifier_opts.is_delete_target and
                                identifier_opts.assign_target == .none and !identifier_opts.is_call_target)
                            {
                                const prop: G.Property = obj.properties.ptr[0];
                                if (prop.value != null and
                                    prop.flags.count() == 0 and
                                    prop.key != null and
                                    prop.key.?.data == .e_string and
                                    prop.key.?.data.e_string.eql([]const u8, name) and
                                    !bun.strings.eqlComptime(name, "__proto__"))
                                {
                                    return prop.value.?;
                                }
                            }
                        }
                    }
                },
                .e_import_meta => {
                    if (strings.eqlComptime(name, "main")) {
                        return p.valueForImportMetaMain(false, target.loc);
                    }

                    if (strings.eqlComptime(name, "hot")) {
                        return .{ .data = .{
                            .e_special = if (p.options.features.hot_module_reloading) .hot_enabled else .hot_disabled,
                        }, .loc = loc };
                    }

                    // Inline import.meta properties for Bake
if (p.options.framework != null) {
|
||||
if (strings.eqlComptime(name, "dir") or strings.eqlComptime(name, "dirname")) {
|
||||
// Inline import.meta.dir
|
||||
return p.newExpr(E.String.init(p.source.path.name.dir), name_loc);
|
||||
} else if (strings.eqlComptime(name, "file")) {
|
||||
// Inline import.meta.file (filename only)
|
||||
return p.newExpr(E.String.init(p.source.path.name.filename), name_loc);
|
||||
} else if (strings.eqlComptime(name, "path")) {
|
||||
// Inline import.meta.path (full path)
|
||||
return p.newExpr(E.String.init(p.source.path.text), name_loc);
|
||||
} else if (strings.eqlComptime(name, "url")) {
|
||||
// Inline import.meta.url as file:// URL
|
||||
const bunstr = bun.String.fromBytes(p.source.path.text);
|
||||
defer bunstr.deref();
|
||||
const url = std.fmt.allocPrint(p.allocator, "{s}", .{jsc.URL.fileURLFromString(bunstr)}) catch unreachable;
|
||||
return p.newExpr(E.String.init(url), name_loc);
|
||||
}
|
||||
}
|
||||
|
||||
// Make all property accesses on `import.meta.url` side effect free.
|
||||
return p.newExpr(
|
||||
E.Dot{
|
||||
.target = target,
|
||||
.name = name,
|
||||
.name_loc = name_loc,
|
||||
.can_be_removed_if_unused = true,
|
||||
},
|
||||
target.loc,
|
||||
);
|
||||
},
|
||||
.e_require_call_target => {
|
||||
if (strings.eqlComptime(name, "main")) {
|
||||
return .{ .loc = loc, .data = .e_require_main };
|
||||
}
|
||||
},
|
||||
.e_import_identifier => |id| {
|
||||
// Symbol uses due to a property access off of an imported symbol are tracked
|
||||
// specially. This lets us do tree shaking for cross-file TypeScript enums.
|
||||
                    if (p.options.bundle and !p.is_control_flow_dead) {
                        const use = p.symbol_uses.getPtr(id.ref).?;
                        use.count_estimate -|= 1;
                        // note: this use is not removed as we assume it exists later

                        // Add a special symbol use instead
                        const gop = p.import_symbol_property_uses.getOrPutValue(
                            p.allocator,
                            id.ref,
                            .{},
                        ) catch bun.outOfMemory();
                        const inner_use = gop.value_ptr.getOrPutValue(
                            p.allocator,
                            name,
                            .{},
                        ) catch bun.outOfMemory();
                        inner_use.value_ptr.count_estimate += 1;
                    }
                },
                inline .e_dot, .e_index => |data, tag| {
                    if (p.ts_namespace.expr == tag and
                        data == @field(p.ts_namespace.expr, @tagName(tag)) and
                        identifier_opts.assign_target == .none and
                        !identifier_opts.is_delete_target)
                    {
                        return maybeRewritePropertyAccessForNamespace(p, name, &target, loc, name_loc);
                    }
                },
                .e_special => |special| switch (special) {
                    .module_exports => {
                        if (p.shouldUnwrapCommonJSToESM()) {
                            if (!p.is_control_flow_dead) {
                                if (!p.commonjs_named_exports_deoptimized) {
                                    if (identifier_opts.is_delete_target) {
                                        p.deoptimizeCommonJSNamedExports();
                                        return null;
                                    }

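                                    // Each distinct "module.exports.<name>" access gets a generated
                                    // "$<name>" symbol so the CommonJS module can later be unwrapped
                                    // into ESM named exports.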
                                    const named_export_entry = p.commonjs_named_exports.getOrPut(p.allocator, name) catch unreachable;
                                    if (!named_export_entry.found_existing) {
                                        const new_ref = p.newSymbol(
                                            .other,
                                            std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(name)}) catch unreachable,
                                        ) catch unreachable;
                                        p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
                                        named_export_entry.value_ptr.* = .{
                                            .loc_ref = LocRef{
                                                .loc = name_loc,
                                                .ref = new_ref,
                                            },
                                            .needs_decl = true,
                                        };
                                        if (p.commonjs_named_exports_needs_conversion == std.math.maxInt(u32))
                                            p.commonjs_named_exports_needs_conversion = @as(u32, @truncate(p.commonjs_named_exports.count() - 1));
                                    }

                                    const ref = named_export_entry.value_ptr.*.loc_ref.ref.?;
                                    p.recordUsage(ref);

                                    return p.newExpr(
                                        E.CommonJSExportIdentifier{
                                            .ref = ref,
                                            // Record this as from module.exports
                                            .base = .module_dot_exports,
                                        },
                                        name_loc,
                                    );
                                } else if (p.options.features.commonjs_at_runtime and identifier_opts.assign_target != .none) {
                                    p.has_commonjs_export_names = true;
                                }
                            }
                        }
                    },
                    .hot_enabled, .hot_disabled => {
                        const enabled = p.options.features.hot_module_reloading;
                        if (bun.strings.eqlComptime(name, "data")) {
                            return if (enabled)
                                .{ .data = .{ .e_special = .hot_data }, .loc = loc }
                            else
                                Expr.init(E.Object, .{}, loc);
                        }
                        if (bun.strings.eqlComptime(name, "accept")) {
                            if (!enabled) {
                                p.method_call_must_be_replaced_with_undefined = true;
                                return .{ .data = .e_undefined, .loc = loc };
                            }
                            return .{ .data = .{
                                .e_special = .hot_accept,
                            }, .loc = loc };
                        }
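                        // The remaining import.meta.hot methods are forwarded to the HMR runtime
                        // object when hot module reloading is enabled, and otherwise replaced with
                        // undefined.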
                        const lookup_table = comptime bun.ComptimeStringMap(void, [_]struct { [:0]const u8, void }{
                            .{ "decline", {} },
                            .{ "dispose", {} },
                            .{ "prune", {} },
                            .{ "invalidate", {} },
                            .{ "on", {} },
                            .{ "off", {} },
                            .{ "send", {} },
                        });
                        if (lookup_table.has(name)) {
                            if (enabled) {
                                return Expr.init(E.Dot, .{
                                    .target = Expr.initIdentifier(p.hmr_api_ref, target.loc),
                                    .name = name,
                                    .name_loc = name_loc,
                                }, loc);
                            } else {
                                p.method_call_must_be_replaced_with_undefined = true;
                                return .{ .data = .e_undefined, .loc = loc };
                            }
                        } else {
                            // This error is a bit out of place since the HMR
                            // API is validated in the parser instead of at
                            // runtime. When the API is not validated in this
                            // way, the developer may unintentionally read or
                            // write internal fields of HMRModule.
                            p.log.addError(
                                p.source,
                                loc,
                                std.fmt.allocPrint(
                                    p.allocator,
                                    "import.meta.hot.{s} does not exist",
                                    .{name},
                                ) catch bun.outOfMemory(),
                            ) catch bun.outOfMemory();
                            return .{ .data = .e_undefined, .loc = loc };
                        }
                    },
                    else => {},
                },
                else => {},
            }

            return null;
        }

        fn maybeRewritePropertyAccessForNamespace(
            p: *P,
            name: string,
            target: *const Expr,
            loc: logger.Loc,
            name_loc: logger.Loc,
        ) ?Expr {
            if (p.ts_namespace.map.?.get(name)) |value| {
                switch (value.data) {
                    .enum_number => |num| {
                        p.ignoreUsageOfIdentifierInDotChain(target.*);
                        return p.wrapInlinedEnum(
                            .{ .loc = loc, .data = .{ .e_number = .{ .value = num } } },
                            name,
                        );
                    },

                    .enum_string => |str| {
                        p.ignoreUsageOfIdentifierInDotChain(target.*);
                        return p.wrapInlinedEnum(
                            .{ .loc = loc, .data = .{ .e_string = str } },
                            name,
                        );
                    },

                    .namespace => |namespace| {
                        // If this isn't a constant, return a clone of this property access
                        // but with the namespace member data associated with it so that
                        // more property accesses off of this property access are recognized.
                        const expr = if (js_lexer.isIdentifier(name))
                            p.newExpr(E.Dot{
                                .target = target.*,
                                .name = name,
                                .name_loc = name_loc,
                            }, loc)
                        else
                            p.newExpr(E.Index{
                                .target = target.*,
                                .index = p.newExpr(E.String{ .data = name }, name_loc),
                            }, loc);

                        p.ts_namespace = .{
                            .expr = expr.data,
                            .map = namespace,
                        };

                        return expr;
                    },

                    else => {},
                }
            }

            return null;
        }

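        /// Builds the JavaScript expression `typeof <expr> === "undefined"`.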
        pub fn checkIfDefinedHelper(p: *P, expr: Expr) !Expr {
            return p.newExpr(
                E.Binary{
                    .op = .bin_strict_eq,
                    .left = p.newExpr(
                        E.Unary{
                            .op = .un_typeof,
                            .value = expr,
                        },
                        logger.Loc.Empty,
                    ),
                    .right = p.newExpr(
                        E.String{ .data = "undefined" },
                        logger.Loc.Empty,
                    ),
                },
                logger.Loc.Empty,
            );
        }

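        /// Wraps an identifier as `typeof <id> === "undefined" ? Object : <id>`,
        /// yielding a value that is always defined.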
        pub fn maybeDefinedHelper(p: *P, identifier_expr: Expr) !Expr {
            return p.newExpr(
                E.If{
                    .test_ = try p.checkIfDefinedHelper(identifier_expr),
                    .yes = p.newExpr(
                        E.Identifier{
                            .ref = (p.findSymbol(logger.Loc.Empty, "Object") catch unreachable).ref,
                        },
                        logger.Loc.Empty,
                    ),
                    .no = identifier_expr,
                },
                logger.Loc.Empty,
            );
        }

        pub fn maybeCommaSpreadError(p: *P, _comma_after_spread: ?logger.Loc) void {
            const comma_after_spread = _comma_after_spread orelse return;
            if (comma_after_spread.start == -1) return;

            p.log.addRangeError(p.source, logger.Range{ .loc = comma_after_spread, .len = 1 }, "Unexpected \",\" after rest pattern") catch unreachable;
        }
    };
}

const string = []const u8;

const bun = @import("bun");
const Environment = bun.Environment;
const FeatureFlags = bun.FeatureFlags;
const assert = bun.assert;
const js_lexer = bun.js_lexer;
const jsc = bun.jsc;
const logger = bun.logger;
const strings = bun.strings;

const js_ast = bun.ast;
const B = js_ast.B;
const Binding = js_ast.Binding;
const E = js_ast.E;
const Expr = js_ast.Expr;
const Flags = js_ast.Flags;
const LocRef = js_ast.LocRef;
const S = js_ast.S;
const Stmt = js_ast.Stmt;
const Symbol = js_ast.Symbol;

const G = js_ast.G;
const Decl = G.Decl;
const Property = G.Property;

const js_parser = bun.js_parser;
const IdentifierOpts = js_parser.IdentifierOpts;
const JSXTransformType = js_parser.JSXTransformType;
const RelocateVars = js_parser.RelocateVars;
const SideEffects = js_parser.SideEffects;
const TypeScript = js_parser.TypeScript;
const options = js_parser.options;

const std = @import("std");
const List = std.ArrayListUnmanaged;
1390
src/ast/parse.zig
Normal file
1390
src/ast/parse.zig
Normal file
File diff suppressed because it is too large
517
src/ast/parseFn.zig
Normal file
517
src/ast/parseFn.zig
Normal file
@@ -0,0 +1,517 @@
pub fn ParseFn(
    comptime parser_feature__typescript: bool,
    comptime parser_feature__jsx: JSXTransformType,
    comptime parser_feature__scan_only: bool,
) type {
    return struct {
        const P = js_parser.NewParser_(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only);
        const is_typescript_enabled = P.is_typescript_enabled;

        /// This assumes the "function" token has already been parsed
        pub fn parseFnStmt(noalias p: *P, loc: logger.Loc, noalias opts: *ParseStatementOptions, asyncRange: ?logger.Range) !Stmt {
            const is_generator = p.lexer.token == T.t_asterisk;
            const is_async = asyncRange != null;

            if (is_generator) {
                // p.markSyntaxFeature(compat.Generator, p.lexer.Range())
                try p.lexer.next();
            } else if (is_async) {
                // p.markLoweredSyntaxFeature(compat.AsyncAwait, asyncRange, compat.Generator)
            }

            switch (opts.lexical_decl) {
                .forbid => {
                    try p.forbidLexicalDecl(loc);
                },

                // Allow certain function statements in certain single-statement contexts
                .allow_fn_inside_if, .allow_fn_inside_label => {
                    if (opts.is_typescript_declare or is_generator or is_async) {
                        try p.forbidLexicalDecl(loc);
                    }
                },
                else => {},
            }

            var name: ?js_ast.LocRef = null;
            var nameText: string = "";

            // The name is optional for "export default function() {}" pseudo-statements
            if (!opts.is_name_optional or p.lexer.token == T.t_identifier) {
                const nameLoc = p.lexer.loc();
                nameText = p.lexer.identifier;
                try p.lexer.expect(T.t_identifier);
                // Difference
                const ref = try p.newSymbol(Symbol.Kind.other, nameText);
                name = js_ast.LocRef{
                    .loc = nameLoc,
                    .ref = ref,
                };
            }

            // Even anonymous functions can have TypeScript type parameters
            if (is_typescript_enabled) {
                _ = try p.skipTypeScriptTypeParameters(.{ .allow_const_modifier = true });
            }

            // Introduce a fake block scope for function declarations inside if statements
            var ifStmtScopeIndex: usize = 0;
            const hasIfScope = opts.lexical_decl == .allow_fn_inside_if;
            if (hasIfScope) {
                ifStmtScopeIndex = try p.pushScopeForParsePass(js_ast.Scope.Kind.block, loc);
            }

            var scopeIndex: usize = 0;
            var pushedScopeForFunctionArgs = false;
            // Push scope if the current lexer token is an open parenthesis token.
            // That is, the parser is about to parse the function arguments.
            if (p.lexer.token == .t_open_paren) {
                scopeIndex = try p.pushScopeForParsePass(js_ast.Scope.Kind.function_args, p.lexer.loc());
                pushedScopeForFunctionArgs = true;
            }

            var func = try p.parseFn(name, FnOrArrowDataParse{
                .needs_async_loc = loc,
                .async_range = asyncRange orelse logger.Range.None,
                .has_async_range = asyncRange != null,
                .allow_await = if (is_async) AwaitOrYield.allow_expr else AwaitOrYield.allow_ident,
                .allow_yield = if (is_generator) AwaitOrYield.allow_expr else AwaitOrYield.allow_ident,
                .is_typescript_declare = opts.is_typescript_declare,

                // Only allow omitting the body if we're parsing TypeScript
                .allow_missing_body_for_type_script = is_typescript_enabled,
            });
            p.fn_or_arrow_data_parse.has_argument_decorators = false;

            if (comptime is_typescript_enabled) {
                // Don't output anything if it's just a forward declaration of a function
                if ((opts.is_typescript_declare or func.flags.contains(.is_forward_declaration)) and pushedScopeForFunctionArgs) {
                    p.popAndDiscardScope(scopeIndex);

                    // Balance the fake block scope introduced above
                    if (hasIfScope) {
                        p.popScope();
                    }

                    if (opts.is_typescript_declare and opts.is_namespace_scope and opts.is_export) {
                        p.has_non_local_export_declare_inside_namespace = true;
                    }

                    return p.s(S.TypeScript{}, loc);
                }
            }

            if (pushedScopeForFunctionArgs) {
                p.popScope();
            }

            // Only declare the function after we know if it had a body or not. Otherwise
            // TypeScript code such as this will double-declare the symbol:
            //
            // function foo(): void;
            // function foo(): void {}
            //
            if (name != null) {
                const kind = if (is_generator or is_async)
                    Symbol.Kind.generator_or_async_function
                else
                    Symbol.Kind.hoisted_function;

                name.?.ref = try p.declareSymbol(kind, name.?.loc, nameText);
                func.name = name;
            }

            func.flags.setPresent(.has_if_scope, hasIfScope);
            func.flags.setPresent(.is_export, opts.is_export);

            // Balance the fake block scope introduced above
            if (hasIfScope) {
                p.popScope();
            }

            return p.s(
                S.Function{
                    .func = func,
                },
                loc,
            );
        }

        pub fn parseFn(p: *P, name: ?js_ast.LocRef, opts: FnOrArrowDataParse) anyerror!G.Fn {
            // if data.allowAwait and data.allowYield {
            // p.markSyntaxFeature(compat.AsyncGenerator, data.asyncRange)
            // }

            var func = G.Fn{
                .name = name,

                .flags = Flags.Function.init(.{
                    .has_rest_arg = false,
                    .is_async = opts.allow_await == .allow_expr,
                    .is_generator = opts.allow_yield == .allow_expr,
                }),

                .arguments_ref = null,
                .open_parens_loc = p.lexer.loc(),
            };
            try p.lexer.expect(T.t_open_paren);

            // Await and yield are not allowed in function arguments
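            // (the struct is snapshotted by raw bytes so it can be restored verbatim below)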
            var old_fn_or_arrow_data = std.mem.toBytes(p.fn_or_arrow_data_parse);

            p.fn_or_arrow_data_parse.allow_await = if (opts.allow_await == .allow_expr)
                AwaitOrYield.forbid_all
            else
                AwaitOrYield.allow_ident;

            p.fn_or_arrow_data_parse.allow_yield = if (opts.allow_yield == .allow_expr)
                AwaitOrYield.forbid_all
            else
                AwaitOrYield.allow_ident;

            // Don't suggest inserting "async" before anything if "await" is found
            p.fn_or_arrow_data_parse.needs_async_loc = logger.Loc.Empty;

            // If "super()" is allowed in the body, it's allowed in the arguments
            p.fn_or_arrow_data_parse.allow_super_call = opts.allow_super_call;
            p.fn_or_arrow_data_parse.allow_super_property = opts.allow_super_property;

            var rest_arg: bool = false;
            var arg_has_decorators: bool = false;
            var args = List(G.Arg){};
            while (p.lexer.token != T.t_close_paren) {
                // Skip over "this" type annotations
                if (is_typescript_enabled and p.lexer.token == T.t_this) {
                    try p.lexer.next();
                    if (p.lexer.token == T.t_colon) {
                        try p.lexer.next();
                        try p.skipTypeScriptType(.lowest);
                    }
                    if (p.lexer.token != T.t_comma) {
                        break;
                    }

                    try p.lexer.next();
                    continue;
                }

                var ts_decorators: []ExprNodeIndex = &([_]ExprNodeIndex{});
                if (opts.allow_ts_decorators) {
                    ts_decorators = try p.parseTypeScriptDecorators();
                    if (ts_decorators.len > 0) {
                        arg_has_decorators = true;
                    }
                }

                if (!func.flags.contains(.has_rest_arg) and p.lexer.token == T.t_dot_dot_dot) {
                    // p.markSyntaxFeature
                    try p.lexer.next();
                    rest_arg = true;
                    func.flags.insert(.has_rest_arg);
                }

                var is_typescript_ctor_field = false;
                const is_identifier = p.lexer.token == T.t_identifier;
                var text = p.lexer.identifier;
                var arg = try p.parseBinding(.{});
                var ts_metadata = TypeScript.Metadata.default;

                if (comptime is_typescript_enabled) {
                    if (is_identifier and opts.is_constructor) {
                        // Skip over TypeScript accessibility modifiers, which turn this argument
                        // into a class field when used inside a class constructor. This is known
                        // as a "parameter property" in TypeScript.
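                        // e.g. "constructor(public foo: Foo) {}" both binds the parameter and
                        // declares a class field named "foo".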
                        while (true) {
                            switch (p.lexer.token) {
                                .t_identifier, .t_open_brace, .t_open_bracket => {
                                    if (!js_lexer.TypeScriptAccessibilityModifier.has(text)) {
                                        break;
                                    }

                                    is_typescript_ctor_field = true;

                                    // TypeScript requires an identifier binding
                                    if (p.lexer.token != .t_identifier) {
                                        try p.lexer.expect(.t_identifier);
                                    }
                                    text = p.lexer.identifier;

                                    // Re-parse the binding (the current binding is the TypeScript keyword)
                                    arg = try p.parseBinding(.{});
                                },
                                else => {
                                    break;
                                },
                            }
                        }
                    }

                    // "function foo(a?) {}"
                    if (p.lexer.token == .t_question) {
                        try p.lexer.next();
                    }

                    // "function foo(a: any) {}"
                    if (p.lexer.token == .t_colon) {
                        try p.lexer.next();
                        if (!rest_arg) {
                            if (p.options.features.emit_decorator_metadata and
                                opts.allow_ts_decorators and
                                (opts.has_argument_decorators or opts.has_decorators or arg_has_decorators))
                            {
                                ts_metadata = try p.skipTypeScriptTypeWithMetadata(.lowest);
                            } else {
                                try p.skipTypeScriptType(.lowest);
                            }
                        } else {
                            // rest parameter is always object, leave metadata as m_none
                            try p.skipTypeScriptType(.lowest);
                        }
                    }
                }

                var parseStmtOpts = ParseStatementOptions{};
                p.declareBinding(.hoisted, &arg, &parseStmtOpts) catch unreachable;

                var default_value: ?ExprNodeIndex = null;
                if (!func.flags.contains(.has_rest_arg) and p.lexer.token == .t_equals) {
                    // p.markSyntaxFeature
                    try p.lexer.next();
                    default_value = try p.parseExpr(.comma);
                }

                args.append(p.allocator, G.Arg{
                    .ts_decorators = ExprNodeList.init(ts_decorators),
                    .binding = arg,
                    .default = default_value,

                    // We need to track this because it affects code generation
                    .is_typescript_ctor_field = is_typescript_ctor_field,
                    .ts_metadata = ts_metadata,
                }) catch unreachable;

                if (p.lexer.token != .t_comma) {
                    break;
                }

                if (func.flags.contains(.has_rest_arg)) {
                    // JavaScript does not allow a comma after a rest argument
                    if (opts.is_typescript_declare) {
                        // TypeScript does allow a comma after a rest argument in a "declare" context
                        try p.lexer.next();
                    } else {
                        try p.lexer.expect(.t_close_paren);
                    }

                    break;
                }

                try p.lexer.next();
                rest_arg = false;
            }
            if (args.items.len > 0) {
                func.args = args.items;
            }

            // Reserve the special name "arguments" in this scope. This ensures that it
            // shadows any variable called "arguments" in any parent scopes. But only do
            // this if it wasn't already declared above because arguments are allowed to
            // be called "arguments", in which case the real "arguments" is inaccessible.
            if (!p.current_scope.members.contains("arguments")) {
                func.arguments_ref = p.declareSymbolMaybeGenerated(.arguments, func.open_parens_loc, arguments_str, false) catch unreachable;
                p.symbols.items[func.arguments_ref.?.innerIndex()].must_not_be_renamed = true;
            }

            try p.lexer.expect(.t_close_paren);
            p.fn_or_arrow_data_parse = std.mem.bytesToValue(@TypeOf(p.fn_or_arrow_data_parse), &old_fn_or_arrow_data);

            p.fn_or_arrow_data_parse.has_argument_decorators = arg_has_decorators;

            // "function foo(): any {}"
            if (is_typescript_enabled) {
                if (p.lexer.token == .t_colon) {
                    try p.lexer.next();

                    if (p.options.features.emit_decorator_metadata and opts.allow_ts_decorators and (opts.has_argument_decorators or opts.has_decorators)) {
                        func.return_ts_metadata = try p.skipTypescriptReturnTypeWithMetadata();
                    } else {
                        try p.skipTypescriptReturnType();
                    }
                } else if (p.options.features.emit_decorator_metadata and opts.allow_ts_decorators and (opts.has_argument_decorators or opts.has_decorators)) {
                    if (func.flags.contains(.is_async)) {
                        func.return_ts_metadata = .m_promise;
                    } else {
                        func.return_ts_metadata = .m_undefined;
                    }
                }
            }

            // "function foo(): any;"
            if (opts.allow_missing_body_for_type_script and p.lexer.token != .t_open_brace) {
                try p.lexer.expectOrInsertSemicolon();
                func.flags.insert(.is_forward_declaration);
                return func;
            }
            var tempOpts = opts;
            func.body = try p.parseFnBody(&tempOpts);

            return func;
        }

        pub fn parseFnExpr(p: *P, loc: logger.Loc, is_async: bool, async_range: logger.Range) !Expr {
            try p.lexer.next();
            const is_generator = p.lexer.token == T.t_asterisk;
            if (is_generator) {
                // p.markSyntaxFeature()
                try p.lexer.next();
            } else if (is_async) {
                // p.markLoweredSyntaxFeature(compat.AsyncAwait, asyncRange, compat.Generator)
            }

            var name: ?js_ast.LocRef = null;

            _ = p.pushScopeForParsePass(.function_args, loc) catch unreachable;

            // The name is optional
            if (p.lexer.token == .t_identifier) {
                const text = p.lexer.identifier;

                // Don't declare the name "arguments" since it's shadowed and inaccessible
                name = js_ast.LocRef{
                    .loc = p.lexer.loc(),
                    .ref = if (text.len > 0 and !strings.eqlComptime(text, "arguments"))
                        try p.declareSymbol(.hoisted_function, p.lexer.loc(), text)
                    else
                        try p.newSymbol(.hoisted_function, text),
                };

                try p.lexer.next();
            }

            // Even anonymous functions can have TypeScript type parameters
            if (comptime is_typescript_enabled) {
                _ = try p.skipTypeScriptTypeParameters(.{ .allow_const_modifier = true });
            }

            const func = try p.parseFn(name, FnOrArrowDataParse{
                .needs_async_loc = loc,
                .async_range = async_range,
                .allow_await = if (is_async) .allow_expr else .allow_ident,
                .allow_yield = if (is_generator) .allow_expr else .allow_ident,
            });
            p.fn_or_arrow_data_parse.has_argument_decorators = false;

            p.validateFunctionName(func, .expr);
            p.popScope();

            return p.newExpr(js_ast.E.Function{
                .func = func,
            }, loc);
        }

        pub fn parseFnBody(p: *P, data: *FnOrArrowDataParse) !G.FnBody {
            const oldFnOrArrowData = p.fn_or_arrow_data_parse;
            const oldAllowIn = p.allow_in;
            p.fn_or_arrow_data_parse = data.*;
            p.allow_in = true;

            const loc = p.lexer.loc();
            var pushedScopeForFunctionBody = false;
            if (p.lexer.token == .t_open_brace) {
                _ = try p.pushScopeForParsePass(Scope.Kind.function_body, p.lexer.loc());
                pushedScopeForFunctionBody = true;
            }

            try p.lexer.expect(.t_open_brace);
            var opts = ParseStatementOptions{};
            const stmts = try p.parseStmtsUpTo(.t_close_brace, &opts);
            try p.lexer.next();

            if (pushedScopeForFunctionBody) p.popScope();

            p.allow_in = oldAllowIn;
            p.fn_or_arrow_data_parse = oldFnOrArrowData;
            return G.FnBody{ .loc = loc, .stmts = stmts };
        }

        pub fn parseArrowBody(p: *P, args: []js_ast.G.Arg, data: *FnOrArrowDataParse) !E.Arrow {
            const arrow_loc = p.lexer.loc();

            // Newlines are not allowed before "=>"
            if (p.lexer.has_newline_before) {
                try p.log.addRangeError(p.source, p.lexer.range(), "Unexpected newline before \"=>\"");
                return error.SyntaxError;
            }

            try p.lexer.expect(T.t_equals_greater_than);

            for (args) |*arg| {
                var opts = ParseStatementOptions{};
                try p.declareBinding(Symbol.Kind.hoisted, &arg.binding, &opts);
            }

            // The ability to use "this" and "super()" is inherited by arrow functions
            data.allow_super_call = p.fn_or_arrow_data_parse.allow_super_call;
            data.allow_super_property = p.fn_or_arrow_data_parse.allow_super_property;
            data.is_this_disallowed = p.fn_or_arrow_data_parse.is_this_disallowed;

            if (p.lexer.token == .t_open_brace) {
                const body = try p.parseFnBody(data);
                p.after_arrow_body_loc = p.lexer.loc();
                return E.Arrow{ .args = args, .body = body };
            }

            _ = try p.pushScopeForParsePass(Scope.Kind.function_body, arrow_loc);
            defer p.popScope();

            var old_fn_or_arrow_data = std.mem.toBytes(p.fn_or_arrow_data_parse);

            p.fn_or_arrow_data_parse = data.*;
            const expr = try p.parseExpr(Level.comma);
            p.fn_or_arrow_data_parse = std.mem.bytesToValue(@TypeOf(p.fn_or_arrow_data_parse), &old_fn_or_arrow_data);

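            // An expression body like "x => x + 1" is represented as a single
            // synthetic "return" statement.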
            var stmts = try p.allocator.alloc(Stmt, 1);
            stmts[0] = p.s(S.Return{ .value = expr }, expr.loc);
            return E.Arrow{ .args = args, .prefer_expr = true, .body = G.FnBody{ .loc = arrow_loc, .stmts = stmts } };
        }
    };
}

const string = []const u8;

const bun = @import("bun");
const logger = bun.logger;
const strings = bun.strings;

const js_ast = bun.ast;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;
const ExprNodeList = js_ast.ExprNodeList;
const Flags = js_ast.Flags;
const LocRef = js_ast.LocRef;
const S = js_ast.S;
const Scope = js_ast.Scope;
const Stmt = js_ast.Stmt;
const Symbol = js_ast.Symbol;

const G = js_ast.G;
const Arg = G.Arg;

const Op = js_ast.Op;
const Level = js_ast.Op.Level;

const js_lexer = bun.js_lexer;
const T = js_lexer.T;

const js_parser = bun.js_parser;
const AwaitOrYield = js_parser.AwaitOrYield;
const FnOrArrowDataParse = js_parser.FnOrArrowDataParse;
const JSXTransformType = js_parser.JSXTransformType;
const ParseStatementOptions = js_parser.ParseStatementOptions;
const TypeScript = js_parser.TypeScript;
const arguments_str = js_parser.arguments_str;
const options = js_parser.options;

const std = @import("std");
const List = std.ArrayListUnmanaged;
437
src/ast/parseImportExport.zig
Normal file
437
src/ast/parseImportExport.zig
Normal file
@@ -0,0 +1,437 @@
pub fn ParseImportExport(
    comptime parser_feature__typescript: bool,
    comptime parser_feature__jsx: JSXTransformType,
    comptime parser_feature__scan_only: bool,
) type {
    return struct {
        const P = js_parser.NewParser_(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only);
        const is_typescript_enabled = P.is_typescript_enabled;
        const only_scan_imports_and_do_not_visit = P.only_scan_imports_and_do_not_visit;

        /// Note: The caller has already parsed the "import" keyword
        pub fn parseImportExpr(noalias p: *P, loc: logger.Loc, level: Level) anyerror!Expr {
            // Parse an "import.meta" expression
            if (p.lexer.token == .t_dot) {
                p.esm_import_keyword = js_lexer.rangeOfIdentifier(p.source, loc);
                try p.lexer.next();
                if (p.lexer.isContextualKeyword("meta")) {
                    try p.lexer.next();
                    p.has_import_meta = true;
                    return p.newExpr(E.ImportMeta{}, loc);
                } else {
                    try p.lexer.expectedString("\"meta\"");
                }
            }

            if (level.gt(.call)) {
                const r = js_lexer.rangeOfIdentifier(p.source, loc);
                p.log.addRangeError(p.source, r, "Cannot use an \"import\" expression here without parentheses") catch unreachable;
            }

            // allow "in" inside call arguments;
            const old_allow_in = p.allow_in;
            p.allow_in = true;

            p.lexer.preserve_all_comments_before = true;
            try p.lexer.expect(.t_open_paren);

            // const comments = try p.lexer.comments_to_preserve_before.toOwnedSlice();
            p.lexer.comments_to_preserve_before.clearRetainingCapacity();

            p.lexer.preserve_all_comments_before = false;

            const value = try p.parseExpr(.comma);

            var import_options = Expr.empty;
            if (p.lexer.token == .t_comma) {
                // "import('./foo.json', )"
                try p.lexer.next();

                if (p.lexer.token != .t_close_paren) {
                    // "import('./foo.json', { assert: { type: 'json' } })"
                    import_options = try p.parseExpr(.comma);

                    if (p.lexer.token == .t_comma) {
                        // "import('./foo.json', { assert: { type: 'json' } }, )"
                        try p.lexer.next();
                    }
                }
            }

            try p.lexer.expect(.t_close_paren);

            p.allow_in = old_allow_in;

            if (comptime only_scan_imports_and_do_not_visit) {
                if (value.data == .e_string and value.data.e_string.isUTF8() and value.data.e_string.isPresent()) {
                    const import_record_index = p.addImportRecord(.dynamic, value.loc, value.data.e_string.slice(p.allocator));

                    return p.newExpr(E.Import{
                        .expr = value,
                        // .leading_interior_comments = comments,
                        .import_record_index = import_record_index,
                        .options = import_options,
                    }, loc);
                }
            }

            // _ = comments; // TODO: leading_interior comments

            return p.newExpr(E.Import{
                .expr = value,
                // .leading_interior_comments = comments,
                .import_record_index = std.math.maxInt(u32),
                .options = import_options,
            }, loc);
        }

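        /// Parses the braced list of named imports, e.g.
        /// "import { a, b as c, type T } from 'mod'".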
        pub fn parseImportClause(
            p: *P,
        ) !ImportClause {
            var items = ListManaged(js_ast.ClauseItem).init(p.allocator);
            try p.lexer.expect(.t_open_brace);
            var is_single_line = !p.lexer.has_newline_before;
            // this variable should not exist if we're not in a typescript file
            var had_type_only_imports = if (comptime is_typescript_enabled)
                false
            else {};

            while (p.lexer.token != .t_close_brace) {
                // The alias may be a keyword;
                const isIdentifier = p.lexer.token == .t_identifier;
                const alias_loc = p.lexer.loc();
                const alias = try p.parseClauseAlias("import");
                var name = LocRef{ .loc = alias_loc, .ref = try p.storeNameInRef(alias) };
                var original_name = alias;
                try p.lexer.next();

                const probably_type_only_import = if (comptime is_typescript_enabled)
                    strings.eqlComptime(alias, "type") and
                        p.lexer.token != .t_comma and
                        p.lexer.token != .t_close_brace
                else
                    false;

                // "import { type xx } from 'mod'"
                // "import { type xx as yy } from 'mod'"
                // "import { type 'xx' as yy } from 'mod'"
                // "import { type as } from 'mod'"
                // "import { type as as } from 'mod'"
                // "import { type as as as } from 'mod'"
                if (probably_type_only_import) {
                    if (p.lexer.isContextualKeyword("as")) {
                        try p.lexer.next();
                        if (p.lexer.isContextualKeyword("as")) {
                            original_name = p.lexer.identifier;
                            name = LocRef{ .loc = p.lexer.loc(), .ref = try p.storeNameInRef(original_name) };
                            try p.lexer.next();

                            if (p.lexer.token == .t_identifier) {

                                // "import { type as as as } from 'mod'"
                                // "import { type as as foo } from 'mod'"
                                had_type_only_imports = true;
                                try p.lexer.next();
                            } else {
                                // "import { type as as } from 'mod'"

                                try items.append(.{
                                    .alias = alias,
                                    .alias_loc = alias_loc,
                                    .name = name,
                                    .original_name = original_name,
                                });
                            }
                        } else if (p.lexer.token == .t_identifier) {
                            had_type_only_imports = true;

                            // "import { type as xxx } from 'mod'"
                            original_name = p.lexer.identifier;
                            name = LocRef{ .loc = p.lexer.loc(), .ref = try p.storeNameInRef(original_name) };
                            try p.lexer.expect(.t_identifier);

                            if (isEvalOrArguments(original_name)) {
                                const r = p.source.rangeOfString(name.loc);
                                try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot use {s} as an identifier here", .{original_name});
                            }

                            try items.append(.{
                                .alias = alias,
                                .alias_loc = alias_loc,
                                .name = name,
                                .original_name = original_name,
                            });
                        }
                    } else {
                        const is_identifier = p.lexer.token == .t_identifier;

                        // "import { type xx } from 'mod'"
                        // "import { type xx as yy } from 'mod'"
                        // "import { type if as yy } from 'mod'"
                        // "import { type 'xx' as yy } from 'mod'"
                        _ = try p.parseClauseAlias("import");
                        try p.lexer.next();

                        if (p.lexer.isContextualKeyword("as")) {
                            try p.lexer.next();

                            try p.lexer.expect(.t_identifier);
                        } else if (!is_identifier) {
                            // An import where the name is a keyword must have an alias
                            try p.lexer.expectedString("\"as\"");
                        }
                        had_type_only_imports = true;
                    }
                } else {
                    if (p.lexer.isContextualKeyword("as")) {
                        try p.lexer.next();
                        original_name = p.lexer.identifier;
                        name = LocRef{ .loc = alias_loc, .ref = try p.storeNameInRef(original_name) };
                        try p.lexer.expect(.t_identifier);
                    } else if (!isIdentifier) {
                        // An import where the name is a keyword must have an alias
                        try p.lexer.expectedString("\"as\"");
                    }

                    // Reject forbidden names
                    if (isEvalOrArguments(original_name)) {
                        const r = js_lexer.rangeOfIdentifier(p.source, name.loc);
                        try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot use \"{s}\" as an identifier here", .{original_name});
                    }

                    try items.append(js_ast.ClauseItem{
                        .alias = alias,
                        .alias_loc = alias_loc,
                        .name = name,
                        .original_name = original_name,
                    });
                }

                if (p.lexer.token != .t_comma) {
                    break;
                }

                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }

                try p.lexer.next();

                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }
            }

            if (p.lexer.has_newline_before) {
                is_single_line = false;
            }

            try p.lexer.expect(.t_close_brace);
            return ImportClause{
                .items = items.items,
                .is_single_line = is_single_line,
                .had_type_only_imports = if (comptime is_typescript_enabled)
                    had_type_only_imports
                else
                    false,
            };
        }

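        /// Parses the braced export list, e.g. "export { a, b as c }" or
        /// "export { default } from 'mod'".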
        pub fn parseExportClause(p: *P) !ExportClauseResult {
            var items = ListManaged(js_ast.ClauseItem).initCapacity(p.allocator, 1) catch unreachable;
            try p.lexer.expect(.t_open_brace);
            var is_single_line = !p.lexer.has_newline_before;
            var first_non_identifier_loc = logger.Loc{ .start = 0 };
            var had_type_only_exports = false;

            while (p.lexer.token != .t_close_brace) {
                var alias = try p.parseClauseAlias("export");
                var alias_loc = p.lexer.loc();

                const name = LocRef{
                    .loc = alias_loc,
                    .ref = p.storeNameInRef(alias) catch unreachable,
                };
                const original_name = alias;

                // The name can actually be a keyword if we're really an "export from"
                // statement. However, we won't know until later. Allow keywords as
                // identifiers for now and throw an error later if there's no "from".
                //
                // // This is fine
                // export { default } from 'path'
                //
                // // This is a syntax error
                // export { default }
                //
                if (p.lexer.token != .t_identifier and first_non_identifier_loc.start == 0) {
                    first_non_identifier_loc = p.lexer.loc();
                }
                try p.lexer.next();

                if (comptime is_typescript_enabled) {
                    if (strings.eqlComptime(alias, "type") and p.lexer.token != .t_comma and p.lexer.token != .t_close_brace) {
                        if (p.lexer.isContextualKeyword("as")) {
                            try p.lexer.next();

                            if (p.lexer.isContextualKeyword("as")) {
                                alias = try p.parseClauseAlias("export");
                                alias_loc = p.lexer.loc();
                                try p.lexer.next();

                                if (p.lexer.token != .t_comma and p.lexer.token != .t_close_brace) {
                                    // "export { type as as as }"
                                    // "export { type as as foo }"
                                    // "export { type as as 'foo' }"
                                    _ = p.parseClauseAlias("export") catch "";
                                    had_type_only_exports = true;
                                    try p.lexer.next();
                                } else {
                                    // "export { type as as }"
                                    items.append(js_ast.ClauseItem{
                                        .alias = alias,
                                        .alias_loc = alias_loc,
                                        .name = name,
                                        .original_name = original_name,
                                    }) catch unreachable;
                                }
                            } else if (p.lexer.token != .t_comma and p.lexer.token != .t_close_brace) {
                                // "export { type as xxx }"
                                // "export { type as 'xxx' }"
                                alias = try p.parseClauseAlias("export");
                                alias_loc = p.lexer.loc();
                                try p.lexer.next();

                                items.append(js_ast.ClauseItem{
                                    .alias = alias,
                                    .alias_loc = alias_loc,
                                    .name = name,
                                    .original_name = original_name,
                                }) catch unreachable;
                            } else {
                                had_type_only_exports = true;
                            }
                        } else {
                            // The name can actually be a keyword if we're really an "export from"
                            // statement. However, we won't know until later. Allow keywords as
                            // identifiers for now and throw an error later if there's no "from".
                            //
                            // // This is fine
                            // export { default } from 'path'
                            //
                            // // This is a syntax error
                            // export { default }
                            //
                            if (p.lexer.token != .t_identifier and first_non_identifier_loc.start == 0) {
                                first_non_identifier_loc = p.lexer.loc();
                            }

                            // "export { type xx }"
                            // "export { type xx as yy }"
                            // "export { type xx as if }"
                            // "export { type default } from 'path'"
                            // "export { type default as if } from 'path'"
                            // "export { type xx as 'yy' }"
                            // "export { type 'xx' } from 'mod'"
                            _ = p.parseClauseAlias("export") catch "";
                            try p.lexer.next();

                            if (p.lexer.isContextualKeyword("as")) {
                                try p.lexer.next();
                                _ = p.parseClauseAlias("export") catch "";
                                try p.lexer.next();
                            }

                            had_type_only_exports = true;
                        }
                    } else {
                        if (p.lexer.isContextualKeyword("as")) {
                            try p.lexer.next();
                            alias = try p.parseClauseAlias("export");
                            alias_loc = p.lexer.loc();

                            try p.lexer.next();
                        }

                        items.append(js_ast.ClauseItem{
                            .alias = alias,
                            .alias_loc = alias_loc,
                            .name = name,
                            .original_name = original_name,
                        }) catch unreachable;
                    }
                } else {
                    if (p.lexer.isContextualKeyword("as")) {
                        try p.lexer.next();
                        alias = try p.parseClauseAlias("export");
                        alias_loc = p.lexer.loc();

                        try p.lexer.next();
                    }

                    items.append(js_ast.ClauseItem{
                        .alias = alias,
                        .alias_loc = alias_loc,
                        .name = name,
                        .original_name = original_name,
                    }) catch unreachable;
                }

                // we're done if there's no comma
                if (p.lexer.token != .t_comma) {
                    break;
                }

                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }
                try p.lexer.next();
                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }
            }

            if (p.lexer.has_newline_before) {
                is_single_line = false;
            }
            try p.lexer.expect(.t_close_brace);

            // Throw an error here if we found a keyword earlier and this isn't an
            // "export from" statement after all
            if (first_non_identifier_loc.start != 0 and !p.lexer.isContextualKeyword("from")) {
                const r = js_lexer.rangeOfIdentifier(p.source, first_non_identifier_loc);
                try p.lexer.addRangeError(r, "Expected identifier but found \"{s}\"", .{p.source.textForRange(r)}, true);
                return error.SyntaxError;
            }

            return ExportClauseResult{
                .clauses = items.items,
                .is_single_line = is_single_line,
                .had_type_only_exports = had_type_only_exports,
            };
        }
    };
}

const bun = @import("bun");
const assert = bun.assert;
const js_lexer = bun.js_lexer;
const logger = bun.logger;
const strings = bun.strings;

const js_ast = bun.ast;
const E = js_ast.E;
const Expr = js_ast.Expr;
const LocRef = js_ast.LocRef;

const Op = js_ast.Op;
const Level = js_ast.Op.Level;

const js_parser = bun.js_parser;
const ExportClauseResult = js_parser.ExportClauseResult;
const ImportClause = js_parser.ImportClause;
const JSXTransformType = js_parser.JSXTransformType;
const isEvalOrArguments = js_parser.isEvalOrArguments;
const options = js_parser.options;

const std = @import("std");
const ListManaged = std.ArrayList;
319
src/ast/parseJSXElement.zig
Normal file
319
src/ast/parseJSXElement.zig
Normal file
@@ -0,0 +1,319 @@
pub fn ParseJSXElement(
    comptime parser_feature__typescript: bool,
    comptime parser_feature__jsx: JSXTransformType,
    comptime parser_feature__scan_only: bool,
) type {
    return struct {
        const P = js_parser.NewParser_(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only);
        const is_typescript_enabled = P.is_typescript_enabled;
        const only_scan_imports_and_do_not_visit = P.only_scan_imports_and_do_not_visit;

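        /// Parses one JSX element or fragment; the caller has already consumed
        /// the opening "<".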
        pub fn parseJSXElement(noalias p: *P, loc: logger.Loc) anyerror!Expr {
            if (only_scan_imports_and_do_not_visit) {
                p.needs_jsx_import = true;
            }

            const tag = try JSXTag.parse(P, p);

            // The tag may have TypeScript type arguments: "<Foo<T>/>"
            if (is_typescript_enabled) {
                // Pass a flag to the type argument skipper because we need to call
                // "nextInsideJSXElement" after we hit the closing ">".
                _ = try p.skipTypeScriptTypeArguments(true);
            }

            var previous_string_with_backslash_loc = logger.Loc{};
            var properties = G.Property.List{};
            var key_prop_i: i32 = -1;
            var flags = Flags.JSXElement.Bitset{};
            var start_tag: ?ExprNodeIndex = null;

            // Fragments don't have props
            // Fragments of the form "React.Fragment" are not parsed as fragments.
            if (@as(JSXTag.TagType, tag.data) == .tag) {
                start_tag = tag.data.tag;

                var spread_loc: logger.Loc = logger.Loc.Empty;
                var props = ListManaged(G.Property).init(p.allocator);
                var first_spread_prop_i: i32 = -1;
                var i: i32 = 0;
                parse_attributes: while (true) {
                    switch (p.lexer.token) {
                        .t_identifier => {
                            defer i += 1;
                            // Parse the prop name
                            const key_range = p.lexer.range();
                            const prop_name_literal = p.lexer.identifier;
                            const special_prop = E.JSXElement.SpecialProp.Map.get(prop_name_literal) orelse E.JSXElement.SpecialProp.any;
                            try p.lexer.nextInsideJSXElement();

                            if (special_prop == .key) {
                                // <ListItem key>
                                if (p.lexer.token != .t_equals) {
                                    // Unlike Babel, we're going to just warn here and move on.
                                    try p.log.addWarning(p.source, key_range.loc, "\"key\" prop ignored. Must be a string, number or symbol.");
                                    continue;
                                }

                                key_prop_i = i;
                            }

                            const prop_name = p.newExpr(E.String{ .data = prop_name_literal }, key_range.loc);

                            // Parse the value
                            var value: Expr = undefined;
                            if (p.lexer.token != .t_equals) {

                                // Implicitly true value
                                // <button selected>
                                value = p.newExpr(E.Boolean{ .value = true }, logger.Loc{ .start = key_range.loc.start + key_range.len });
                            } else {
                                value = try p.parseJSXPropValueIdentifier(&previous_string_with_backslash_loc);
                            }

                            try props.append(G.Property{ .key = prop_name, .value = value });
                        },
                        .t_open_brace => {
                            defer i += 1;
                            // Use Next() not ExpectInsideJSXElement() so we can parse "..."
                            try p.lexer.next();

                            switch (p.lexer.token) {
                                .t_dot_dot_dot => {
                                    try p.lexer.next();

                                    if (first_spread_prop_i == -1) first_spread_prop_i = i;
                                    spread_loc = p.lexer.loc();
                                    try props.append(G.Property{ .value = try p.parseExpr(.comma), .kind = .spread });
                                },
                                // This implements
                                //   <div {foo} />
                                //   ->
                                //   <div foo={foo} />
                                T.t_identifier => {
                                    // we need to figure out what the key they mean is
                                    // to do that, we must determine the key name
                                    const expr = try p.parseExpr(Level.lowest);

                                    const key = brk: {
                                        switch (expr.data) {
                                            .e_import_identifier => |ident| {
                                                break :brk p.newExpr(E.String{ .data = p.loadNameFromRef(ident.ref) }, expr.loc);
                                            },
                                            .e_commonjs_export_identifier => |ident| {
                                                break :brk p.newExpr(E.String{ .data = p.loadNameFromRef(ident.ref) }, expr.loc);
                                            },
                                            .e_identifier => |ident| {
                                                break :brk p.newExpr(E.String{ .data = p.loadNameFromRef(ident.ref) }, expr.loc);
                                            },
                                            .e_dot => |dot| {
                                                break :brk p.newExpr(E.String{ .data = dot.name }, dot.name_loc);
                                            },
                                            .e_index => |index| {
                                                if (index.index.data == .e_string) {
                                                    break :brk index.index;
                                                }
                                            },
                                            else => {},
                                        }

                                        // If we get here, it's invalid
                                        try p.log.addError(p.source, expr.loc, "Invalid JSX prop shorthand, must be identifier, dot or string");
                                        return error.SyntaxError;
                                    };

                                    try props.append(G.Property{ .value = expr, .key = key, .kind = .normal });
                                },
                                // This implements
                                //   <div {"foo"} />
                                //   <div {'foo'} />
                                //   ->
                                //   <div foo="foo" />
                                // note: template literals are not supported, operations on strings are not supported either
                                T.t_string_literal => {
                                    const key = p.newExpr(try p.lexer.toEString(), p.lexer.loc());
                                    try p.lexer.next();
                                    try props.append(G.Property{ .value = key, .key = key, .kind = .normal });
                                },

                                else => try p.lexer.unexpected(),
                            }

                            try p.lexer.nextInsideJSXElement();
                        },
                        else => {
                            break :parse_attributes;
                        },
                    }
                }

                const is_key_after_spread = key_prop_i > -1 and first_spread_prop_i > -1 and key_prop_i > first_spread_prop_i;
                flags.setPresent(.is_key_after_spread, is_key_after_spread);
                properties = G.Property.List.fromList(props);
                if (is_key_after_spread and p.options.jsx.runtime == .automatic and !p.has_classic_runtime_warned) {
                    try p.log.addWarning(p.source, spread_loc, "\"key\" prop after a {...spread} is deprecated in JSX. Falling back to classic runtime.");
                    p.has_classic_runtime_warned = true;
                }
            }

            // People sometimes try to use the output of "JSON.stringify()" as a JSX
            // attribute when automatically-generating JSX code. Doing so is incorrect
            // because JSX strings work like XML instead of like JS (since JSX is XML-in-
            // JS). Specifically, using a backslash before a quote does not cause it to
            // be escaped:
            //
            //   JSX ends the "content" attribute here and sets "content" to 'some so-called \\'
            //                                          v
            //   <Button content="some so-called \"button text\"" />
            //                                                  ^
            //   There is no "=" after the JSX attribute "text", so we expect a ">"
            //
            // This code special-cases this error to provide a less obscure error message.
            if (p.lexer.token == .t_syntax_error and strings.eqlComptime(p.lexer.raw(), "\\") and previous_string_with_backslash_loc.start > 0) {
                const r = p.lexer.range();
                // Not dealing with this right now.
                try p.log.addRangeError(p.source, r, "Invalid JSX escape - use XML entity codes quotes or pass a JavaScript string instead");
                return error.SyntaxError;
            }

            // A slash here is a self-closing element
            if (p.lexer.token == .t_slash) {
                const close_tag_loc = p.lexer.loc();
                // Use NextInsideJSXElement() not Next() so we can parse ">>" as ">"

                try p.lexer.nextInsideJSXElement();

                if (p.lexer.token != .t_greater_than) {
                    try p.lexer.expected(.t_greater_than);
                }

                return p.newExpr(E.JSXElement{
                    .tag = start_tag,
                    .properties = properties,
                    .key_prop_index = key_prop_i,
                    .flags = flags,
                    .close_tag_loc = close_tag_loc,
                }, loc);
            }

            // Use ExpectJSXElementChild() so we parse child strings
            try p.lexer.expectJSXElementChild(.t_greater_than);
            var children = ListManaged(Expr).init(p.allocator);
            // var last_element_i: usize = 0;

            while (true) {
                switch (p.lexer.token) {
                    .t_string_literal => {
                        try children.append(p.newExpr(try p.lexer.toEString(), loc));
                        try p.lexer.nextJSXElementChild();
                    },
                    .t_open_brace => {
                        // Use Next() instead of NextJSXElementChild() here since the next token is an expression
                        try p.lexer.next();

                        const is_spread = p.lexer.token == .t_dot_dot_dot;
                        if (is_spread) {
                            try p.lexer.next();
                        }

                        // The expression is optional, and may be absent
                        if (p.lexer.token != .t_close_brace) {
                            var item = try p.parseExpr(.lowest);
                            if (is_spread) {
                                item = p.newExpr(E.Spread{ .value = item }, loc);
                            }
                            try children.append(item);
                        }

                        // Use ExpectJSXElementChild() so we parse child strings
                        try p.lexer.expectJSXElementChild(.t_close_brace);
                    },
                    .t_less_than => {
                        const less_than_loc = p.lexer.loc();
                        try p.lexer.nextInsideJSXElement();

                        if (p.lexer.token != .t_slash) {
                            // This is a child element

                            children.append(try p.parseJSXElement(less_than_loc)) catch unreachable;

                            // The call to parseJSXElement() above doesn't consume the last
                            // TGreaterThan because the caller knows what Next() function to call.
                            // Use NextJSXElementChild() here since the next token is an element
                            // child.
                            try p.lexer.nextJSXElementChild();
                            continue;
                        }

                        // This is the closing element
                        try p.lexer.nextInsideJSXElement();
                        const end_tag = try JSXTag.parse(P, p);

                        if (!strings.eql(end_tag.name, tag.name)) {
                            try p.log.addRangeErrorFmtWithNote(
                                p.source,
                                end_tag.range,
                                p.allocator,
                                "Expected closing JSX tag to match opening tag \"\\<{s}\\>\"",
                                .{tag.name},
                                "Opening tag here:",
                                .{},
                                tag.range,
                            );
                            return error.SyntaxError;
                        }

                        if (p.lexer.token != .t_greater_than) {
                            try p.lexer.expected(.t_greater_than);
                        }

                        return p.newExpr(E.JSXElement{
                            .tag = end_tag.data.asExpr(),
                            .children = ExprNodeList.fromList(children),
                            .properties = properties,
                            .key_prop_index = key_prop_i,
                            .flags = flags,
                            .close_tag_loc = end_tag.range.loc,
                        }, loc);
                    },
                    else => {
                        try p.lexer.unexpected();
                        return error.SyntaxError;
                    },
                }
            }
        }
    };
}

const string = []const u8;

const bun = @import("bun");
const logger = bun.logger;
const strings = bun.strings;

const js_ast = bun.ast;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;
const ExprNodeList = js_ast.ExprNodeList;
const Flags = js_ast.Flags;

const G = js_ast.G;
const Property = G.Property;

const Op = js_ast.Op;
const Level = js_ast.Op.Level;

const js_lexer = bun.js_lexer;
const T = js_lexer.T;

const js_parser = bun.js_parser;
const JSXTag = js_parser.JSXTag;
const JSXTransformType = js_parser.JSXTransformType;
const TypeScript = js_parser.TypeScript;
const options = js_parser.options;

const std = @import("std");
const List = std.ArrayListUnmanaged;
const ListManaged = std.ArrayList;
const Map = std.AutoHashMapUnmanaged;
692
src/ast/parsePrefix.zig
Normal file
692
src/ast/parsePrefix.zig
Normal file
@@ -0,0 +1,692 @@
pub fn ParsePrefix(
    comptime parser_feature__typescript: bool,
    comptime parser_feature__jsx: JSXTransformType,
    comptime parser_feature__scan_only: bool,
) type {
    return struct {
        const P = js_parser.NewParser_(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only);
        const is_jsx_enabled = P.is_jsx_enabled;
        const is_typescript_enabled = P.is_typescript_enabled;

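        /// Parses a prefix expression: a literal, identifier, unary operator, or
        /// other construct that can begin an expression. "level" is the current
        /// operator precedence and gates constructs such as arrow bodies.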
        pub fn parsePrefix(noalias p: *P, level: Level, noalias errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr {
            const loc = p.lexer.loc();
            const l = @intFromEnum(level);
            // Output.print("Parse Prefix {s}:{s} @{s} ", .{ p.lexer.token, p.lexer.raw(), @tagName(level) });

            switch (p.lexer.token) {
                .t_super => {
                    const superRange = p.lexer.range();
                    try p.lexer.next();

                    switch (p.lexer.token) {
                        .t_open_paren => {
                            if (l < @intFromEnum(Level.call) and p.fn_or_arrow_data_parse.allow_super_call) {
                                return p.newExpr(E.Super{}, loc);
                            }
                        },
                        .t_dot, .t_open_bracket => {
                            if (p.fn_or_arrow_data_parse.allow_super_property) {
                                return p.newExpr(E.Super{}, loc);
                            }
                        },
                        else => {},
                    }

                    p.log.addRangeError(p.source, superRange, "Unexpected \"super\"") catch unreachable;
                    return p.newExpr(E.Super{}, loc);
                },
                .t_open_paren => {
                    try p.lexer.next();

                    // Arrow functions aren't allowed in the middle of expressions
                    if (level.gt(.assign)) {
                        // Allow "in" inside parentheses
                        const oldAllowIn = p.allow_in;
                        p.allow_in = true;

                        var value = try p.parseExpr(Level.lowest);
                        p.markExprAsParenthesized(&value);
                        try p.lexer.expect(.t_close_paren);

                        p.allow_in = oldAllowIn;
                        return value;
                    }

                    return p.parseParenExpr(loc, level, ParenExprOpts{});
                },
                .t_false => {
                    try p.lexer.next();
                    return p.newExpr(E.Boolean{ .value = false }, loc);
                },
                .t_true => {
                    try p.lexer.next();
                    return p.newExpr(E.Boolean{ .value = true }, loc);
                },
                .t_null => {
                    try p.lexer.next();
                    return p.newExpr(E.Null{}, loc);
                },
                .t_this => {
                    if (p.fn_or_arrow_data_parse.is_this_disallowed) {
                        p.log.addRangeError(p.source, p.lexer.range(), "Cannot use \"this\" here") catch unreachable;
                    }
                    try p.lexer.next();
                    return Expr{ .data = Prefill.Data.This, .loc = loc };
                },
                .t_private_identifier => {
                    if (!p.allow_private_identifiers or !p.allow_in or level.gte(.compare)) {
                        try p.lexer.unexpected();
                        return error.SyntaxError;
                    }

                    const name = p.lexer.identifier;
                    try p.lexer.next();

                    // Check for "#foo in bar"
                    if (p.lexer.token != .t_in) {
                        try p.lexer.expected(.t_in);
                    }

                    return p.newExpr(E.PrivateIdentifier{ .ref = try p.storeNameInRef(name) }, loc);
                },
                .t_identifier => {
                    const name = p.lexer.identifier;
                    const name_range = p.lexer.range();
                    const raw = p.lexer.raw();

                    try p.lexer.next();

                    // Handle async and await expressions
                    switch (AsyncPrefixExpression.find(name)) {
                        .is_async => {
                            if ((raw.ptr == name.ptr and raw.len == name.len) or AsyncPrefixExpression.find(raw) == .is_async) {
                                return try p.parseAsyncPrefixExpr(name_range, level);
                            }
                        },

                        .is_await => {
                            switch (p.fn_or_arrow_data_parse.allow_await) {
                                .forbid_all => {
                                    p.log.addRangeError(p.source, name_range, "The keyword \"await\" cannot be used here") catch unreachable;
                                },
                                .allow_expr => {
                                    if (AsyncPrefixExpression.find(raw) != .is_await) {
                                        p.log.addRangeError(p.source, name_range, "The keyword \"await\" cannot be escaped") catch unreachable;
                                    } else {
                                        if (p.fn_or_arrow_data_parse.is_top_level) {
                                            p.top_level_await_keyword = name_range;
                                        }

                                        if (p.fn_or_arrow_data_parse.track_arrow_arg_errors) {
                                            p.fn_or_arrow_data_parse.arrow_arg_errors.invalid_expr_await = name_range;
                                        }

                                        const value = try p.parseExpr(.prefix);
                                        if (p.lexer.token == T.t_asterisk_asterisk) {
                                            try p.lexer.unexpected();
                                            return error.SyntaxError;
                                        }

                                        return p.newExpr(E.Await{ .value = value }, loc);
                                    }
                                },
                                .allow_ident => {
                                    p.lexer.prev_token_was_await_keyword = true;
                                    p.lexer.await_keyword_loc = name_range.loc;
                                    p.lexer.fn_or_arrow_start_loc = p.fn_or_arrow_data_parse.needs_async_loc;
                                },
                            }
                        },

                        .is_yield => {
                            switch (p.fn_or_arrow_data_parse.allow_yield) {
                                .forbid_all => {
                                    p.log.addRangeError(p.source, name_range, "The keyword \"yield\" cannot be used here") catch unreachable;
                                },
                                .allow_expr => {
                                    if (AsyncPrefixExpression.find(raw) != .is_yield) {
                                        p.log.addRangeError(p.source, name_range, "The keyword \"yield\" cannot be escaped") catch unreachable;
                                    } else {
                                        if (level.gt(.assign)) {
                                            p.log.addRangeError(p.source, name_range, "Cannot use a \"yield\" here without parentheses") catch unreachable;
                                        }

                                        if (p.fn_or_arrow_data_parse.track_arrow_arg_errors) {
                                            p.fn_or_arrow_data_parse.arrow_arg_errors.invalid_expr_yield = name_range;
                                        }

                                        return p.parseYieldExpr(loc);
                                    }
                                },
                                // .allow_ident => {

                                // },
                                else => {
                                    // Try to gracefully recover if "yield" is used in the wrong place
                                    if (!p.lexer.has_newline_before) {
                                        switch (p.lexer.token) {
                                            .t_null, .t_identifier, .t_false, .t_true, .t_numeric_literal, .t_big_integer_literal, .t_string_literal => {
                                                p.log.addRangeError(p.source, name_range, "Cannot use \"yield\" outside a generator function") catch unreachable;
                                            },
                                            else => {},
                                        }
                                    }
                                },
                            }
                        },
                        .none => {},
                    }

                    // Handle the start of an arrow expression
                    if (p.lexer.token == .t_equals_greater_than and level.lte(.assign)) {
                        const ref = p.storeNameInRef(name) catch unreachable;
                        var args = p.allocator.alloc(Arg, 1) catch unreachable;
                        args[0] = Arg{ .binding = p.b(B.Identifier{
                            .ref = ref,
                        }, loc) };

                        _ = p.pushScopeForParsePass(.function_args, loc) catch unreachable;
                        defer p.popScope();

                        var fn_or_arrow_data = FnOrArrowDataParse{
                            .needs_async_loc = loc,
                        };
                        return p.newExpr(try p.parseArrowBody(args, &fn_or_arrow_data), loc);
                    }

                    const ref = p.storeNameInRef(name) catch unreachable;

                    return Expr.initIdentifier(ref, loc);
                },
                .t_string_literal, .t_no_substitution_template_literal => {
                    return try p.parseStringLiteral();
                },
                .t_template_head => {
                    const head = try p.lexer.toEString();

                    const parts = try p.parseTemplateParts(false);

// Check if TemplateLiteral is unsupported. We don't care for this product.`
|
||||
// if ()
|
||||
|
||||
return p.newExpr(E.Template{
|
||||
.head = .{ .cooked = head },
|
||||
.parts = parts,
|
||||
}, loc);
|
||||
},
|
||||
.t_numeric_literal => {
|
||||
const value = p.newExpr(E.Number{ .value = p.lexer.number }, loc);
|
||||
// p.checkForLegacyOctalLiteral()
|
||||
try p.lexer.next();
|
||||
return value;
|
||||
},
|
||||
.t_big_integer_literal => {
|
||||
const value = p.lexer.identifier;
|
||||
// markSyntaxFeature bigInt
|
||||
try p.lexer.next();
|
||||
return p.newExpr(E.BigInt{ .value = value }, loc);
|
||||
},
|
||||
.t_slash, .t_slash_equals => {
|
||||
try p.lexer.scanRegExp();
|
||||
// always set regex_flags_start to null to make sure we don't accidentally use the wrong value later
|
||||
defer p.lexer.regex_flags_start = null;
|
||||
const value = p.lexer.raw();
|
||||
try p.lexer.next();
|
||||
|
||||
return p.newExpr(E.RegExp{ .value = value, .flags_offset = p.lexer.regex_flags_start }, loc);
|
||||
},
|
||||
.t_void => {
|
||||
try p.lexer.next();
|
||||
const value = try p.parseExpr(.prefix);
|
||||
if (p.lexer.token == .t_asterisk_asterisk) {
|
||||
try p.lexer.unexpected();
|
||||
return error.SyntaxError;
|
||||
}
|
||||
|
||||
return p.newExpr(E.Unary{
|
||||
.op = .un_void,
|
||||
.value = value,
|
||||
}, loc);
|
||||
},
|
||||
.t_typeof => {
|
||||
try p.lexer.next();
|
||||
const value = try p.parseExpr(.prefix);
|
||||
if (p.lexer.token == .t_asterisk_asterisk) {
|
||||
try p.lexer.unexpected();
|
||||
return error.SyntaxError;
|
||||
}
|
||||
|
||||
return p.newExpr(E.Unary{ .op = .un_typeof, .value = value }, loc);
|
||||
},
|
||||
.t_delete => {
|
||||
try p.lexer.next();
|
||||
const value = try p.parseExpr(.prefix);
|
||||
if (p.lexer.token == .t_asterisk_asterisk) {
|
||||
try p.lexer.unexpected();
|
||||
return error.SyntaxError;
|
||||
}
|
||||
if (value.data == .e_index) {
|
||||
if (value.data.e_index.index.data == .e_private_identifier) {
|
||||
const private = value.data.e_index.index.data.e_private_identifier;
|
||||
const name = p.loadNameFromRef(private.ref);
|
||||
const range = logger.Range{ .loc = value.loc, .len = @as(i32, @intCast(name.len)) };
|
||||
p.log.addRangeErrorFmt(p.source, range, p.allocator, "Deleting the private name \"{s}\" is forbidden", .{name}) catch unreachable;
|
||||
}
|
||||
}
|
||||
|
||||
return p.newExpr(E.Unary{ .op = .un_delete, .value = value }, loc);
|
||||
},
|
||||
.t_plus => {
|
||||
try p.lexer.next();
|
||||
const value = try p.parseExpr(.prefix);
|
||||
if (p.lexer.token == .t_asterisk_asterisk) {
|
||||
try p.lexer.unexpected();
|
||||
return error.SyntaxError;
|
||||
}
|
||||
|
||||
return p.newExpr(E.Unary{ .op = .un_pos, .value = value }, loc);
|
||||
},
|
||||
.t_minus => {
|
||||
try p.lexer.next();
|
||||
const value = try p.parseExpr(.prefix);
|
||||
if (p.lexer.token == .t_asterisk_asterisk) {
|
||||
try p.lexer.unexpected();
|
||||
return error.SyntaxError;
|
||||
}
|
||||
|
||||
return p.newExpr(E.Unary{ .op = .un_neg, .value = value }, loc);
|
||||
},
|
||||
.t_tilde => {
|
||||
try p.lexer.next();
|
||||
const value = try p.parseExpr(.prefix);
|
||||
if (p.lexer.token == .t_asterisk_asterisk) {
|
||||
try p.lexer.unexpected();
|
||||
return error.SyntaxError;
|
||||
}
|
||||
|
||||
return p.newExpr(E.Unary{ .op = .un_cpl, .value = value }, loc);
|
||||
},
|
||||
.t_exclamation => {
|
||||
try p.lexer.next();
|
||||
const value = try p.parseExpr(.prefix);
|
||||
if (p.lexer.token == .t_asterisk_asterisk) {
|
||||
try p.lexer.unexpected();
|
||||
return error.SyntaxError;
|
||||
}
|
||||
|
||||
return p.newExpr(E.Unary{ .op = .un_not, .value = value }, loc);
|
||||
},
|
||||
.t_minus_minus => {
|
||||
try p.lexer.next();
|
||||
return p.newExpr(E.Unary{ .op = .un_pre_dec, .value = try p.parseExpr(.prefix) }, loc);
|
||||
},
|
||||
.t_plus_plus => {
|
||||
try p.lexer.next();
|
||||
return p.newExpr(E.Unary{ .op = .un_pre_inc, .value = try p.parseExpr(.prefix) }, loc);
|
||||
},
|
||||
.t_function => {
|
||||
return try p.parseFnExpr(loc, false, logger.Range.None);
|
||||
},
|
||||
.t_class => {
|
||||
const classKeyword = p.lexer.range();
|
||||
// markSyntaxFEatuer class
|
||||
try p.lexer.next();
|
||||
var name: ?js_ast.LocRef = null;
|
||||
|
||||
_ = p.pushScopeForParsePass(.class_name, loc) catch unreachable;
|
||||
|
||||
// Parse an optional class name
|
||||
if (p.lexer.token == .t_identifier) {
|
||||
const name_text = p.lexer.identifier;
|
||||
if (!is_typescript_enabled or !strings.eqlComptime(name_text, "implements")) {
|
||||
if (p.fn_or_arrow_data_parse.allow_await != .allow_ident and strings.eqlComptime(name_text, "await")) {
|
||||
p.log.addRangeError(p.source, p.lexer.range(), "Cannot use \"await\" as an identifier here") catch unreachable;
|
||||
}
|
||||
|
||||
name = js_ast.LocRef{
|
||||
.loc = p.lexer.loc(),
|
||||
.ref = p.newSymbol(
|
||||
.other,
|
||||
name_text,
|
||||
) catch unreachable,
|
||||
};
|
||||
try p.lexer.next();
|
||||
}
|
||||
}
|
||||
|
||||
// Even anonymous classes can have TypeScript type parameters
|
||||
if (is_typescript_enabled) {
|
||||
_ = try p.skipTypeScriptTypeParameters(.{ .allow_in_out_variance_annotations = true, .allow_const_modifier = true });
|
||||
}
|
||||
|
||||
const class = try p.parseClass(classKeyword, name, ParseClassOptions{});
|
||||
p.popScope();
|
||||
|
||||
return p.newExpr(class, loc);
|
||||
},
|
||||
.t_new => {
|
||||
try p.lexer.next();
|
||||
|
||||
// Special-case the weird "new.target" expression here
|
||||
if (p.lexer.token == .t_dot) {
|
||||
try p.lexer.next();
|
||||
|
||||
if (p.lexer.token != .t_identifier or !strings.eqlComptime(p.lexer.raw(), "target")) {
|
||||
try p.lexer.unexpected();
|
||||
return error.SyntaxError;
|
||||
}
|
||||
const range = logger.Range{ .loc = loc, .len = p.lexer.range().end().start - loc.start };
|
||||
|
||||
try p.lexer.next();
|
||||
return p.newExpr(E.NewTarget{ .range = range }, loc);
|
||||
}
|
||||
|
||||
const target = try p.parseExprWithFlags(.member, flags);
|
||||
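                    // The argument list is optional ("new Foo" means "new Foo()"),
                    // so args stays empty unless "(" follows the target.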
                    var args = ExprNodeList{};

                    if (comptime is_typescript_enabled) {
                        // Skip over TypeScript type arguments here if there are any
                        if (p.lexer.token == .t_less_than) {
                            _ = p.trySkipTypeScriptTypeArgumentsWithBacktracking();
                        }
                    }

                    var close_parens_loc = logger.Loc.Empty;
                    if (p.lexer.token == .t_open_paren) {
                        const call_args = try p.parseCallArgs();
                        args = call_args.list;
                        close_parens_loc = call_args.loc;
                    }

                    return p.newExpr(E.New{
                        .target = target,
                        .args = args,
                        .close_parens_loc = close_parens_loc,
                    }, loc);
                },
                .t_open_bracket => {
                    try p.lexer.next();
                    var is_single_line = !p.lexer.has_newline_before;
                    var items = ListManaged(Expr).init(p.allocator);
                    var self_errors = DeferredErrors{};
                    var comma_after_spread = logger.Loc{};

                    // Allow "in" inside arrays
                    const old_allow_in = p.allow_in;
                    p.allow_in = true;

                    while (p.lexer.token != .t_close_bracket) {
                        switch (p.lexer.token) {
                            .t_comma => {
                                items.append(Expr{ .data = Prefill.Data.EMissing, .loc = p.lexer.loc() }) catch unreachable;
                            },
                            .t_dot_dot_dot => {
                                if (errors != null)
                                    errors.?.array_spread_feature = p.lexer.range();

                                const dots_loc = p.lexer.loc();
                                try p.lexer.next();
                                items.append(
                                    p.newExpr(E.Spread{ .value = try p.parseExprOrBindings(.comma, &self_errors) }, dots_loc),
                                ) catch unreachable;

                                // Commas are not allowed here when destructuring
                                if (p.lexer.token == .t_comma) {
                                    comma_after_spread = p.lexer.loc();
                                }
                            },
                            else => {
                                items.append(
                                    try p.parseExprOrBindings(.comma, &self_errors),
                                ) catch unreachable;
                            },
                        }

                        if (p.lexer.token != .t_comma) {
                            break;
                        }

                        if (p.lexer.has_newline_before) {
                            is_single_line = false;
                        }

                        try p.lexer.next();

                        if (p.lexer.has_newline_before) {
                            is_single_line = false;
                        }
                    }

                    if (p.lexer.has_newline_before) {
                        is_single_line = false;
                    }

                    const close_bracket_loc = p.lexer.loc();
                    try p.lexer.expect(.t_close_bracket);
                    p.allow_in = old_allow_in;

                    // Is this a binding pattern?
                    if (p.willNeedBindingPattern()) {
                        // noop
                    } else if (errors == null) {
                        // Is this an expression?
                        p.logExprErrors(&self_errors);
                    } else {
                        // In this case, we can't distinguish between the two yet
                        self_errors.mergeInto(errors.?);
                    }
                    return p.newExpr(E.Array{
                        .items = ExprNodeList.fromList(items),
                        .comma_after_spread = comma_after_spread.toNullable(),
                        .is_single_line = is_single_line,
                        .close_bracket_loc = close_bracket_loc,
                    }, loc);
                },
                .t_open_brace => {
                    try p.lexer.next();
                    var is_single_line = !p.lexer.has_newline_before;
                    var properties = ListManaged(G.Property).init(p.allocator);
                    var self_errors = DeferredErrors{};
                    var comma_after_spread: logger.Loc = logger.Loc{};

                    // Allow "in" inside object literals
                    const old_allow_in = p.allow_in;
                    p.allow_in = true;

                    while (p.lexer.token != .t_close_brace) {
                        if (p.lexer.token == .t_dot_dot_dot) {
                            try p.lexer.next();
                            properties.append(G.Property{ .kind = .spread, .value = try p.parseExpr(.comma) }) catch unreachable;

                            // Commas are not allowed here when destructuring
                            if (p.lexer.token == .t_comma) {
                                comma_after_spread = p.lexer.loc();
                            }
                        } else {
                            // This property may turn out to be a type in TypeScript, which should be ignored
                            var propertyOpts = PropertyOpts{};
                            if (try p.parseProperty(.normal, &propertyOpts, &self_errors)) |prop| {
                                if (comptime Environment.allow_assert) {
                                    assert(prop.key != null or prop.value != null);
                                }
                                properties.append(prop) catch unreachable;
                            }
                        }

                        if (p.lexer.token != .t_comma) {
                            break;
                        }

                        if (p.lexer.has_newline_before) {
                            is_single_line = false;
                        }

                        try p.lexer.next();

                        if (p.lexer.has_newline_before) {
                            is_single_line = false;
                        }
                    }

                    if (p.lexer.has_newline_before) {
                        is_single_line = false;
                    }

                    const close_brace_loc = p.lexer.loc();
                    try p.lexer.expect(.t_close_brace);
                    p.allow_in = old_allow_in;

                    if (p.willNeedBindingPattern()) {
                        // Is this a binding pattern?
                    } else if (errors == null) {
                        // Is this an expression?
                        p.logExprErrors(&self_errors);
                    } else {
                        // In this case, we can't distinguish between the two yet
                        self_errors.mergeInto(errors.?);
                    }

                    return p.newExpr(E.Object{
                        .properties = G.Property.List.fromList(properties),
                        .comma_after_spread = if (comma_after_spread.start > 0)
                            comma_after_spread
                        else
                            null,
                        .is_single_line = is_single_line,
                        .close_brace_loc = close_brace_loc,
                    }, loc);
                },
                .t_less_than => {
                    // This is a very complicated and highly ambiguous area of TypeScript
                    // syntax. Many similar-looking things are overloaded.
                    //
                    // TS:
                    //
                    //   A type cast:
                    //     <A>(x)
                    //     <[]>(x)
                    //     <A[]>(x)
                    //
                    //   An arrow function with type parameters:
                    //     <A>(x) => {}
                    //     <A, B>(x) => {}
                    //     <A = B>(x) => {}
                    //     <A extends B>(x) => {}
                    //
                    // TSX:
                    //
                    //   A JSX element:
                    //     <A>(x) => {}</A>
                    //     <A extends>(x) => {}</A>
                    //     <A extends={false}>(x) => {}</A>
                    //
                    //   An arrow function with type parameters:
                    //     <A, B>(x) => {}
                    //     <A extends B>(x) => {}
                    //
                    //   A syntax error:
                    //     <[]>(x)
                    //     <A[]>(x)
                    //     <A>(x) => {}
                    //     <A = B>(x) => {}
                    if (comptime is_typescript_enabled and is_jsx_enabled) {
                        if (try TypeScript.isTSArrowFnJSX(p)) {
                            _ = try p.skipTypeScriptTypeParameters(TypeParameterFlag{
                                .allow_const_modifier = true,
                            });
                            try p.lexer.expect(.t_open_paren);
                            return try p.parseParenExpr(loc, level, ParenExprOpts{ .force_arrow_fn = true });
                        }
                    }

                    if (is_jsx_enabled) {
                        // Use NextInsideJSXElement() instead of Next() so we parse "<<" as "<"
                        try p.lexer.nextInsideJSXElement();
                        const element = try p.parseJSXElement(loc);

                        // The call to parseJSXElement() above doesn't consume the last
                        // TGreaterThan because the caller knows what Next() function to call.
                        // Use Next() instead of NextInsideJSXElement() here since the next
                        // token is an expression.
                        try p.lexer.next();
                        return element;
                    }

                    if (is_typescript_enabled) {
                        // This is either an old-style type cast or a generic lambda function

                        // "<T>(x)"
                        // "<T>(x) => {}"
                        switch (p.trySkipTypeScriptTypeParametersThenOpenParenWithBacktracking()) {
                            .did_not_skip_anything => {},
                            else => |result| {
                                try p.lexer.expect(.t_open_paren);
                                return p.parseParenExpr(loc, level, ParenExprOpts{
                                    .force_arrow_fn = result == .definitely_type_parameters,
                                });
                            },
                        }

                        // "<T>x"
                        try p.lexer.next();
                        try p.skipTypeScriptType(.lowest);
                        try p.lexer.expectGreaterThan(false);
                        return p.parsePrefix(level, errors, flags);
                    }

                    try p.lexer.unexpected();
                    return error.SyntaxError;
                },
                .t_import => {
                    try p.lexer.next();
                    return p.parseImportExpr(loc, level);
                },
                else => {
                    try p.lexer.unexpected();
                    return error.SyntaxError;
                },
            }
            return error.SyntaxError;
        }
    };
}

const bun = @import("bun");
const Environment = bun.Environment;
const Output = bun.Output;
const assert = bun.assert;
const logger = bun.logger;
const strings = bun.strings;

const js_ast = bun.ast;
const B = js_ast.B;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeList = js_ast.ExprNodeList;
const LocRef = js_ast.LocRef;

const G = js_ast.G;
const Arg = G.Arg;
const Property = G.Property;

const Op = js_ast.Op;
const Level = js_ast.Op.Level;

const js_lexer = bun.js_lexer;
const T = js_lexer.T;

const js_parser = bun.js_parser;
const AsyncPrefixExpression = js_parser.AsyncPrefixExpression;
const DeferredErrors = js_parser.DeferredErrors;
const FnOrArrowDataParse = js_parser.FnOrArrowDataParse;
const JSXTransformType = js_parser.JSXTransformType;
const ParenExprOpts = js_parser.ParenExprOpts;
const ParseClassOptions = js_parser.ParseClassOptions;
const Prefill = js_parser.Prefill;
const PropertyOpts = js_parser.PropertyOpts;
const TypeParameterFlag = js_parser.TypeParameterFlag;
const TypeScript = js_parser.TypeScript;

const std = @import("std");
const List = std.ArrayListUnmanaged;
const ListManaged = std.ArrayList;
556
src/ast/parseProperty.zig
Normal file
@@ -0,0 +1,556 @@
pub fn ParseProperty(
    comptime parser_feature__typescript: bool,
    comptime parser_feature__jsx: JSXTransformType,
    comptime parser_feature__scan_only: bool,
) type {
    return struct {
        const P = js_parser.NewParser_(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only);
        const is_typescript_enabled = P.is_typescript_enabled;

        pub fn parseProperty(p: *P, kind: Property.Kind, opts: *PropertyOpts, errors: ?*DeferredErrors) anyerror!?G.Property {
            var key: Expr = Expr{ .loc = logger.Loc.Empty, .data = .{ .e_missing = E.Missing{} } };
            const key_range = p.lexer.range();
            var is_computed = false;

            switch (p.lexer.token) {
                .t_numeric_literal => {
                    key = p.newExpr(E.Number{
                        .value = p.lexer.number,
                    }, p.lexer.loc());
                    // p.checkForLegacyOctalLiteral()
                    try p.lexer.next();
                },
                .t_string_literal => {
                    key = try p.parseStringLiteral();
                },
                .t_big_integer_literal => {
                    key = p.newExpr(E.BigInt{ .value = p.lexer.identifier }, p.lexer.loc());
                    // markSyntaxFeature
                    try p.lexer.next();
                },
                .t_private_identifier => {
                    if (!opts.is_class or opts.ts_decorators.len > 0) {
                        try p.lexer.expected(.t_identifier);
                    }

                    key = p.newExpr(E.PrivateIdentifier{ .ref = p.storeNameInRef(p.lexer.identifier) catch unreachable }, p.lexer.loc());
                    try p.lexer.next();
                },
                .t_open_bracket => {
                    is_computed = true;
                    // p.markSyntaxFeature(compat.objectExtensions, p.lexer.range())
                    try p.lexer.next();
                    const wasIdentifier = p.lexer.token == .t_identifier;
                    const expr = try p.parseExpr(.comma);

                    if (comptime is_typescript_enabled) {

                        // Handle index signatures
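                        // e.g. "class X { [key: string]: number }" is a type-only
                        // construct and produces no runtime property.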
                        if (p.lexer.token == .t_colon and wasIdentifier and opts.is_class) {
                            switch (expr.data) {
                                .e_identifier => {
                                    try p.lexer.next();
                                    try p.skipTypeScriptType(.lowest);
                                    try p.lexer.expect(.t_close_bracket);
                                    try p.lexer.expect(.t_colon);
                                    try p.skipTypeScriptType(.lowest);
                                    try p.lexer.expectOrInsertSemicolon();

                                    // Skip this property entirely
                                    return null;
                                },
                                else => {},
                            }
                        }
                    }

                    try p.lexer.expect(.t_close_bracket);
                    key = expr;
                },
                .t_asterisk => {
                    if (kind != .normal or opts.is_generator) {
                        try p.lexer.unexpected();
                        return error.SyntaxError;
                    }

                    try p.lexer.next();
                    opts.is_generator = true;
                    return try p.parseProperty(.normal, opts, errors);
                },

                else => {
                    const name = p.lexer.identifier;
                    const raw = p.lexer.raw();
                    const name_range = p.lexer.range();

                    if (!p.lexer.isIdentifierOrKeyword()) {
                        try p.lexer.expect(.t_identifier);
                    }

                    try p.lexer.next();

                    // Support contextual keywords
                    if (kind == .normal and !opts.is_generator) {
                        // Does the following token look like a key?
                        const couldBeModifierKeyword = p.lexer.isIdentifierOrKeyword() or switch (p.lexer.token) {
                            .t_open_bracket, .t_numeric_literal, .t_string_literal, .t_asterisk, .t_private_identifier => true,
                            else => false,
                        };

                        // If so, check for a modifier keyword
                        if (couldBeModifierKeyword) {
                            // TODO: micro-optimization, use a smaller list for non-typescript files.
                            if (js_lexer.PropertyModifierKeyword.List.get(name)) |keyword| {
                                switch (keyword) {
                                    .p_get => {
                                        if (!opts.is_async and (js_lexer.PropertyModifierKeyword.List.get(raw) orelse .p_static) == .p_get) {
                                            // p.markSyntaxFeature(ObjectAccessors, name_range)
                                            return try p.parseProperty(.get, opts, null);
                                        }
                                    },

                                    .p_set => {
                                        if (!opts.is_async and (js_lexer.PropertyModifierKeyword.List.get(raw) orelse .p_static) == .p_set) {
                                            // p.markSyntaxFeature(ObjectAccessors, name_range)
                                            return try p.parseProperty(.set, opts, null);
                                        }
                                    },
                                    .p_async => {
                                        if (!opts.is_async and (js_lexer.PropertyModifierKeyword.List.get(raw) orelse .p_static) == .p_async and !p.lexer.has_newline_before) {
                                            opts.is_async = true;
                                            opts.async_range = name_range;

                                            // p.markSyntaxFeature(ObjectAccessors, name_range)
                                            return try p.parseProperty(kind, opts, null);
                                        }
                                    },
                                    .p_static => {
                                        if (!opts.is_static and !opts.is_async and opts.is_class and (js_lexer.PropertyModifierKeyword.List.get(raw) orelse .p_get) == .p_static) {
                                            opts.is_static = true;
                                            return try p.parseProperty(kind, opts, null);
                                        }
                                    },
                                    .p_declare => {
                                        // skip declare keyword entirely
                                        // https://github.com/oven-sh/bun/issues/1907
                                        if (opts.is_class and is_typescript_enabled and strings.eqlComptime(raw, "declare")) {
                                            const scope_index = p.scopes_in_order.items.len;
                                            if (try p.parseProperty(kind, opts, null)) |_prop| {
                                                var prop = _prop;
                                                if (prop.kind == .normal and prop.value == null and opts.ts_decorators.len > 0) {
                                                    prop.kind = .declare;
                                                    return prop;
                                                }
                                            }

                                            p.discardScopesUpTo(scope_index);
                                            return null;
                                        }
                                    },
                                    .p_abstract => {
                                        if (opts.is_class and is_typescript_enabled and !opts.is_ts_abstract and strings.eqlComptime(raw, "abstract")) {
                                            opts.is_ts_abstract = true;
                                            const scope_index = p.scopes_in_order.items.len;
                                            if (try p.parseProperty(kind, opts, null)) |_prop| {
                                                var prop = _prop;
                                                if (prop.kind == .normal and prop.value == null and opts.ts_decorators.len > 0) {
                                                    prop.kind = .abstract;
                                                    return prop;
                                                }
                                            }
                                            p.discardScopesUpTo(scope_index);
                                            return null;
                                        }
                                    },
                                    .p_private, .p_protected, .p_public, .p_readonly, .p_override => {
                                        // Skip over TypeScript keywords
                                        if (opts.is_class and is_typescript_enabled and (js_lexer.PropertyModifierKeyword.List.get(raw) orelse .p_static) == keyword) {
                                            return try p.parseProperty(kind, opts, null);
                                        }
                                    },
                                }
                            }
                        } else if (p.lexer.token == .t_open_brace and strings.eqlComptime(name, "static")) {
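                            // Parse a TC39 class static initialization block:
                            // "class Foo { static { ... } }"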
                            const loc = p.lexer.loc();
                            try p.lexer.next();

                            const old_fn_or_arrow_data_parse = p.fn_or_arrow_data_parse;
                            p.fn_or_arrow_data_parse = .{
                                .is_return_disallowed = true,
                                .allow_super_property = true,
                                .allow_await = .forbid_all,
                            };

                            _ = try p.pushScopeForParsePass(.class_static_init, loc);
                            var _parse_opts = ParseStatementOptions{};
                            const stmts = try p.parseStmtsUpTo(.t_close_brace, &_parse_opts);

                            p.popScope();

                            p.fn_or_arrow_data_parse = old_fn_or_arrow_data_parse;
                            try p.lexer.expect(.t_close_brace);

                            const block = p.allocator.create(
                                G.ClassStaticBlock,
                            ) catch unreachable;

                            block.* = G.ClassStaticBlock{
                                .stmts = js_ast.BabyList(Stmt).init(stmts),
                                .loc = loc,
                            };

                            return G.Property{
                                .kind = .class_static_block,
                                .class_static_block = block,
                            };
                        }
                    }

                    // Handle invalid identifiers in property names
                    // https://github.com/oven-sh/bun/issues/12039
                    if (p.lexer.token == .t_syntax_error) {
                        p.log.addRangeErrorFmt(p.source, name_range, p.allocator, "Unexpected {}", .{bun.fmt.quote(name)}) catch bun.outOfMemory();
                        return error.SyntaxError;
                    }

                    key = p.newExpr(E.String{ .data = name }, name_range.loc);

                    // Parse a shorthand property
                    const isShorthandProperty = !opts.is_class and
                        kind == .normal and
                        p.lexer.token != .t_colon and
                        p.lexer.token != .t_open_paren and
                        p.lexer.token != .t_less_than and
                        !opts.is_generator and
                        !opts.is_async and
                        !js_lexer.Keywords.has(name);

                    if (isShorthandProperty) {
                        if ((p.fn_or_arrow_data_parse.allow_await != .allow_ident and
                            strings.eqlComptime(name, "await")) or
                            (p.fn_or_arrow_data_parse.allow_yield != .allow_ident and
                                strings.eqlComptime(name, "yield")))
                        {
                            if (strings.eqlComptime(name, "await")) {
                                p.log.addRangeError(p.source, name_range, "Cannot use \"await\" here") catch unreachable;
                            } else {
                                p.log.addRangeError(p.source, name_range, "Cannot use \"yield\" here") catch unreachable;
                            }
                        }

                        const ref = p.storeNameInRef(name) catch unreachable;
                        const value = p.newExpr(E.Identifier{ .ref = ref }, key.loc);

                        // Destructuring patterns have an optional default value
                        var initializer: ?Expr = null;
                        if (errors != null and p.lexer.token == .t_equals) {
                            errors.?.invalid_expr_default_value = p.lexer.range();
                            try p.lexer.next();
                            initializer = try p.parseExpr(.comma);
                        }

                        return G.Property{
                            .kind = kind,
                            .key = key,
                            .value = value,
                            .initializer = initializer,
                            .flags = Flags.Property.init(.{
                                .was_shorthand = true,
                            }),
                        };
                    }
                },
            }

            var has_type_parameters = false;
            var has_definite_assignment_assertion_operator = false;

            if (comptime is_typescript_enabled) {
                if (opts.is_class) {
                    if (p.lexer.token == .t_question) {
                        // "class X { foo?: number }"
                        // "class X { foo!: number }"
                        try p.lexer.next();
                    } else if (p.lexer.token == .t_exclamation and
                        !p.lexer.has_newline_before and
                        kind == .normal and
                        !opts.is_async and
                        !opts.is_generator)
                    {
                        // "class X { foo!: number }"
                        try p.lexer.next();
                        has_definite_assignment_assertion_operator = true;
                    }
                }

                // "class X { foo?<T>(): T }"
                // "const x = { foo<T>(): T {} }"
                if (!has_definite_assignment_assertion_operator) {
                    has_type_parameters = try p.skipTypeScriptTypeParameters(.{ .allow_const_modifier = true }) != .did_not_skip_anything;
                }
            }

            // Parse a class field with an optional initial value
            if (opts.is_class and
                kind == .normal and !opts.is_async and
                !opts.is_generator and
                p.lexer.token != .t_open_paren and
                !has_type_parameters and
                (p.lexer.token != .t_open_paren or has_definite_assignment_assertion_operator))
            {
                var initializer: ?Expr = null;
                var ts_metadata = TypeScript.Metadata.default;

                // Forbid the names "constructor" and "prototype" in some cases
                if (!is_computed) {
                    switch (key.data) {
                        .e_string => |str| {
                            if (str.eqlComptime("constructor") or (opts.is_static and str.eqlComptime("prototype"))) {
                                // TODO: fmt error message to include string value.
                                p.log.addRangeError(p.source, key_range, "Invalid field name") catch unreachable;
                            }
                        },
                        else => {},
                    }
                }

                if (comptime is_typescript_enabled) {
                    // Skip over types
                    if (p.lexer.token == .t_colon) {
                        try p.lexer.next();
                        if (p.options.features.emit_decorator_metadata and opts.is_class and opts.ts_decorators.len > 0) {
                            ts_metadata = try p.skipTypeScriptTypeWithMetadata(.lowest);
                        } else {
                            try p.skipTypeScriptType(.lowest);
                        }
                    }
                }

                if (p.lexer.token == .t_equals) {
                    if (comptime is_typescript_enabled) {
                        if (!opts.declare_range.isEmpty()) {
                            try p.log.addRangeError(p.source, p.lexer.range(), "Class fields that use \"declare\" cannot be initialized");
                        }
                    }

                    try p.lexer.next();

                    // "this" and "super" property access is allowed in field initializers
                    const old_is_this_disallowed = p.fn_or_arrow_data_parse.is_this_disallowed;
                    const old_allow_super_property = p.fn_or_arrow_data_parse.allow_super_property;
                    p.fn_or_arrow_data_parse.is_this_disallowed = false;
                    p.fn_or_arrow_data_parse.allow_super_property = true;

                    initializer = try p.parseExpr(.comma);

                    p.fn_or_arrow_data_parse.is_this_disallowed = old_is_this_disallowed;
                    p.fn_or_arrow_data_parse.allow_super_property = old_allow_super_property;
                }

                // Special-case private identifiers
                switch (key.data) {
                    .e_private_identifier => |*private| {
                        const name = p.loadNameFromRef(private.ref);
                        if (strings.eqlComptime(name, "#constructor")) {
                            p.log.addRangeError(p.source, key_range, "Invalid field name \"#constructor\"") catch unreachable;
                        }

                        const declare: js_ast.Symbol.Kind = if (opts.is_static)
                            .private_static_field
                        else
                            .private_field;

                        private.ref = p.declareSymbol(declare, key.loc, name) catch unreachable;
                    },
                    else => {},
                }

                try p.lexer.expectOrInsertSemicolon();

                return G.Property{
                    .ts_decorators = ExprNodeList.init(opts.ts_decorators),
                    .kind = kind,
                    .flags = Flags.Property.init(.{
                        .is_computed = is_computed,
                        .is_static = opts.is_static,
                    }),
                    .key = key,
                    .initializer = initializer,
                    .ts_metadata = ts_metadata,
                };
            }

            // Parse a method expression
            if (p.lexer.token == .t_open_paren or kind != .normal or opts.is_class or opts.is_async or opts.is_generator) {
                if (p.lexer.token == .t_open_paren and kind != .get and kind != .set) {
                    // markSyntaxFeature object extensions
                }

                const loc = p.lexer.loc();
                const scope_index = p.pushScopeForParsePass(.function_args, loc) catch unreachable;
                var is_constructor = false;

                // Forbid the names "constructor" and "prototype" in some cases
                if (opts.is_class and !is_computed) {
                    switch (key.data) {
                        .e_string => |str| {
                            if (!opts.is_static and str.eqlComptime("constructor")) {
                                if (kind == .get) {
                                    p.log.addRangeError(p.source, key_range, "Class constructor cannot be a getter") catch unreachable;
                                } else if (kind == .set) {
                                    p.log.addRangeError(p.source, key_range, "Class constructor cannot be a setter") catch unreachable;
                                } else if (opts.is_async) {
                                    p.log.addRangeError(p.source, key_range, "Class constructor cannot be an async function") catch unreachable;
                                } else if (opts.is_generator) {
                                    p.log.addRangeError(p.source, key_range, "Class constructor cannot be a generator function") catch unreachable;
                                } else {
                                    is_constructor = true;
                                }
                            } else if (opts.is_static and str.eqlComptime("prototype")) {
                                p.log.addRangeError(p.source, key_range, "Invalid static method name \"prototype\"") catch unreachable;
                            }
                        },
                        else => {},
                    }
                }

                var func = try p.parseFn(null, FnOrArrowDataParse{
                    .async_range = opts.async_range,
                    .needs_async_loc = key.loc,
                    .has_async_range = !opts.async_range.isEmpty(),
                    .allow_await = if (opts.is_async) AwaitOrYield.allow_expr else AwaitOrYield.allow_ident,
                    .allow_yield = if (opts.is_generator) AwaitOrYield.allow_expr else AwaitOrYield.allow_ident,
                    .allow_super_call = opts.class_has_extends and is_constructor,
                    .allow_super_property = true,
                    .allow_ts_decorators = opts.allow_ts_decorators,
                    .is_constructor = is_constructor,
                    .has_decorators = opts.ts_decorators.len > 0 or (opts.has_class_decorators and is_constructor),

                    // Only allow omitting the body if we're parsing TypeScript class
                    .allow_missing_body_for_type_script = is_typescript_enabled and opts.is_class,
                });

                opts.has_argument_decorators = opts.has_argument_decorators or p.fn_or_arrow_data_parse.has_argument_decorators;
                p.fn_or_arrow_data_parse.has_argument_decorators = false;

                // "class Foo { foo(): void; foo(): void {} }"
                if (func.flags.contains(.is_forward_declaration)) {
                    // Skip this property entirely
                    p.popAndDiscardScope(scope_index);
                    return null;
                }

                p.popScope();
                func.flags.insert(.is_unique_formal_parameters);
                const value = p.newExpr(E.Function{ .func = func }, loc);

                // Enforce argument rules for accessors
                switch (kind) {
                    .get => {
                        if (func.args.len > 0) {
                            const r = js_lexer.rangeOfIdentifier(p.source, func.args[0].binding.loc);
                            p.log.addRangeErrorFmt(p.source, r, p.allocator, "Getter {s} must have zero arguments", .{p.keyNameForError(key)}) catch unreachable;
                        }
                    },
                    .set => {
                        if (func.args.len != 1) {
                            var r = js_lexer.rangeOfIdentifier(p.source, if (func.args.len > 0) func.args[0].binding.loc else loc);
                            if (func.args.len > 1) {
                                r = js_lexer.rangeOfIdentifier(p.source, func.args[1].binding.loc);
                            }
                            p.log.addRangeErrorFmt(p.source, r, p.allocator, "Setter {s} must have exactly 1 argument (there are {d})", .{ p.keyNameForError(key), func.args.len }) catch unreachable;
                        }
                    },
                    else => {},
                }

                // Special-case private identifiers
                switch (key.data) {
                    .e_private_identifier => |*private| {
                        const declare: Symbol.Kind = switch (kind) {
                            .get => if (opts.is_static)
                                .private_static_get
                            else
                                .private_get,

                            .set => if (opts.is_static)
                                .private_static_set
                            else
                                .private_set,
                            else => if (opts.is_static)
                                .private_static_method
                            else
                                .private_method,
                        };

                        const name = p.loadNameFromRef(private.ref);
                        if (strings.eqlComptime(name, "#constructor")) {
                            p.log.addRangeError(p.source, key_range, "Invalid method name \"#constructor\"") catch unreachable;
                        }
                        private.ref = p.declareSymbol(declare, key.loc, name) catch unreachable;
                    },
                    else => {},
                }

                return G.Property{
                    .ts_decorators = ExprNodeList.init(opts.ts_decorators),
                    .kind = kind,
                    .flags = Flags.Property.init(.{
                        .is_computed = is_computed,
                        .is_method = true,
                        .is_static = opts.is_static,
                    }),
                    .key = key,
                    .value = value,
                    .ts_metadata = .m_function,
                };
            }

            // Parse an object key/value pair
            try p.lexer.expect(.t_colon);
            const value = try p.parseExprOrBindings(.comma, errors);

            return G.Property{
                .kind = kind,
                .flags = Flags.Property.init(.{
                    .is_computed = is_computed,
                }),
                .key = key,
                .value = value,
            };
        }
    };
}

const string = []const u8;

const bun = @import("bun");
const logger = bun.logger;
const strings = bun.strings;

const js_ast = bun.ast;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeList = js_ast.ExprNodeList;
const Flags = js_ast.Flags;
const Stmt = js_ast.Stmt;
const Symbol = js_ast.Symbol;

const G = js_ast.G;
const Property = G.Property;

const js_lexer = bun.js_lexer;
const T = js_lexer.T;

const js_parser = bun.js_parser;
const AwaitOrYield = js_parser.AwaitOrYield;
const DeferredErrors = js_parser.DeferredErrors;
const FnOrArrowDataParse = js_parser.FnOrArrowDataParse;
const JSXTransformType = js_parser.JSXTransformType;
const ParseStatementOptions = js_parser.ParseStatementOptions;
const PropertyOpts = js_parser.PropertyOpts;
const TypeScript = js_parser.TypeScript;
const options = js_parser.options;

const std = @import("std");
const List = std.ArrayListUnmanaged;
1368
src/ast/parseStmt.zig
Normal file
File diff suppressed because it is too large
809
src/ast/parseSuffix.zig
Normal file
@@ -0,0 +1,809 @@
pub fn ParseSuffix(
    comptime parser_feature__typescript: bool,
    comptime parser_feature__jsx: JSXTransformType,
    comptime parser_feature__scan_only: bool,
) type {
    return struct {
        const P = js_parser.NewParser_(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only);
        const is_typescript_enabled = P.is_typescript_enabled;

        pub fn parseSuffix(noalias p: *P, _left: Expr, level: Level, noalias errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr {
            var left = _left;
            var optional_chain: ?js_ast.OptionalChain = null;
            while (true) {
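                // Immediately after an arrow function body, only the comma operator
                // may extend the expression ("() => {}, next"); any other token
                // ends it.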
                if (p.lexer.loc().start == p.after_arrow_body_loc.start) {
                    while (true) {
                        switch (p.lexer.token) {
                            .t_comma => {
                                if (level.gte(.comma)) {
                                    return left;
                                }

                                try p.lexer.next();
                                left = p.newExpr(E.Binary{
                                    .op = .bin_comma,
                                    .left = left,
                                    .right = try p.parseExpr(.comma),
                                }, left.loc);
                            },
                            else => {
                                return left;
                            },
                        }
                    }
                }

                if (comptime is_typescript_enabled) {
                    // Stop now if this token is forbidden to follow a TypeScript "as" cast
                    if (p.forbid_suffix_after_as_loc.start > -1 and p.lexer.loc().start == p.forbid_suffix_after_as_loc.start) {
                        return left;
                    }
                }

                // Reset the optional chain flag by default. That way we won't accidentally
                // treat "c.d" as OptionalChainContinue in "a?.b + c.d".
                const old_optional_chain = optional_chain;
                optional_chain = null;
                switch (p.lexer.token) {
                    .t_dot => {
                        try p.lexer.next();
                        if (p.lexer.token == .t_private_identifier and p.allow_private_identifiers) {
                            // "a.#b"
                            // "a?.b.#c"
                            switch (left.data) {
                                .e_super => {
                                    try p.lexer.expected(.t_identifier);
                                },
                                else => {},
                            }

                            const name = p.lexer.identifier;
                            const name_loc = p.lexer.loc();
                            try p.lexer.next();
                            const ref = p.storeNameInRef(name) catch unreachable;
                            left = p.newExpr(E.Index{
                                .target = left,
                                .index = p.newExpr(
                                    E.PrivateIdentifier{
                                        .ref = ref,
                                    },
                                    name_loc,
                                ),
                                .optional_chain = old_optional_chain,
                            }, left.loc);
                        } else {
                            // "a.b"
                            // "a?.b.c"
                            if (!p.lexer.isIdentifierOrKeyword()) {
                                try p.lexer.expect(.t_identifier);
                            }

                            const name = p.lexer.identifier;
                            const name_loc = p.lexer.loc();
                            try p.lexer.next();

                            left = p.newExpr(E.Dot{ .target = left, .name = name, .name_loc = name_loc, .optional_chain = old_optional_chain }, left.loc);
                        }

                        optional_chain = old_optional_chain;
                    },
                    .t_question_dot => {
                        try p.lexer.next();
                        var optional_start: ?js_ast.OptionalChain = js_ast.OptionalChain.start;

                        // Remove unnecessary optional chains
                        if (p.options.features.minify_syntax) {
                            const result = SideEffects.toNullOrUndefined(p, left.data);
                            if (result.ok and !result.value) {
                                optional_start = null;
                            }
                        }

                        switch (p.lexer.token) {
                            .t_open_bracket => {
                                // "a?.[b]"
                                try p.lexer.next();

                                // allow "in" inside the brackets;
                                const old_allow_in = p.allow_in;
                                p.allow_in = true;

                                const index = try p.parseExpr(.lowest);

                                p.allow_in = old_allow_in;

                                try p.lexer.expect(.t_close_bracket);
                                left = p.newExpr(
                                    E.Index{ .target = left, .index = index, .optional_chain = optional_start },
                                    left.loc,
                                );
                            },

                            .t_open_paren => {
                                // "a?.()"
                                if (level.gte(.call)) {
                                    return left;
                                }

                                const list_loc = try p.parseCallArgs();
                                left = p.newExpr(E.Call{
                                    .target = left,
                                    .args = list_loc.list,
                                    .close_paren_loc = list_loc.loc,
                                    .optional_chain = optional_start,
                                }, left.loc);
                            },
                            .t_less_than, .t_less_than_less_than => {
                                // "a?.<T>()"
                                if (comptime !is_typescript_enabled) {
                                    try p.lexer.expected(.t_identifier);
                                    return error.SyntaxError;
                                }

                                _ = try p.skipTypeScriptTypeArguments(false);
                                if (p.lexer.token != .t_open_paren) {
                                    try p.lexer.expected(.t_open_paren);
                                }

                                if (level.gte(.call)) {
                                    return left;
                                }

                                const list_loc = try p.parseCallArgs();
                                left = p.newExpr(E.Call{
                                    .target = left,
                                    .args = list_loc.list,
                                    .close_paren_loc = list_loc.loc,
                                    .optional_chain = optional_start,
                                }, left.loc);
                            },
                            else => {
                                if (p.lexer.token == .t_private_identifier and p.allow_private_identifiers) {
                                    // "a?.#b"
                                    const name = p.lexer.identifier;
                                    const name_loc = p.lexer.loc();
                                    try p.lexer.next();
                                    const ref = p.storeNameInRef(name) catch unreachable;
                                    left = p.newExpr(E.Index{
                                        .target = left,
                                        .index = p.newExpr(
                                            E.PrivateIdentifier{
                                                .ref = ref,
                                            },
                                            name_loc,
                                        ),
                                        .optional_chain = optional_start,
                                    }, left.loc);
                                } else {
                                    // "a?.b"
                                    if (!p.lexer.isIdentifierOrKeyword()) {
                                        try p.lexer.expect(.t_identifier);
                                    }
                                    const name = p.lexer.identifier;
                                    const name_loc = p.lexer.loc();
                                    try p.lexer.next();

                                    left = p.newExpr(E.Dot{
                                        .target = left,
                                        .name = name,
                                        .name_loc = name_loc,
                                        .optional_chain = optional_start,
                                    }, left.loc);
                                }
                            },
                        }

                        // Only continue if we have started
                        if ((optional_start orelse .continuation) == .start) {
                            optional_chain = .continuation;
                        }
                    },
                    .t_no_substitution_template_literal => {
                        if (old_optional_chain != null) {
                            p.log.addRangeError(p.source, p.lexer.range(), "Template literals cannot have an optional chain as a tag") catch unreachable;
                        }
                        // p.markSyntaxFeature(compat.TemplateLiteral, p.lexer.Range());
                        const head = p.lexer.rawTemplateContents();
                        try p.lexer.next();
                        left = p.newExpr(E.Template{
                            .tag = left,
                            .head = .{ .raw = head },
                        }, left.loc);
                    },
                    .t_template_head => {
                        if (old_optional_chain != null) {
                            p.log.addRangeError(p.source, p.lexer.range(), "Template literals cannot have an optional chain as a tag") catch unreachable;
                        }
                        // p.markSyntaxFeature(compat.TemplateLiteral, p.lexer.Range());
                        const head = p.lexer.rawTemplateContents();
                        const partsGroup = try p.parseTemplateParts(true);
                        const tag = left;
                        left = p.newExpr(E.Template{
                            .tag = tag,
                            .head = .{ .raw = head },
                            .parts = partsGroup,
                        }, left.loc);
                    },
                    .t_open_bracket => {
                        // When parsing a decorator, ignore EIndex expressions since they may be
                        // part of a computed property:
                        //
                        //   class Foo {
                        //     @foo ['computed']() {}
                        //   }
                        //
                        // This matches the behavior of the TypeScript compiler.
                        if (flags == .ts_decorator) {
                            return left;
                        }

                        try p.lexer.next();

                        // Allow "in" inside the brackets
                        const old_allow_in = p.allow_in;
                        p.allow_in = true;

                        const index = try p.parseExpr(.lowest);

                        p.allow_in = old_allow_in;

                        try p.lexer.expect(.t_close_bracket);

                        left = p.newExpr(E.Index{
                            .target = left,
                            .index = index,
                            .optional_chain = old_optional_chain,
                        }, left.loc);
                        optional_chain = old_optional_chain;
                    },
                    .t_open_paren => {
                        if (level.gte(.call)) {
                            return left;
                        }

                        const list_loc = try p.parseCallArgs();
                        left = p.newExpr(
                            E.Call{
                                .target = left,
                                .args = list_loc.list,
                                .close_paren_loc = list_loc.loc,
                                .optional_chain = old_optional_chain,
                            },
                            left.loc,
                        );
                        optional_chain = old_optional_chain;
                    },
                    .t_question => {
                        if (level.gte(.conditional)) {
                            return left;
                        }
                        try p.lexer.next();

                        // Stop now if we're parsing one of these:
                        // "(a?) => {}"
                        // "(a?: b) => {}"
                        // "(a?, b?) => {}"
                        if (is_typescript_enabled and left.loc.start == p.latest_arrow_arg_loc.start and (p.lexer.token == .t_colon or
                            p.lexer.token == .t_close_paren or p.lexer.token == .t_comma))
                        {
                            if (errors == null) {
                                try p.lexer.unexpected();
                                return error.SyntaxError;
                            }
                            errors.?.invalid_expr_after_question = p.lexer.range();
                            return left;
                        }

                        // Allow "in" in between "?" and ":"
                        const old_allow_in = p.allow_in;
                        p.allow_in = true;

                        const yes = try p.parseExpr(.comma);

                        p.allow_in = old_allow_in;

                        try p.lexer.expect(.t_colon);
                        const no = try p.parseExpr(.comma);

                        left = p.newExpr(E.If{
                            .test_ = left,
                            .yes = yes,
                            .no = no,
                        }, left.loc);
                    },
                    .t_exclamation => {
                        // Skip over TypeScript non-null assertions
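                        // A newline before "!" ends the expression here, leaving "!"
                        // to begin the next statement instead.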
                        if (p.lexer.has_newline_before) {
                            return left;
                        }

                        if (!is_typescript_enabled) {
                            try p.lexer.unexpected();
                            return error.SyntaxError;
                        }

                        try p.lexer.next();
                        optional_chain = old_optional_chain;
                    },
                    .t_minus_minus => {
                        if (p.lexer.has_newline_before or level.gte(.postfix)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Unary{ .op = .un_post_dec, .value = left }, left.loc);
                    },
                    .t_plus_plus => {
                        if (p.lexer.has_newline_before or level.gte(.postfix)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Unary{ .op = .un_post_inc, .value = left }, left.loc);
                    },
                    .t_comma => {
                        if (level.gte(.comma)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_comma, .left = left, .right = try p.parseExpr(.comma) }, left.loc);
                    },
                    .t_plus => {
                        if (level.gte(.add)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_add, .left = left, .right = try p.parseExpr(.add) }, left.loc);
                    },
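                    // Assignment operators are right-associative, so their right-hand
                    // side is parsed one precedence level below .assign; the same
                    // applies to "**" (.exponentiation), which is also right-associative.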
                    .t_plus_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_add_assign, .left = left, .right = try p.parseExpr(@as(Op.Level, @enumFromInt(@intFromEnum(Op.Level.assign) - 1))) }, left.loc);
                    },
                    .t_minus => {
                        if (level.gte(.add)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_sub, .left = left, .right = try p.parseExpr(.add) }, left.loc);
                    },
                    .t_minus_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_sub_assign, .left = left, .right = try p.parseExpr(Op.Level.sub(Op.Level.assign, 1)) }, left.loc);
                    },
                    .t_asterisk => {
                        if (level.gte(.multiply)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_mul, .left = left, .right = try p.parseExpr(.multiply) }, left.loc);
                    },
                    .t_asterisk_asterisk => {
                        if (level.gte(.exponentiation)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_pow, .left = left, .right = try p.parseExpr(Op.Level.exponentiation.sub(1)) }, left.loc);
                    },
                    .t_asterisk_asterisk_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_pow_assign, .left = left, .right = try p.parseExpr(Op.Level.assign.sub(1)) }, left.loc);
                    },
                    .t_asterisk_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_mul_assign, .left = left, .right = try p.parseExpr(Op.Level.assign.sub(1)) }, left.loc);
                    },
                    .t_percent => {
                        if (level.gte(.multiply)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_rem, .left = left, .right = try p.parseExpr(Op.Level.multiply) }, left.loc);
                    },
                    .t_percent_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_rem_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
                    },
                    .t_slash => {
                        if (level.gte(.multiply)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_div, .left = left, .right = try p.parseExpr(Level.multiply) }, left.loc);
                    },
                    .t_slash_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_div_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
                    },
                    .t_equals_equals => {
                        if (level.gte(.equals)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_loose_eq, .left = left, .right = try p.parseExpr(Level.equals) }, left.loc);
                    },
                    .t_exclamation_equals => {
                        if (level.gte(.equals)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_loose_ne, .left = left, .right = try p.parseExpr(Level.equals) }, left.loc);
                    },
                    .t_equals_equals_equals => {
                        if (level.gte(.equals)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_strict_eq, .left = left, .right = try p.parseExpr(Level.equals) }, left.loc);
                    },
                    .t_exclamation_equals_equals => {
                        if (level.gte(.equals)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_strict_ne, .left = left, .right = try p.parseExpr(Level.equals) }, left.loc);
                    },
                    .t_less_than => {
                        // TypeScript allows type arguments to be specified with angle brackets
                        // inside an expression. Unlike in other languages, this unfortunately
                        // appears to require backtracking to parse.
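                        // e.g. "f<number>(x)" is a call with type arguments, while the
                        // same tokens could instead be the comparisons "f < number > (x)".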
                        if (is_typescript_enabled and p.trySkipTypeScriptTypeArgumentsWithBacktracking()) {
                            optional_chain = old_optional_chain;
                            continue;
                        }

                        if (level.gte(.compare)) {
                            return left;
                        }
                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_lt, .left = left, .right = try p.parseExpr(.compare) }, left.loc);
                    },
                    .t_less_than_equals => {
                        if (level.gte(.compare)) {
                            return left;
                        }
                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_le, .left = left, .right = try p.parseExpr(.compare) }, left.loc);
                    },
                    .t_greater_than => {
                        if (level.gte(.compare)) {
                            return left;
                        }
                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_gt, .left = left, .right = try p.parseExpr(.compare) }, left.loc);
                    },
                    .t_greater_than_equals => {
                        if (level.gte(.compare)) {
                            return left;
                        }
                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_ge, .left = left, .right = try p.parseExpr(.compare) }, left.loc);
                    },
                    .t_less_than_less_than => {
                        // TypeScript allows type arguments to be specified with angle brackets
                        // inside an expression. Unlike in other languages, this unfortunately
                        // appears to require backtracking to parse.
                        if (is_typescript_enabled and p.trySkipTypeScriptTypeArgumentsWithBacktracking()) {
                            optional_chain = old_optional_chain;
                            continue;
                        }

                        if (level.gte(.shift)) {
                            return left;
                        }
                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_shl, .left = left, .right = try p.parseExpr(.shift) }, left.loc);
                    },
                    .t_less_than_less_than_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_shl_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
                    },
                    .t_greater_than_greater_than => {
                        if (level.gte(.shift)) {
                            return left;
                        }
                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_shr, .left = left, .right = try p.parseExpr(.shift) }, left.loc);
                    },
                    .t_greater_than_greater_than_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_shr_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
                    },
                    .t_greater_than_greater_than_greater_than => {
                        if (level.gte(.shift)) {
                            return left;
                        }
                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_u_shr, .left = left, .right = try p.parseExpr(.shift) }, left.loc);
                    },
                    .t_greater_than_greater_than_greater_than_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_u_shr_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
                    },
                    .t_question_question => {
                        if (level.gte(.nullish_coalescing)) {
                            return left;
                        }
                        try p.lexer.next();
                        const prev = left;
                        left = p.newExpr(E.Binary{ .op = .bin_nullish_coalescing, .left = prev, .right = try p.parseExpr(.nullish_coalescing) }, left.loc);
                    },
                    .t_question_question_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_nullish_coalescing_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
                    },
.t_bar_bar => {
|
||||
if (level.gte(.logical_or)) {
|
||||
return left;
|
||||
}
|
||||
|
||||
// Prevent "||" inside "??" from the right
|
||||
if (level.eql(.nullish_coalescing)) {
|
||||
try p.lexer.unexpected();
|
||||
return error.SyntaxError;
|
||||
}
|
||||
|
||||
try p.lexer.next();
|
||||
const right = try p.parseExpr(.logical_or);
|
||||
left = p.newExpr(E.Binary{ .op = Op.Code.bin_logical_or, .left = left, .right = right }, left.loc);
|
||||
|
||||
if (level.lt(.nullish_coalescing)) {
|
||||
left = try p.parseSuffix(left, Level.nullish_coalescing.addF(1), null, flags);
|
||||
|
||||
if (p.lexer.token == .t_question_question) {
|
||||
try p.lexer.unexpected();
|
||||
return error.SyntaxError;
|
||||
}
|
||||
}
|
||||
},
|
||||
.t_bar_bar_equals => {
|
||||
if (level.gte(.assign)) {
|
||||
return left;
|
||||
}
|
||||
|
||||
try p.lexer.next();
|
||||
left = p.newExpr(E.Binary{ .op = .bin_logical_or_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
|
||||
},
|
||||
.t_ampersand_ampersand => {
|
||||
if (level.gte(.logical_and)) {
|
||||
return left;
|
||||
}
|
||||
|
||||
// Prevent "&&" inside "??" from the right
|
||||
if (level.eql(.nullish_coalescing)) {
|
||||
try p.lexer.unexpected();
|
||||
return error.SyntaxError;
|
||||
}
|
||||
|
||||
try p.lexer.next();
|
||||
left = p.newExpr(E.Binary{ .op = .bin_logical_and, .left = left, .right = try p.parseExpr(.logical_and) }, left.loc);
|
||||
|
||||
// Prevent "&&" inside "??" from the left
|
||||
if (level.lt(.nullish_coalescing)) {
|
||||
left = try p.parseSuffix(left, Level.nullish_coalescing.addF(1), null, flags);
|
||||
|
||||
if (p.lexer.token == .t_question_question) {
|
||||
try p.lexer.unexpected();
|
||||
return error.SyntaxError;
|
||||
}
|
||||
}
|
||||
},
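                    // Illustrative JavaScript for the two cases above, which enforce
                    // the ES2020 rule that "??" may not be mixed with "||" or "&&"
                    // without parentheses:
                    //
                    //   a ?? b || c;   // SyntaxError
                    //   (a ?? b) || c; // OK
                    //   a ?? (b || c); // OK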
                    .t_ampersand_ampersand_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_logical_and_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
                    },
                    .t_bar => {
                        if (level.gte(.bitwise_or)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_bitwise_or, .left = left, .right = try p.parseExpr(.bitwise_or) }, left.loc);
                    },
                    .t_bar_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_bitwise_or_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
                    },
                    .t_ampersand => {
                        if (level.gte(.bitwise_and)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_bitwise_and, .left = left, .right = try p.parseExpr(.bitwise_and) }, left.loc);
                    },
                    .t_ampersand_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_bitwise_and_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
                    },
                    .t_caret => {
                        if (level.gte(.bitwise_xor)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_bitwise_xor, .left = left, .right = try p.parseExpr(.bitwise_xor) }, left.loc);
                    },
                    .t_caret_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_bitwise_xor_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
                    },
                    .t_equals => {
                        if (level.gte(.assign)) {
                            return left;
                        }

                        try p.lexer.next();

                        left = p.newExpr(E.Binary{ .op = .bin_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
                    },
                    .t_in => {
                        if (level.gte(.compare) or !p.allow_in) {
                            return left;
                        }

                        // Warn about "!a in b" instead of "!(a in b)"
                        switch (left.data) {
                            .e_unary => |unary| {
                                if (unary.op == .un_not) {
                                    // TODO:
                                    // p.log.addRangeWarning(source: ?Source, r: Range, text: string)
                                }
                            },
                            else => {},
                        }

                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_in, .left = left, .right = try p.parseExpr(.compare) }, left.loc);
                    },
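                    // Illustrative: the warning above targets a precedence pitfall.
                    // "!" binds tighter than "in", so in JavaScript:
                    //
                    //   !a in b    // parses as (!a) in b
                    //   !(a in b)  // what is usually intended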
                    .t_instanceof => {
                        if (level.gte(.compare)) {
                            return left;
                        }

                        // Warn about "!a instanceof b" instead of "!(a instanceof b)". Here's an
                        // example of code with this problem: https://github.com/mrdoob/three.js/pull/11182.
                        if (!p.options.suppress_warnings_about_weird_code) {
                            switch (left.data) {
                                .e_unary => |unary| {
                                    if (unary.op == .un_not) {
                                        // TODO:
                                        // p.log.addRangeWarning(source: ?Source, r: Range, text: string)
                                    }
                                },
                                else => {},
                            }
                        }
                        try p.lexer.next();
                        left = p.newExpr(E.Binary{ .op = .bin_instanceof, .left = left, .right = try p.parseExpr(.compare) }, left.loc);
                    },
                    else => {
                        // Handle the TypeScript "as" operator
                        // Handle the TypeScript "satisfies" operator
                        if (is_typescript_enabled and level.lt(.compare) and !p.lexer.has_newline_before and (p.lexer.isContextualKeyword("as") or p.lexer.isContextualKeyword("satisfies"))) {
                            try p.lexer.next();
                            try p.skipTypeScriptType(.lowest);

                            // These tokens are not allowed to follow a cast expression. This isn't
                            // an outright error because it may be on a new line, in which case it's
                            // the start of a new expression when it's after a cast:
                            //
                            //   x = y as z
                            //   (something);
                            //
                            switch (p.lexer.token) {
                                .t_plus_plus,
                                .t_minus_minus,
                                .t_no_substitution_template_literal,
                                .t_template_head,
                                .t_open_paren,
                                .t_open_bracket,
                                .t_question_dot,
                                => {
                                    p.forbid_suffix_after_as_loc = p.lexer.loc();
                                    return left;
                                },
                                else => {},
                            }
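                            // A cast is likewise not a valid assignment target, so an
                            // assignment operator may not directly follow one; the
                            // location is recorded and the expression ends here.
                            // Illustrative TypeScript:
                            //
                            //   x as any = y; // "=" may not follow the cast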

                            if (p.lexer.token.isAssign()) {
                                p.forbid_suffix_after_as_loc = p.lexer.loc();
                                return left;
                            }
                            continue;
                        }

                        return left;
                    },
                }
            }
        }
    };
}

const string = []const u8;

const bun = @import("bun");

const js_ast = bun.ast;
const E = js_ast.E;
const Expr = js_ast.Expr;

const Op = js_ast.Op;
const Level = js_ast.Op.Level;

const js_lexer = bun.js_lexer;
const T = js_lexer.T;

const js_parser = bun.js_parser;
const DeferredErrors = js_parser.DeferredErrors;
const JSXTransformType = js_parser.JSXTransformType;
const SideEffects = js_parser.SideEffects;
const TypeScript = js_parser.TypeScript;
const options = js_parser.options;
458
src/ast/parseTypescript.zig
Normal file
@@ -0,0 +1,458 @@
pub fn ParseTypescript(
    comptime parser_feature__typescript: bool,
    comptime parser_feature__jsx: JSXTransformType,
    comptime parser_feature__scan_only: bool,
) type {
    return struct {
        const P = js_parser.NewParser_(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only);
        const is_typescript_enabled = P.is_typescript_enabled;

        pub fn parseTypeScriptDecorators(p: *P) ![]ExprNodeIndex {
            if (!is_typescript_enabled) {
                return &([_]ExprNodeIndex{});
            }

            var decorators = ListManaged(ExprNodeIndex).init(p.allocator);
            while (p.lexer.token == T.t_at) {
                try p.lexer.next();

                // Parse a new/call expression with "exprFlagTSDecorator" so we ignore
                // EIndex expressions, since they may be part of a computed property:
                //
                //   class Foo {
                //     @foo ['computed']() {}
                //   }
                //
                // This matches the behavior of the TypeScript compiler.
                try decorators.append(try p.parseExprWithFlags(.new, Expr.EFlags.ts_decorator));
            }

            return decorators.items;
        }

        pub fn parseTypeScriptNamespaceStmt(p: *P, loc: logger.Loc, opts: *ParseStatementOptions) anyerror!Stmt {
            // "namespace foo {}";
            const name_loc = p.lexer.loc();
            const name_text = p.lexer.identifier;
            try p.lexer.next();

            // Generate the namespace object
            const ts_namespace = p.getOrCreateExportedNamespaceMembers(name_text, opts.is_export, false);
            const exported_members = ts_namespace.exported_members;
            const ns_member_data = js_ast.TSNamespaceMember.Data{ .namespace = exported_members };

            // Declare the namespace and create the scope
            var name = LocRef{ .loc = name_loc, .ref = null };
            const scope_index = try p.pushScopeForParsePass(.entry, loc);
            p.current_scope.ts_namespace = ts_namespace;

            const old_has_non_local_export_declare_inside_namespace = p.has_non_local_export_declare_inside_namespace;
            p.has_non_local_export_declare_inside_namespace = false;

            // Parse the statements inside the namespace
            var stmts: ListManaged(Stmt) = ListManaged(Stmt).init(p.allocator);
            if (p.lexer.token == .t_dot) {
                const dot_loc = p.lexer.loc();
                try p.lexer.next();

                var _opts = ParseStatementOptions{
                    .is_export = true,
                    .is_namespace_scope = true,
                    .is_typescript_declare = opts.is_typescript_declare,
                };
                stmts.append(try p.parseTypeScriptNamespaceStmt(dot_loc, &_opts)) catch unreachable;
            } else if (opts.is_typescript_declare and p.lexer.token != .t_open_brace) {
                try p.lexer.expectOrInsertSemicolon();
            } else {
                try p.lexer.expect(.t_open_brace);
                var _opts = ParseStatementOptions{
                    .is_namespace_scope = true,
                    .is_typescript_declare = opts.is_typescript_declare,
                };
                stmts = ListManaged(Stmt).fromOwnedSlice(p.allocator, try p.parseStmtsUpTo(.t_close_brace, &_opts));
                try p.lexer.next();
            }
            const has_non_local_export_declare_inside_namespace = p.has_non_local_export_declare_inside_namespace;
            p.has_non_local_export_declare_inside_namespace = old_has_non_local_export_declare_inside_namespace;

            // Add any exported members from this namespace's body as members of the
            // associated namespace object.
            for (stmts.items) |stmt| {
                switch (stmt.data) {
                    .s_function => |func| {
                        if (func.func.flags.contains(.is_export)) {
                            const locref = func.func.name.?;
                            const fn_name = p.symbols.items[locref.ref.?.inner_index].original_name;
                            try exported_members.put(p.allocator, fn_name, .{
                                .loc = locref.loc,
                                .data = .property,
                            });
                            try p.ref_to_ts_namespace_member.put(
                                p.allocator,
                                locref.ref.?,
                                .property,
                            );
                        }
                    },
                    .s_class => |class| {
                        if (class.is_export) {
                            const locref = class.class.class_name.?;
                            const class_name = p.symbols.items[locref.ref.?.inner_index].original_name;
                            try exported_members.put(p.allocator, class_name, .{
                                .loc = locref.loc,
                                .data = .property,
                            });
                            try p.ref_to_ts_namespace_member.put(
                                p.allocator,
                                locref.ref.?,
                                .property,
                            );
                        }
                    },
                    inline .s_namespace, .s_enum => |ns| {
                        if (ns.is_export) {
                            if (p.ref_to_ts_namespace_member.get(ns.name.ref.?)) |member_data| {
                                try exported_members.put(
                                    p.allocator,
                                    p.symbols.items[ns.name.ref.?.inner_index].original_name,
                                    .{
                                        .data = member_data,
                                        .loc = ns.name.loc,
                                    },
                                );
                                try p.ref_to_ts_namespace_member.put(
                                    p.allocator,
                                    ns.name.ref.?,
                                    member_data,
                                );
                            }
                        }
                    },
                    .s_local => |local| {
                        if (local.is_export) {
                            for (local.decls.slice()) |decl| {
                                try p.defineExportedNamespaceBinding(
                                    exported_members,
                                    decl.binding,
                                );
                            }
                        }
                    },
                    else => {},
                }
            }

            // Import assignments may be only used in type expressions, not value
            // expressions. If this is the case, the TypeScript compiler removes
            // them entirely from the output. That can cause the namespace itself
            // to be considered empty and thus be removed.
            var import_equal_count: usize = 0;
            for (stmts.items) |stmt| {
                switch (stmt.data) {
                    .s_local => |local| {
                        if (local.was_ts_import_equals and !local.is_export) {
                            import_equal_count += 1;
                        }
                    },
                    else => {},
                }
            }

            // TypeScript omits namespaces without values. These namespaces
            // are only allowed to be used in type expressions. They are
            // allowed to be exported, but can also only be used in type
            // expressions when imported. So we shouldn't count them as a
            // real export either.
            //
            // TypeScript also strangely counts namespaces containing only
            // "export declare" statements as non-empty even though "declare"
            // statements are only type annotations. We cannot omit the namespace
            // in that case. See https://github.com/evanw/esbuild/issues/1158.
            if ((stmts.items.len == import_equal_count and !has_non_local_export_declare_inside_namespace) or opts.is_typescript_declare) {
                p.popAndDiscardScope(scope_index);
                if (opts.is_module_scope) {
                    p.local_type_names.put(p.allocator, name_text, true) catch unreachable;
                }
                return p.s(S.TypeScript{}, loc);
            }

            var arg_ref = Ref.None;
            if (!opts.is_typescript_declare) {
                // Avoid a collision with the namespace closure argument variable if the
                // namespace exports a symbol with the same name as the namespace itself:
                //
                //   namespace foo {
                //     export let foo = 123
                //     console.log(foo)
                //   }
                //
                // TypeScript generates the following code in this case:
                //
                //   var foo;
                //   (function (foo_1) {
                //     foo_1.foo = 123;
                //     console.log(foo_1.foo);
                //   })(foo || (foo = {}));
                //
                if (p.current_scope.members.contains(name_text)) {
                    // Add a "_" to make tests easier to read, since non-bundler tests don't
                    // run the renamer. For external-facing things the renamer will avoid
                    // collisions automatically so this isn't important for correctness.
                    arg_ref = p.newSymbol(.hoisted, strings.cat(p.allocator, "_", name_text) catch unreachable) catch unreachable;
                    p.current_scope.generated.push(p.allocator, arg_ref) catch unreachable;
                } else {
                    arg_ref = p.newSymbol(.hoisted, name_text) catch unreachable;
                }
                ts_namespace.arg_ref = arg_ref;
            }
            p.popScope();

            if (!opts.is_typescript_declare) {
                name.ref = p.declareSymbol(.ts_namespace, name_loc, name_text) catch bun.outOfMemory();
                try p.ref_to_ts_namespace_member.put(p.allocator, name.ref.?, ns_member_data);
            }

            return p.s(S.Namespace{
                .name = name,
                .arg = arg_ref,
                .stmts = stmts.items,
                .is_export = opts.is_export,
            }, loc);
        }
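        // Illustrative TypeScript for the emptiness rule described above: a
        // namespace containing only type-level statements is omitted by the
        // TypeScript compiler, while one containing values becomes a closure.
        //
        //   namespace Types { export type A = number } // emits nothing
        //   namespace Vals { export const a = 1 }      // emits "var Vals" + an IIFE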

        pub fn parseTypeScriptImportEqualsStmt(p: *P, loc: logger.Loc, opts: *ParseStatementOptions, default_name_loc: logger.Loc, default_name: string) anyerror!Stmt {
            try p.lexer.expect(.t_equals);

            const kind = S.Local.Kind.k_const;
            const name = p.lexer.identifier;
            const target = p.newExpr(E.Identifier{ .ref = p.storeNameInRef(name) catch unreachable }, p.lexer.loc());
            var value = target;
            try p.lexer.expect(.t_identifier);

            if (strings.eqlComptime(name, "require") and p.lexer.token == .t_open_paren) {
                // "import ns = require('x')"
                try p.lexer.next();
                const path = p.newExpr(try p.lexer.toEString(), p.lexer.loc());
                try p.lexer.expect(.t_string_literal);
                try p.lexer.expect(.t_close_paren);
                if (!opts.is_typescript_declare) {
                    const args = try ExprNodeList.one(p.allocator, path);
                    value = p.newExpr(E.Call{ .target = target, .close_paren_loc = p.lexer.loc(), .args = args }, loc);
                }
            } else {
                // "import Foo = Bar"
                // "import Foo = Bar.Baz"
                var prev_value = value;
                while (p.lexer.token == .t_dot) : (prev_value = value) {
                    try p.lexer.next();
                    value = p.newExpr(E.Dot{ .target = prev_value, .name = p.lexer.identifier, .name_loc = p.lexer.loc() }, loc);
                    try p.lexer.expect(.t_identifier);
                }
            }

            try p.lexer.expectOrInsertSemicolon();

            if (opts.is_typescript_declare) {
                // "import type foo = require('bar');"
                // "import type foo = bar.baz;"
                return p.s(S.TypeScript{}, loc);
            }

            const ref = p.declareSymbol(.constant, default_name_loc, default_name) catch unreachable;
            var decls = p.allocator.alloc(Decl, 1) catch unreachable;
            decls[0] = Decl{
                .binding = p.b(B.Identifier{ .ref = ref }, default_name_loc),
                .value = value,
            };
            return p.s(S.Local{ .kind = kind, .decls = Decl.List.init(decls), .is_export = opts.is_export, .was_ts_import_equals = true }, loc);
        }
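        // Illustrative TypeScript for the two lowerings above (module names
        // are placeholders):
        //
        //   import ns = require("./mod"); // => const ns = require("./mod");
        //   import Alias = Foo.Bar.Baz;   // => const Alias = Foo.Bar.Baz;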

        pub fn parseTypescriptEnumStmt(p: *P, loc: logger.Loc, opts: *ParseStatementOptions) anyerror!Stmt {
            try p.lexer.expect(.t_enum);
            const name_loc = p.lexer.loc();
            const name_text = p.lexer.identifier;
            try p.lexer.expect(.t_identifier);
            var name = LocRef{ .loc = name_loc, .ref = Ref.None };

            // Generate the namespace object
            var arg_ref: Ref = undefined;
            const ts_namespace = p.getOrCreateExportedNamespaceMembers(name_text, opts.is_export, true);
            const exported_members = ts_namespace.exported_members;
            const enum_member_data = js_ast.TSNamespaceMember.Data{ .namespace = exported_members };

            // Declare the enum and create the scope
            const scope_index = p.scopes_in_order.items.len;
            if (!opts.is_typescript_declare) {
                name.ref = try p.declareSymbol(.ts_enum, name_loc, name_text);
                _ = try p.pushScopeForParsePass(.entry, loc);
                p.current_scope.ts_namespace = ts_namespace;
                p.ref_to_ts_namespace_member.putNoClobber(p.allocator, name.ref.?, enum_member_data) catch bun.outOfMemory();
            }

            try p.lexer.expect(.t_open_brace);

            // Parse the body
            var values = std.ArrayList(js_ast.EnumValue).init(p.allocator);
            while (p.lexer.token != .t_close_brace) {
                var value = js_ast.EnumValue{ .loc = p.lexer.loc(), .ref = Ref.None, .name = undefined, .value = null };
                var needs_symbol = false;

                // Parse the name
                if (p.lexer.token == .t_string_literal) {
                    value.name = (try p.lexer.toUTF8EString()).slice8();
                    needs_symbol = js_lexer.isIdentifier(value.name);
                } else if (p.lexer.isIdentifierOrKeyword()) {
                    value.name = p.lexer.identifier;
                    needs_symbol = true;
                } else {
                    try p.lexer.expect(.t_identifier);
                    // error early, name is still `undefined`
                    return error.SyntaxError;
                }
                try p.lexer.next();

                // Identifiers can be referenced by other values
                if (!opts.is_typescript_declare and needs_symbol) {
                    value.ref = try p.declareSymbol(.other, value.loc, value.name);
                }

                // Parse the initializer
                if (p.lexer.token == .t_equals) {
                    try p.lexer.next();
                    value.value = try p.parseExpr(.comma);
                }

                values.append(value) catch unreachable;

                exported_members.put(p.allocator, value.name, .{
                    .loc = value.loc,
                    .data = .enum_property,
                }) catch bun.outOfMemory();

                if (p.lexer.token != .t_comma and p.lexer.token != .t_semicolon) {
                    break;
                }

                try p.lexer.next();
            }

            if (!opts.is_typescript_declare) {
                // Avoid a collision with the enum closure argument variable if the
                // enum exports a symbol with the same name as the enum itself:
                //
                //   enum foo {
                //     foo = 123,
                //     bar = foo,
                //   }
                //
                // TypeScript generates the following code in this case:
                //
                //   var foo;
                //   (function (foo) {
                //     foo[foo["foo"] = 123] = "foo";
                //     foo[foo["bar"] = 123] = "bar";
                //   })(foo || (foo = {}));
                //
                // Whereas in this case:
                //
                //   enum foo {
                //     bar = foo as any,
                //   }
                //
                // TypeScript generates the following code:
                //
                //   var foo;
                //   (function (foo) {
                //     foo[foo["bar"] = foo] = "bar";
                //   })(foo || (foo = {}));
                if (p.current_scope.members.contains(name_text)) {
                    // Add a "_" to make tests easier to read, since non-bundler tests don't
                    // run the renamer. For external-facing things the renamer will avoid
                    // collisions automatically so this isn't important for correctness.
                    arg_ref = p.newSymbol(.hoisted, strings.cat(p.allocator, "_", name_text) catch unreachable) catch unreachable;
                    p.current_scope.generated.push(p.allocator, arg_ref) catch unreachable;
                } else {
                    arg_ref = p.declareSymbol(.hoisted, name_loc, name_text) catch unreachable;
                }
                p.ref_to_ts_namespace_member.put(p.allocator, arg_ref, enum_member_data) catch bun.outOfMemory();
                ts_namespace.arg_ref = arg_ref;

                p.popScope();
            }

            try p.lexer.expect(.t_close_brace);

            if (opts.is_typescript_declare) {
                if (opts.is_namespace_scope and opts.is_export) {
                    p.has_non_local_export_declare_inside_namespace = true;
                }

                return p.s(S.TypeScript{}, loc);
            }

            // Save these for when we do out-of-order enum visiting
            //
            // Make a copy of "scopesInOrder" instead of a slice or index since
            // the original array may be flattened in the future by
            // "popAndFlattenScope"
            p.scopes_in_order_for_enum.putNoClobber(
                p.allocator,
                loc,
                scope_order_clone: {
                    var count: usize = 0;
                    for (p.scopes_in_order.items[scope_index..]) |i| {
                        if (i != null) count += 1;
                    }

                    const items = p.allocator.alloc(ScopeOrder, count) catch bun.outOfMemory();
                    var i: usize = 0;
                    for (p.scopes_in_order.items[scope_index..]) |item| {
                        items[i] = item orelse continue;
                        i += 1;
                    }
                    break :scope_order_clone items;
                },
            ) catch bun.outOfMemory();

            return p.s(S.Enum{
                .name = name,
                .arg = arg_ref,
                .values = values.items,
                .is_export = opts.is_export,
            }, loc);
        }
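        // Illustrative TypeScript for the name handling above: a member named
        // by a string literal only gets a symbol when that string is a valid
        // identifier, since only then can other initializers reference it.
        //
        //   enum E { "ok" = 1, "not-ok" = 2 } // "ok" gets a symbol; "not-ok" does not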
    };
}

const string = []const u8;

const bun = @import("bun");
const logger = bun.logger;
const strings = bun.strings;

const js_ast = bun.ast;
const B = js_ast.B;
const E = js_ast.E;
const Expr = js_ast.Expr;
const ExprNodeIndex = js_ast.ExprNodeIndex;
const ExprNodeList = js_ast.ExprNodeList;
const LocRef = js_ast.LocRef;
const S = js_ast.S;
const Stmt = js_ast.Stmt;

const G = js_ast.G;
const Decl = G.Decl;

const js_lexer = bun.js_lexer;
const T = js_lexer.T;

const js_parser = bun.js_parser;
const JSXTransformType = js_parser.JSXTransformType;
const ParseStatementOptions = js_parser.ParseStatementOptions;
const Ref = js_parser.Ref;
const ScopeOrder = js_parser.ScopeOrder;
const TypeScript = js_parser.TypeScript;

const std = @import("std");
const List = std.ArrayListUnmanaged;
const ListManaged = std.ArrayList;
1317
src/ast/skipTypescript.zig
Normal file
File diff suppressed because it is too large
133
src/ast/symbols.zig
Normal file
@@ -0,0 +1,133 @@
pub fn Symbols(
    comptime parser_feature__typescript: bool,
    comptime parser_feature__jsx: JSXTransformType,
    comptime parser_feature__scan_only: bool,
) type {
    return struct {
        const P = js_parser.NewParser_(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only);

        pub fn findSymbol(noalias p: *P, loc: logger.Loc, name: string) !FindSymbolResult {
            return findSymbolWithRecordUsage(p, loc, name, true);
        }

        pub fn findSymbolWithRecordUsage(noalias p: *P, loc: logger.Loc, name: string, comptime record_usage: bool) !FindSymbolResult {
            var declare_loc: logger.Loc = logger.Loc.Empty;
            var is_inside_with_scope = false;
            // This function can show up in profiling.
            // That's part of why we do this.
            // Instead of rehashing `name` for every scope, we do it just once.
            const hash = Scope.getMemberHash(name);
            const allocator = p.allocator;

            const ref: Ref = brk: {
                var current: ?*Scope = p.current_scope;

                var did_forbid_arguments = false;

                while (current) |scope| : (current = current.?.parent) {
                    // Track if we're inside a "with" statement body
                    if (scope.kind == .with) {
                        is_inside_with_scope = true;
                    }

                    // Forbid referencing "arguments" inside class bodies
                    if (scope.forbid_arguments and !did_forbid_arguments and strings.eqlComptime(name, "arguments")) {
                        const r = js_lexer.rangeOfIdentifier(p.source, loc);
                        p.log.addRangeErrorFmt(p.source, r, allocator, "Cannot access \"{s}\" here", .{name}) catch unreachable;
                        did_forbid_arguments = true;
                    }

                    // Is the symbol a member of this scope?
                    if (scope.getMemberWithHash(name, hash)) |member| {
                        declare_loc = member.loc;
                        break :brk member.ref;
                    }

                    // Is the symbol a member of this scope's TypeScript namespace?
                    if (scope.ts_namespace) |ts_namespace| {
                        if (ts_namespace.exported_members.get(name)) |member| {
                            if (member.data.isEnum() == ts_namespace.is_enum_scope) {
                                declare_loc = member.loc;
                                // If this is an identifier from a sibling TypeScript namespace, then we're
                                // going to have to generate a property access instead of a simple reference.
                                // Lazily-generate an identifier that represents this property access.
                                const gop = try ts_namespace.property_accesses.getOrPut(p.allocator, name);
                                if (!gop.found_existing) {
                                    const ref = try p.newSymbol(.other, name);
                                    gop.value_ptr.* = ref;
                                    p.symbols.items[ref.inner_index].namespace_alias = .{
                                        .namespace_ref = ts_namespace.arg_ref,
                                        .alias = name,
                                    };
                                    break :brk ref;
                                }
                                break :brk gop.value_ptr.*;
                            }
                        }
                    }
                }

                // Allocate an "unbound" symbol
                p.checkForNonBMPCodePoint(loc, name);
                if (comptime !record_usage) {
                    return FindSymbolResult{
                        .ref = Ref.None,
                        .declare_loc = loc,
                        .is_inside_with_scope = is_inside_with_scope,
                    };
                }

                const gpe = p.module_scope.getOrPutMemberWithHash(allocator, name, hash) catch unreachable;

                // I don't think this happens?
                if (gpe.found_existing) {
                    const existing = gpe.value_ptr.*;
                    declare_loc = existing.loc;
                    break :brk existing.ref;
                }

                const _ref = p.newSymbol(.unbound, name) catch unreachable;

                gpe.key_ptr.* = name;
                gpe.value_ptr.* = js_ast.Scope.Member{ .ref = _ref, .loc = loc };

                declare_loc = loc;

                break :brk _ref;
            };

            // If we had to pass through a "with" statement body to get to the symbol
            // declaration, then this reference could potentially also refer to a
            // property on the target object of the "with" statement. We must not rename
            // it or we risk changing the behavior of the code.
            if (is_inside_with_scope) {
                p.symbols.items[ref.innerIndex()].must_not_be_renamed = true;
            }

            // Track how many times we've referenced this symbol
            if (comptime record_usage) p.recordUsage(ref);

            return FindSymbolResult{
                .ref = ref,
                .declare_loc = declare_loc,
                .is_inside_with_scope = is_inside_with_scope,
            };
        }
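        // Illustrative JavaScript for the "with" rule above: inside the body,
        // an identifier may resolve to a property of the target object at
        // runtime, so renaming it could change behavior.
        //
        //   with (obj) { x = 1 } // "x" might actually be obj.x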
    };
}

const string = []const u8;

const bun = @import("bun");
const js_lexer = bun.js_lexer;
const logger = bun.logger;
const strings = bun.strings;

const js_ast = bun.ast;
const Scope = js_ast.Scope;

const js_parser = bun.js_parser;
const FindSymbolResult = js_parser.FindSymbolResult;
const JSXTransformType = js_parser.JSXTransformType;
const Ref = js_parser.Ref;
const TypeScript = js_parser.TypeScript;
1367
src/ast/visit.zig
Normal file
File diff suppressed because it is too large
438
src/ast/visitBinaryExpression.zig
Normal file
@@ -0,0 +1,438 @@
pub fn CreateBinaryExpressionVisitor(
    comptime parser_feature__typescript: bool,
    comptime parser_feature__jsx: JSXTransformType,
    comptime parser_feature__scan_only: bool,
) type {
    return struct {
        const P = js_parser.NewParser_(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only);

        pub const BinaryExpressionVisitor = struct {
            e: *E.Binary,
            loc: logger.Loc,
            in: ExprIn,

            /// Input for visiting the left child
            left_in: ExprIn,

            /// "Local variables" passed from "checkAndPrepare" to "visitRightAndFinish"
            is_stmt_expr: bool = false,

            pub fn visitRightAndFinish(
                v: *BinaryExpressionVisitor,
                p: *P,
            ) Expr {
                var e_ = v.e;
                const is_call_target = @as(Expr.Tag, p.call_target) == .e_binary and e_ == p.call_target.e_binary;
                // const is_stmt_expr = @as(Expr.Tag, p.stmt_expr_value) == .e_binary and expr.data.e_binary == p.stmt_expr_value.e_binary;
                const was_anonymous_named_expr = e_.right.isAnonymousNamed();

                // Mark the control flow as dead if the branch is never taken
                switch (e_.op) {
                    .bin_logical_or => {
                        const side_effects = SideEffects.toBoolean(p, e_.left.data);
                        if (side_effects.ok and side_effects.value) {
                            // "true || dead"
                            const old = p.is_control_flow_dead;
                            p.is_control_flow_dead = true;
                            e_.right = p.visitExpr(e_.right);
                            p.is_control_flow_dead = old;
                        } else {
                            e_.right = p.visitExpr(e_.right);
                        }
                    },
                    .bin_logical_and => {
                        const side_effects = SideEffects.toBoolean(p, e_.left.data);
                        if (side_effects.ok and !side_effects.value) {
                            // "false && dead"
                            const old = p.is_control_flow_dead;
                            p.is_control_flow_dead = true;
                            e_.right = p.visitExpr(e_.right);
                            p.is_control_flow_dead = old;
                        } else {
                            e_.right = p.visitExpr(e_.right);
                        }
                    },
                    .bin_nullish_coalescing => {
                        const side_effects = SideEffects.toNullOrUndefined(p, e_.left.data);
                        if (side_effects.ok and !side_effects.value) {
                            // "notNullOrUndefined ?? dead"
                            const old = p.is_control_flow_dead;
                            p.is_control_flow_dead = true;
                            e_.right = p.visitExpr(e_.right);
                            p.is_control_flow_dead = old;
                        } else {
                            e_.right = p.visitExpr(e_.right);
                        }
                    },
                    else => {
                        e_.right = p.visitExpr(e_.right);
                    },
                }
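                // Illustrative JavaScript for the dead-branch marking above:
                //
                //   true || f()  // f() can never run; visited as dead code
                //   false && f() // likewise
                //   0 ?? f()     // likewise: 0 is never null or undefined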

                // Always put constants on the right for equality comparisons to help
                // reduce the number of cases we have to check during pattern matching. We
                // can only reorder expressions that do not have any side effects.
                switch (e_.op) {
                    .bin_loose_eq, .bin_loose_ne, .bin_strict_eq, .bin_strict_ne => {
                        if (SideEffects.isPrimitiveToReorder(e_.left.data) and !SideEffects.isPrimitiveToReorder(e_.right.data)) {
                            const _left = e_.left;
                            const _right = e_.right;
                            e_.left = _right;
                            e_.right = _left;
                        }
                    },
                    else => {},
                }
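                // Illustrative: the swap above canonicalizes "1 === x" into
                // "x === 1", which is safe only because reordering evaluation
                // of side-effect-free primitives cannot change behavior.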

                switch (e_.op) {
                    .bin_comma => {
                        // "(1, 2)" => "2"
                        // "(sideEffects(), 2)" => "(sideEffects(), 2)"
                        if (p.options.features.minify_syntax) {
                            e_.left = SideEffects.simplifyUnusedExpr(p, e_.left) orelse return e_.right;
                        }
                    },
                    .bin_loose_eq => {
                        const equality = e_.left.data.eql(e_.right.data, p, .loose);
                        if (equality.ok) {
                            if (equality.is_require_main_and_module) {
                                p.ignoreUsageOfRuntimeRequire();
                                p.ignoreUsage(p.module_ref);
                                return p.valueForImportMetaMain(false, v.loc);
                            }

                            return p.newExpr(
                                E.Boolean{ .value = equality.equal },
                                v.loc,
                            );
                        }

                        if (p.options.features.minify_syntax) {
                            // "x == void 0" => "x == null"
                            if (e_.left.data == .e_undefined) {
                                e_.left.data = .{ .e_null = E.Null{} };
                            } else if (e_.right.data == .e_undefined) {
                                e_.right.data = .{ .e_null = E.Null{} };
                            }
                        }

                        // const after_op_loc = locAfterOp(e_.);
                        // TODO: warn about equality check
                        // TODO: warn about typeof string
                    },
                    .bin_strict_eq => {
                        const equality = e_.left.data.eql(e_.right.data, p, .strict);
                        if (equality.ok) {
                            if (equality.is_require_main_and_module) {
                                p.ignoreUsage(p.module_ref);
                                p.ignoreUsageOfRuntimeRequire();
                                return p.valueForImportMetaMain(false, v.loc);
                            }

                            return p.newExpr(E.Boolean{ .value = equality.equal }, v.loc);
                        }

                        // const after_op_loc = locAfterOp(e_.);
                        // TODO: warn about equality check
                        // TODO: warn about typeof string
                    },
                    .bin_loose_ne => {
                        const equality = e_.left.data.eql(e_.right.data, p, .loose);
                        if (equality.ok) {
                            if (equality.is_require_main_and_module) {
                                p.ignoreUsage(p.module_ref);
                                p.ignoreUsageOfRuntimeRequire();
                                return p.valueForImportMetaMain(true, v.loc);
                            }

                            return p.newExpr(E.Boolean{ .value = !equality.equal }, v.loc);
                        }
                        // const after_op_loc = locAfterOp(e_.);
                        // TODO: warn about equality check
                        // TODO: warn about typeof string

                        // "x != void 0" => "x != null"
                        if (@as(Expr.Tag, e_.right.data) == .e_undefined) {
                            e_.right = p.newExpr(E.Null{}, e_.right.loc);
                        }
                    },
                    .bin_strict_ne => {
                        const equality = e_.left.data.eql(e_.right.data, p, .strict);
                        if (equality.ok) {
                            if (equality.is_require_main_and_module) {
                                p.ignoreUsage(p.module_ref);
                                p.ignoreUsageOfRuntimeRequire();
                                return p.valueForImportMetaMain(true, v.loc);
                            }

                            return p.newExpr(E.Boolean{ .value = !equality.equal }, v.loc);
                        }
                    },
                    .bin_nullish_coalescing => {
                        const nullorUndefined = SideEffects.toNullOrUndefined(p, e_.left.data);
                        if (nullorUndefined.ok) {
                            if (!nullorUndefined.value) {
                                return e_.left;
                            } else if (nullorUndefined.side_effects == .no_side_effects) {
                                // "(null ?? fn)()" => "fn()"
                                // "(null ?? this.fn)" => "this.fn"
                                // "(null ?? this.fn)()" => "(0, this.fn)()"
                                if (is_call_target and e_.right.hasValueForThisInCall()) {
                                    return Expr.joinWithComma(Expr{ .data = .{ .e_number = .{ .value = 0.0 } }, .loc = e_.left.loc }, e_.right, p.allocator);
                                }

                                return e_.right;
                            }
                        }
                    },
                    .bin_logical_or => {
                        const side_effects = SideEffects.toBoolean(p, e_.left.data);
                        if (side_effects.ok and side_effects.value) {
                            return e_.left;
                        } else if (side_effects.ok and side_effects.side_effects == .no_side_effects) {
                            // "(0 || fn)()" => "fn()"
                            // "(0 || this.fn)" => "this.fn"
                            // "(0 || this.fn)()" => "(0, this.fn)()"
                            if (is_call_target and e_.right.hasValueForThisInCall()) {
                                return Expr.joinWithComma(Expr{ .data = Prefill.Data.Zero, .loc = e_.left.loc }, e_.right, p.allocator);
                            }

                            return e_.right;
                        }
                    },
                    .bin_logical_and => {
                        const side_effects = SideEffects.toBoolean(p, e_.left.data);
                        if (side_effects.ok) {
                            if (!side_effects.value) {
                                return e_.left;
                            } else if (side_effects.side_effects == .no_side_effects) {
                                // "(1 && fn)()" => "fn()"
                                // "(1 && this.fn)" => "this.fn"
                                // "(1 && this.fn)()" => "(0, this.fn)()"
                                if (is_call_target and e_.right.hasValueForThisInCall()) {
                                    return Expr.joinWithComma(Expr{ .data = Prefill.Data.Zero, .loc = e_.left.loc }, e_.right, p.allocator);
                                }

                                return e_.right;
                            }
                        }
                    },
                    .bin_add => {
                        if (p.should_fold_typescript_constant_expressions) {
                            if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
                                return p.newExpr(E.Number{ .value = vals[0] + vals[1] }, v.loc);
                            }

                            // "'abc' + 'xyz'" => "'abcxyz'"
                            if (foldStringAddition(e_.left, e_.right, p.allocator, .normal)) |res| {
                                return res;
                            }

                            // "(x + 'abc') + 'xyz'" => "'abcxyz'"
                            if (e_.left.data.as(.e_binary)) |left| {
                                if (left.op == .bin_add) {
                                    if (foldStringAddition(left.right, e_.right, p.allocator, .nested_left)) |result| {
                                        return p.newExpr(E.Binary{
                                            .left = left.left,
                                            .right = result,
                                            .op = .bin_add,
                                        }, e_.left.loc);
                                    }
                                }
                            }
                        }
                    },
                    .bin_sub => {
                        if (p.should_fold_typescript_constant_expressions) {
                            if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
                                return p.newExpr(E.Number{ .value = vals[0] - vals[1] }, v.loc);
                            }
                        }
                    },
                    .bin_mul => {
                        if (p.should_fold_typescript_constant_expressions) {
                            if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
                                return p.newExpr(E.Number{ .value = vals[0] * vals[1] }, v.loc);
                            }
                        }
                    },
                    .bin_div => {
                        if (p.should_fold_typescript_constant_expressions) {
                            if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
                                return p.newExpr(E.Number{ .value = vals[0] / vals[1] }, v.loc);
                            }
                        }
                    },
                    .bin_rem => {
                        if (p.should_fold_typescript_constant_expressions) {
                            if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
                                const fmod = @extern(*const fn (f64, f64) callconv(.C) f64, .{ .name = "fmod" });
                                return p.newExpr(
                                    // Use libc fmod here to be consistent with what JavaScriptCore does
                                    // https://github.com/oven-sh/WebKit/blob/7a0b13626e5db69aa5a32d037431d381df5dfb61/Source/JavaScriptCore/runtime/MathCommon.cpp#L574-L597
                                    E.Number{ .value = if (comptime Environment.isNative) fmod(vals[0], vals[1]) else std.math.mod(f64, vals[0], vals[1]) catch 0 },
                                    v.loc,
                                );
                            }
                        }
                    },
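                    // Illustrative: JavaScript "%" on numbers follows C's fmod, e.g.
                    //
                    //   5.5 % 2 => 1.5
                    //   -5 % 2  => -1 (the result takes the dividend's sign)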
                    .bin_pow => {
                        if (p.should_fold_typescript_constant_expressions) {
                            if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
                                return p.newExpr(E.Number{ .value = jsc.math.pow(vals[0], vals[1]) }, v.loc);
                            }
                        }
                    },
                    .bin_shl => {
                        if (p.should_fold_typescript_constant_expressions) {
                            if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
                                const left = floatToInt32(vals[0]);
                                const right: u8 = @intCast(@as(u32, @bitCast(floatToInt32(vals[1]))) % 32);
                                const result: i32 = @bitCast(std.math.shl(i32, left, right));
                                return p.newExpr(E.Number{
                                    .value = @floatFromInt(result),
                                }, v.loc);
                            }
                        }
                    },
                    .bin_shr => {
                        if (p.should_fold_typescript_constant_expressions) {
                            if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
                                const left = floatToInt32(vals[0]);
                                const right: u8 = @intCast(@as(u32, @bitCast(floatToInt32(vals[1]))) % 32);
                                const result: i32 = @bitCast(std.math.shr(i32, left, right));
                                return p.newExpr(E.Number{
                                    .value = @floatFromInt(result),
                                }, v.loc);
                            }
                        }
                    },
                    .bin_u_shr => {
                        if (p.should_fold_typescript_constant_expressions) {
                            if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
                                const left: u32 = @bitCast(floatToInt32(vals[0]));
                                const right: u8 = @intCast(@as(u32, @bitCast(floatToInt32(vals[1]))) % 32);
                                const result: u32 = std.math.shr(u32, left, right);
                                return p.newExpr(E.Number{
                                    .value = @floatFromInt(result),
                                }, v.loc);
                            }
                        }
                    },
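                    // Illustrative: the shift foldings above match JavaScript's
                    // ToInt32/ToUint32 semantics, including masking the shift count:
                    //
                    //   1 << 33  => 2          (33 % 32 == 1)
                    //   -1 >>> 0 => 4294967295 (reinterpreted as unsigned)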
                    .bin_bitwise_and => {
                        if (p.should_fold_typescript_constant_expressions) {
                            if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
                                return p.newExpr(E.Number{
                                    .value = @floatFromInt((floatToInt32(vals[0]) & floatToInt32(vals[1]))),
                                }, v.loc);
                            }
                        }
                    },
                    .bin_bitwise_or => {
                        if (p.should_fold_typescript_constant_expressions) {
                            if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
                                return p.newExpr(E.Number{
                                    .value = @floatFromInt((floatToInt32(vals[0]) | floatToInt32(vals[1]))),
                                }, v.loc);
                            }
                        }
                    },
                    .bin_bitwise_xor => {
                        if (p.should_fold_typescript_constant_expressions) {
                            if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
                                return p.newExpr(E.Number{
                                    .value = @floatFromInt((floatToInt32(vals[0]) ^ floatToInt32(vals[1]))),
                                }, v.loc);
                            }
                        }
                    },
                    // ---------------------------------------------------------------------------------------------------
                    .bin_assign => {
                        // Optionally preserve the name
                        if (e_.left.data == .e_identifier) {
                            e_.right = p.maybeKeepExprSymbolName(e_.right, p.symbols.items[e_.left.data.e_identifier.ref.innerIndex()].original_name, was_anonymous_named_expr);
                        }
                    },
                    .bin_nullish_coalescing_assign, .bin_logical_or_assign => {
                        // Special case `{}.field ??= value` to minify to `value`
                        // This optimization is specifically to target this pattern in HMR:
                        // `import.meta.hot.data.etc ??= init()`
                        if (e_.left.data.as(.e_dot)) |dot| {
                            if (dot.target.data.as(.e_object)) |obj| {
                                if (obj.properties.len == 0) {
                                    if (!bun.strings.eqlComptime(dot.name, "__proto__"))
                                        return e_.right;
                                }
                            }
                        }
                    },
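                    // Illustrative: the special case above rewrites
                    //
                    //   ({}).data ??= init()  =>  init()
                    //
                    // since the fresh empty object is unobservable; "__proto__"
                    // is conservatively excluded.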
                    else => {},
                }

                return Expr{ .loc = v.loc, .data = .{ .e_binary = e_ } };
            }

            pub fn checkAndPrepare(v: *BinaryExpressionVisitor, p: *P) ?Expr {
                var e_ = v.e;
                switch (e_.left.data) {
                    // Special-case private identifiers
                    .e_private_identifier => |_private| {
                        if (e_.op == .bin_in) {
                            var private = _private;
                            const name = p.loadNameFromRef(private.ref);
                            const result = p.findSymbol(e_.left.loc, name) catch unreachable;
                            private.ref = result.ref;

                            // Unlike regular identifiers, there are no unbound private identifiers
                            const kind: Symbol.Kind = p.symbols.items[result.ref.innerIndex()].kind;
                            if (!Symbol.isKindPrivate(kind)) {
                                const r = logger.Range{ .loc = e_.left.loc, .len = @as(i32, @intCast(name.len)) };
                                p.log.addRangeErrorFmt(p.source, r, p.allocator, "Private name \"{s}\" must be declared in an enclosing class", .{name}) catch unreachable;
                            }

                            e_.right = p.visitExpr(e_.right);
                            e_.left = .{ .data = .{ .e_private_identifier = private }, .loc = e_.left.loc };

                            // privateSymbolNeedsToBeLowered
                            return Expr{ .loc = v.loc, .data = .{ .e_binary = e_ } };
                        }
                    },
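                    // Illustrative JavaScript: the case above handles ergonomic
                    // brand checks, where "#x in o" tests whether "o" carries
                    // the private field:
                    //
                    //   class C { #x; static has(o) { return #x in o; } }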
                    else => {},
                }

                v.is_stmt_expr = p.stmt_expr_value == .e_binary and p.stmt_expr_value.e_binary == e_;

                v.left_in = ExprIn{
                    .assign_target = e_.op.binaryAssignTarget(),
                };

                return null;
            }
        };
    };
}

const string = []const u8;

const std = @import("std");

const bun = @import("bun");
const Environment = bun.Environment;
const jsc = bun.jsc;
const logger = bun.logger;
const strings = bun.strings;

const js_ast = bun.ast;
const E = js_ast.E;
const Expr = js_ast.Expr;
const Symbol = js_ast.Symbol;

const js_parser = bun.js_parser;
const ExprIn = js_parser.ExprIn;
const JSXTransformType = js_parser.JSXTransformType;
const Prefill = js_parser.Prefill;
const SideEffects = js_parser.SideEffects;
const floatToInt32 = js_parser.floatToInt32;
const foldStringAddition = js_parser.foldStringAddition;
const options = js_parser.options;
1641
src/ast/visitExpr.zig
Normal file
File diff suppressed because it is too large
1571
src/ast/visitStmt.zig
Normal file
File diff suppressed because it is too large