mirror of
https://github.com/oven-sh/bun
synced 2026-02-13 20:39:05 +00:00
24171 lines
1.1 MiB
24171 lines
1.1 MiB
/// ** IMPORTANT **
|
||
/// ** When making changes to the JavaScript Parser that impact runtime behavior or fix bugs **
|
||
/// ** you must also increment the `expected_version` in RuntimeTranspilerCache.zig **
|
||
/// ** IMPORTANT **
|
||
pub const std = @import("std");
|
||
pub const logger = bun.logger;
|
||
pub const js_lexer = bun.js_lexer;
|
||
pub const importRecord = @import("./import_record.zig");
|
||
pub const js_ast = bun.JSAst;
|
||
pub const options = @import("./options.zig");
|
||
pub const js_printer = bun.js_printer;
|
||
pub const renamer = @import("./renamer.zig");
|
||
const _runtime = @import("./runtime.zig");
|
||
pub const RuntimeImports = _runtime.Runtime.Imports;
|
||
pub const RuntimeFeatures = _runtime.Runtime.Features;
|
||
pub const RuntimeNames = _runtime.Runtime.Names;
|
||
pub const fs = @import("./fs.zig");
|
||
const bun = @import("root").bun;
|
||
const string = bun.string;
|
||
const Output = bun.Output;
|
||
const Global = bun.Global;
|
||
const Environment = bun.Environment;
|
||
const strings = bun.strings;
|
||
const MutableString = @import("./string_mutable.zig").MutableString;
|
||
const stringZ = bun.stringZ;
|
||
const default_allocator = bun.default_allocator;
|
||
const C = bun.C;
|
||
const G = js_ast.G;
|
||
const Define = @import("./defines.zig").Define;
|
||
const DefineData = @import("./defines.zig").DefineData;
|
||
const FeatureFlags = @import("./feature_flags.zig");
|
||
pub const isPackagePath = @import("./resolver/resolver.zig").isPackagePath;
|
||
pub const ImportKind = importRecord.ImportKind;
|
||
pub const BindingNodeIndex = js_ast.BindingNodeIndex;
|
||
const Decl = G.Decl;
|
||
const Property = G.Property;
|
||
const Arg = G.Arg;
|
||
const Allocator = std.mem.Allocator;
|
||
pub const StmtNodeIndex = js_ast.StmtNodeIndex;
|
||
pub const ExprNodeIndex = js_ast.ExprNodeIndex;
|
||
pub const ExprNodeList = js_ast.ExprNodeList;
|
||
pub const StmtNodeList = js_ast.StmtNodeList;
|
||
pub const BindingNodeList = js_ast.BindingNodeList;
|
||
const DeclaredSymbol = js_ast.DeclaredSymbol;
|
||
const JSC = bun.JSC;
|
||
const Index = @import("./ast/base.zig").Index;
|
||
|
||
/// Stand-in for `assert` in builds where assertions are compiled out.
/// Any call site that reaches semantic analysis without being wrapped in
/// `if (Environment.allow_assert)` trips the `@compileError`, enforcing the
/// guard convention at compile time. The `unreachable` is never executed.
fn _disabledAssert(_: bool) void {
    if (!Environment.allow_assert) @compileError("assert is missing an if (Environment.allow_assert)");
    unreachable;
}
|
||
|
||
const assert = if (Environment.allow_assert) bun.assert else _disabledAssert;
|
||
const debug = Output.scoped(.JSParser, false);
|
||
/// An expression list paired with the source location where it begins.
const ExprListLoc = struct {
    list: ExprNodeList,
    loc: logger.Loc,
};
|
||
pub const LocRef = js_ast.LocRef;
|
||
pub const S = js_ast.S;
|
||
pub const B = js_ast.B;
|
||
pub const T = js_lexer.T;
|
||
pub const E = js_ast.E;
|
||
pub const Stmt = js_ast.Stmt;
|
||
pub const Expr = js_ast.Expr;
|
||
pub const Binding = js_ast.Binding;
|
||
pub const Symbol = js_ast.Symbol;
|
||
pub const Level = js_ast.Op.Level;
|
||
pub const Op = js_ast.Op;
|
||
pub const Scope = js_ast.Scope;
|
||
pub const locModuleScope = logger.Loc{ .start = -100 };
|
||
const Ref = @import("./ast/base.zig").Ref;
|
||
const RefHashCtx = @import("./ast/base.zig").RefHashCtx;
|
||
|
||
pub const StringHashMap = bun.StringHashMap;
|
||
pub const AutoHashMap = std.AutoHashMap;
|
||
const StringHashMapUnmanaged = bun.StringHashMapUnmanaged;
|
||
const ObjectPool = @import("./pool.zig").ObjectPool;
|
||
const NodeFallbackModules = @import("./node_fallbacks.zig");
|
||
|
||
/// Associates a namespace symbol with the import record it came from, for
/// imports whose handling is deferred.
const DeferredImportNamespace = struct {
    // Location + ref of the namespace identifier.
    namespace: LocRef,
    // Index into the parser's import record list.
    import_record_id: u32,
};
|
||
|
||
/// Outcome of attempting to skip over a TypeScript type parameter list.
const SkipTypeParameterResult = enum {
    did_not_skip_anything,
    /// Ambiguous: what was skipped could still be a type cast like `<T>expr`.
    could_be_type_cast,
    definitely_type_parameters,
};
|
||
|
||
/// Options controlling how a TypeScript type parameter list is parsed.
const TypeParameterFlag = packed struct {
    /// TypeScript 4.7
    allow_in_out_variance_annotations: bool = false,

    /// TypeScript 5.0
    allow_const_modifier: bool = false,

    /// Allow "<>" without any type parameters
    allow_empty_type_parameters: bool = false,
};
|
||
|
||
/// The helper symbols a JSX transform may reference: the automatic-runtime
/// helpers (jsx / jsxDEV / jsxs / Fragment) and the classic-runtime
/// createElement.
const JSXImport = enum {
    jsx,
    jsxDEV,
    jsxs,
    Fragment,
    createElement,

    /// Tracks which JSX helper symbols were actually used in a file.
    /// A `null` entry means that helper was never referenced.
    pub const Symbols = struct {
        jsx: ?LocRef = null,
        jsxDEV: ?LocRef = null,
        jsxs: ?LocRef = null,
        Fragment: ?LocRef = null,
        createElement: ?LocRef = null,

        /// Look up a helper's Ref by its import name; null if unused.
        pub fn get(this: *const Symbols, name: []const u8) ?Ref {
            if (strings.eqlComptime(name, "jsx")) return if (this.jsx) |jsx| jsx.ref.? else null;
            if (strings.eqlComptime(name, "jsxDEV")) return if (this.jsxDEV) |jsx| jsx.ref.? else null;
            if (strings.eqlComptime(name, "jsxs")) return if (this.jsxs) |jsxs| jsxs.ref.? else null;
            if (strings.eqlComptime(name, "Fragment")) return if (this.Fragment) |Fragment| Fragment.ref.? else null;
            if (strings.eqlComptime(name, "createElement")) return if (this.createElement) |createElement| createElement.ref.? else null;
            return null;
        }

        /// Same lookup as `get`, keyed by enum tag instead of string.
        pub fn getWithTag(this: *const Symbols, tag: JSXImport) ?Ref {
            return switch (tag) {
                .jsx => if (this.jsx) |jsx| jsx.ref.? else null,
                .jsxDEV => if (this.jsxDEV) |jsx| jsx.ref.? else null,
                .jsxs => if (this.jsxs) |jsxs| jsxs.ref.? else null,
                .Fragment => if (this.Fragment) |Fragment| Fragment.ref.? else null,
                .createElement => if (this.createElement) |createElement| createElement.ref.? else null,
            };
        }

        /// Collect the names that must be imported from the automatic JSX
        /// runtime module, writing into `buf` and returning the filled prefix.
        /// At most 3 entries: jsx OR jsxDEV (mutually exclusive), jsxs,
        /// Fragment. createElement is handled by `sourceImportNames`.
        pub fn runtimeImportNames(this: *const Symbols, buf: *[3]string) []const string {
            var i: usize = 0;
            if (this.jsxDEV != null) {
                bun.assert(this.jsx == null); // we should never end up with this in the same file
                buf[i] = "jsxDEV";
                i += 1;
            }

            if (this.jsx != null) {
                bun.assert(this.jsxDEV == null); // we should never end up with this in the same file
                // Index with `i` (not a hard-coded 0) so the write stays
                // in-bounds even if the mutual-exclusion assert above is
                // compiled out in release builds. When the invariant holds,
                // `i` is 0 here, so behavior is unchanged.
                buf[i] = "jsx";
                i += 1;
            }

            if (this.jsxs != null) {
                buf[i] = "jsxs";
                i += 1;
            }

            if (this.Fragment != null) {
                buf[i] = "Fragment";
                i += 1;
            }

            return buf[0..i];
        }

        /// Names to import from the classic JSX source (e.g. "react"):
        /// just createElement, when it was used.
        pub fn sourceImportNames(this: *const Symbols) []const string {
            return if (this.createElement != null) &[_]string{"createElement"} else &[_]string{};
        }
    };
};
|
||
|
||
const arguments_str: string = "arguments";
|
||
|
||
// Dear reader,
|
||
// There are some things you should know about this file to make it easier for humans to read
|
||
// "P" is the internal parts of the parser
|
||
// "p.e" allocates a new Expr
|
||
// "p.b" allocates a new Binding
|
||
// "p.s" allocates a new Stmt
|
||
// We do it this way so if we want to refactor how these are allocated in the future, we only have to modify one function to change it everywhere
|
||
// Everything in JavaScript is either an Expression, a Binding, or a Statement.
|
||
// Expression: foo(1)
|
||
// Statement: let a = 1;
|
||
// Binding: a
|
||
// While the names for Expr, Binding, and Stmt are directly copied from esbuild, those were likely inspired by Go's parser.
|
||
// which is another example of a very fast parser.
|
||
|
||
const ScopeOrderList = std.ArrayListUnmanaged(?ScopeOrder);
|
||
|
||
const JSXFactoryName = "JSX";
|
||
const JSXAutomaticName = "jsx_module";
|
||
// kept as a static reference
|
||
const exports_string_name: string = "exports";
|
||
const MacroRefs = std.AutoArrayHashMap(Ref, u32);
|
||
|
||
/// Object pool of string lists, reused to avoid re-allocating name buffers.
/// See `ObjectPool` for the meaning of the trailing arguments.
pub const AllocatedNamesPool = ObjectPool(
    std.ArrayList(string),
    struct {
        pub fn init(allocator: std.mem.Allocator) anyerror!std.ArrayList(string) {
            return std.ArrayList(string).init(allocator);
        }
    }.init,
    true,
    4,
);
|
||
|
||
/// Result of an attempted expression substitution: the rewritten expression
/// on success or failure, or `continue_` to keep scanning.
const Substitution = union(enum) {
    success: Expr,
    failure: Expr,
    continue_: Expr,
};
|
||
|
||
/// Concatenate two `E.String`s, mutating BOTH inputs
/// unless `has_inlined_enum_poison` is set.
///
/// Currently inlined enum poison refers to where mutation would cause output
/// bugs due to inlined enum values sharing `E.String`s. If a new use case
/// besides inlined enums comes up to set this to true, please rename the
/// variable and document it.
fn joinStrings(left: *const E.String, right: *const E.String, has_inlined_enum_poison: bool) E.String {
    var new = if (has_inlined_enum_poison)
        // Inlined enums can be shared by multiple call sites. In
        // this case, we need to ensure that the ENTIRE rope is
        // cloned. In other situations, the lhs doesn't have any
        // other owner, so it is fine to mutate `lhs.data.end.next`.
        //
        // Consider the following case:
        //   const enum A {
        //     B = "a" + "b",
        //     D = B + "d",
        //   };
        //   console.log(A.B, A.D);
        left.cloneRopeNodes()
    else
        left.*;

    // Similarly, the right side has to be cloned for an enum rope too.
    //
    // Consider the following case:
    //   const enum A {
    //     B = "1" + "2",
    //     C = ("3" + B) + "4",
    //   };
    //   console.log(A.B, A.C);
    const rhs_clone = Expr.Data.Store.append(E.String, if (has_inlined_enum_poison)
        right.cloneRopeNodes()
    else
        right.*);

    // Append the (possibly cloned) rhs onto the lhs rope.
    new.push(rhs_clone);

    // If either side preferred template-literal output, the join does too.
    new.prefer_template = new.prefer_template or rhs_clone.prefer_template;

    return new;
}
|
||
|
||
/// Transforming the left operand into a string is not safe if it comes from a
/// nested AST node.
const FoldStringAdditionKind = enum {
    // "x" + "y" -> "xy"
    // 1 + "y" -> "1y"
    normal,
    // a + "x" + "y" -> a + "xy"
    // a + 1 + "y" -> a + 1 + "y" (left addend must not be coerced to a string)
    nested_left,
};
|
||
|
||
// NOTE: unlike esbuild's js_ast_helpers.FoldStringAddition, this does mutate
// the input AST in the case of rope strings
/// Attempt to constant-fold `l + r` when the result is a compile-time string.
/// Returns the folded expression, or null when no folding is possible.
/// May mutate `l`'s template/rope nodes in place (see notes in `joinStrings`).
fn foldStringAddition(l: Expr, r: Expr, allocator: std.mem.Allocator, kind: FoldStringAdditionKind) ?Expr {
    // "See through" inline enum constants
    // TODO: implement foldAdditionPreProcess to fold some more things :)
    var lhs = l.unwrapInlined();
    var rhs = r.unwrapInlined();

    if (kind != .nested_left) {
        // See comment on `FoldStringAdditionKind` for examples
        switch (rhs.data) {
            .e_string, .e_template => {
                // Only coerce the lhs to a string when the rhs guarantees the
                // whole addition is a string concatenation.
                if (lhs.toStringExprWithoutSideEffects(allocator)) |str| {
                    lhs = str;
                }
            },
            else => {},
        }
    }

    switch (lhs.data) {
        .e_string => |left| {
            // lhs is a known string, so a stringifiable rhs can be folded.
            if (rhs.toStringExprWithoutSideEffects(allocator)) |str| {
                rhs = str;
            }

            if (left.isUTF8()) {
                switch (rhs.data) {
                    // "bar" + "baz" => "barbaz"
                    .e_string => |right| {
                        if (right.isUTF8()) {
                            // Poison when either operand came from an inlined
                            // enum: its E.String may be shared, so joinStrings
                            // must clone rather than mutate.
                            const has_inlined_enum_poison =
                                l.data == .e_inlined_enum or
                                r.data == .e_inlined_enum;

                            return Expr.init(E.String, joinStrings(
                                left,
                                right,
                                has_inlined_enum_poison,
                            ), lhs.loc);
                        }
                    },
                    // "bar" + `baz${bar}` => `barbaz${bar}`
                    .e_template => |right| {
                        if (right.head.isUTF8()) {
                            return Expr.init(E.Template, E.Template{
                                .parts = right.parts,
                                .head = .{ .cooked = joinStrings(
                                    left,
                                    &right.head.cooked,
                                    l.data == .e_inlined_enum,
                                ) },
                            }, l.loc);
                        }
                    },
                    else => {
                        // other constant-foldable ast nodes would have been converted to .e_string
                    },
                }

                // "'x' + `y${z}`" => "`xy${z}`"
                // NOTE(review): this branch is an empty no-op — the case it
                // describes is already handled by the .e_template prong above.
                // Looks like leftover scaffolding; confirm before removing.
                if (rhs.data == .e_template and rhs.data.e_template.tag == null) {}
            }

            // "" + x => x, when x is already known to be a string.
            if (left.len() == 0 and rhs.knownPrimitive() == .string) {
                return rhs;
            }

            return null;
        },

        .e_template => |left| {
            // "`${x}` + 0" => "`${x}` + '0'"
            if (rhs.toStringExprWithoutSideEffects(allocator)) |str| {
                rhs = str;
            }

            // Tagged templates are function calls and must not be folded.
            if (left.tag == null) {
                switch (rhs.data) {
                    // `foo${bar}` + "baz" => `foo${bar}baz`
                    .e_string => |right| {
                        if (right.isUTF8()) {
                            // Mutation of this node is fine because it will be not
                            // be shared by other places. Note that e_template will
                            // be treated by enums as strings, but will not be
                            // inlined unless they could be converted into
                            // .e_string.
                            if (left.parts.len > 0) {
                                // Append onto the tail of the last part.
                                const i = left.parts.len - 1;
                                const last = left.parts[i];
                                if (last.tail.isUTF8()) {
                                    left.parts[i].tail = .{ .cooked = joinStrings(
                                        &last.tail.cooked,
                                        right,
                                        r.data == .e_inlined_enum,
                                    ) };
                                    return lhs;
                                }
                            } else {
                                // No substitutions: append onto the head.
                                if (left.head.isUTF8()) {
                                    left.head = .{ .cooked = joinStrings(
                                        &left.head.cooked,
                                        right,
                                        r.data == .e_inlined_enum,
                                    ) };
                                    return lhs;
                                }
                            }
                        }
                    },
                    // `foo${bar}` + `a${hi}b` => `foo${bar}a${hi}b`
                    .e_template => |right| {
                        if (right.tag == null and right.head.isUTF8()) {
                            if (left.parts.len > 0) {
                                // Join left's last tail with right's head, then
                                // splice right's parts onto left's.
                                const i = left.parts.len - 1;
                                const last = left.parts[i];
                                if (last.tail.isUTF8() and right.head.isUTF8()) {
                                    left.parts[i].tail = .{ .cooked = joinStrings(
                                        &last.tail.cooked,
                                        &right.head.cooked,
                                        r.data == .e_inlined_enum,
                                    ) };

                                    left.parts = if (right.parts.len == 0)
                                        left.parts
                                    else
                                        std.mem.concat(
                                            allocator,
                                            E.TemplatePart,
                                            &.{ left.parts, right.parts },
                                        ) catch bun.outOfMemory();
                                    return lhs;
                                }
                            } else {
                                // Left has no substitutions: merge heads and
                                // adopt right's parts wholesale.
                                if (left.head.isUTF8() and right.head.isUTF8()) {
                                    left.head = .{ .cooked = joinStrings(
                                        &left.head.cooked,
                                        &right.head.cooked,
                                        r.data == .e_inlined_enum,
                                    ) };
                                    left.parts = right.parts;
                                    return lhs;
                                }
                            }
                        }
                    },
                    else => {
                        // other constant-foldable ast nodes would have been converted to .e_string
                    },
                }
            }
        },

        else => {
            // other constant-foldable ast nodes would have been converted to .e_string
        },
    }

    // x + "" => x, when x is already known to be a string.
    if (rhs.data.as(.e_string)) |right| {
        if (right.len() == 0 and lhs.knownPrimitive() == .string) {
            return lhs;
        }
    }

    return null;
}
|
||
|
||
// If we are currently in a hoisted child of the module scope, relocate these
|
||
// declarations to the top level and return an equivalent assignment statement.
|
||
// Make sure to check that the declaration kind is "var" before calling this.
|
||
// And make sure to check that the returned statement is not the zero value.
|
||
//
|
||
// This is done to make some transformations non-destructive
|
||
// Without relocating vars to the top level, simplifying this:
|
||
// if (false) var foo = 1;
|
||
// to nothing is unsafe
|
||
// Because "foo" was defined. And now it's not.
|
||
/// Result of relocating a hoisted `var` declaration to the top level.
/// `ok == false` is the "zero value": no relocation happened.
pub const RelocateVars = struct {
    pub const Mode = enum { normal, for_in_or_for_of };

    /// Equivalent assignment statement to use in place of the declaration,
    /// if one is needed.
    stmt: ?Stmt = null,
    ok: bool = false,
};
|
||
|
||
/// Options passed when visiting a function's argument list.
const VisitArgsOpts = struct {
    body: []Stmt = &([_]Stmt{}),
    has_rest_arg: bool = false,

    // This is true if the function is an arrow function or a method
    is_unique_formal_parameters: bool = false,
};
|
||
|
||
/// Builds a helper type that pushes `visitor` down through the arms of `E.If`
/// ternaries: `cond ? a : b` becomes `cond ? visit(a) : visit(b)`, recursing
/// into nested ternaries, while any other expression is visited directly.
pub fn ExpressionTransposer(
    comptime ContextType: type,
    comptime StateType: type,
    comptime visitor: fn (ptr: *ContextType, arg: Expr, state: StateType) Expr,
) type {
    return struct {
        pub const Context = ContextType;
        pub const This = @This();

        context: *Context,

        pub fn init(c: *Context) This {
            return .{ .context = c };
        }

        /// Recurse into both branches of a ternary; anything else goes
        /// straight to `visitor`.
        pub fn maybeTransposeIf(self: *This, arg: Expr, state: StateType) Expr {
            return switch (arg.data) {
                .e_if => |ternary| Expr.init(E.If, .{
                    .yes = self.maybeTransposeIf(ternary.yes, state),
                    .no = self.maybeTransposeIf(ternary.no, state),
                    .test_ = ternary.test_,
                }, arg.loc),
                else => visitor(self.context, arg, state),
            };
        }

        /// Like `maybeTransposeIf`, but the caller guarantees `arg` is an
        /// `e_if` node.
        pub fn transposeKnownToBeIf(self: *This, arg: Expr, state: StateType) Expr {
            const ternary = arg.data.e_if;
            return Expr.init(E.If, .{
                .yes = self.maybeTransposeIf(ternary.yes, state),
                .no = self.maybeTransposeIf(ternary.no, state),
                .test_ = ternary.test_,
            }, arg.loc);
        }
    };
}
|
||
|
||
/// Returns the location of the operand that appears textually after the
/// operator. Normally that is the right operand; if the operands were
/// transposed during folding, the left operand's location is later.
pub fn locAfterOp(e: E.Binary) logger.Loc {
    const operands_transposed = e.left.loc.start >= e.right.loc.start;
    return if (operands_transposed) e.left.loc else e.right.loc;
}
|
||
const ExportsStringName = "exports";
|
||
|
||
/// State threaded through expression transposition.
const TransposeState = struct {
    is_await_target: bool = false,
    is_then_catch_target: bool = false,
    is_require_immediately_assigned_to_decl: bool = false,
    loc: logger.Loc = logger.Loc.Empty,
    // Tag to stamp onto any import record created during transposition.
    import_record_tag: ?ImportRecord.Tag = null,
    // Options expression of a dynamic `import(specifier, options)`, if any.
    import_options: Expr = Expr.empty,
};
|
||
|
||
/// A parsed JSX tag: either a fragment (`<>`) or a tag expression
/// (`<div>`, `<Button>`, `<Button.Red>`).
const JSXTag = struct {
    pub const TagType = enum { fragment, tag };
    pub const Data = union(TagType) {
        fragment: u8,
        tag: Expr,

        /// The tag expression, or null for fragments.
        pub fn asExpr(d: *const Data) ?ExprNodeIndex {
            switch (d.*) {
                .tag => |tag| {
                    return tag;
                },
                else => {
                    return null;
                },
            }
        }
    };
    data: Data,
    range: logger.Range,
    /// Empty string for fragments.
    name: string,

    /// Parse the tag name after `<`, leaving the lexer positioned after the
    /// tag. Lowercase-leading names become string literals (intrinsic
    /// elements); others become identifier / member-expression tags.
    pub fn parse(comptime P: type, p: *P) anyerror!JSXTag {
        const loc = p.lexer.loc();

        // A missing tag is a fragment
        if (p.lexer.token == .t_greater_than) {
            return JSXTag{
                .range = logger.Range{ .loc = loc, .len = 0 },
                .data = Data{ .fragment = 1 },
                .name = "",
            };
        }

        // The tag is an identifier
        var name = p.lexer.identifier;
        var tag_range = p.lexer.range();
        try p.lexer.expectInsideJSXElementWithName(.t_identifier, "JSX element name");

        // Certain identifiers are strings
        // <div
        // <button
        // <Hello-:Button
        if (strings.containsComptime(name, "-:") or (p.lexer.token != .t_dot and name[0] >= 'a' and name[0] <= 'z')) {
            return JSXTag{
                .data = Data{ .tag = p.newExpr(E.String{
                    .data = name,
                }, loc) },
                .range = tag_range,
                .name = name,
            };
        }

        // Otherwise, this is an identifier
        // <Button>
        var tag = p.newExpr(E.Identifier{ .ref = try p.storeNameInRef(name) }, loc);

        // Parse a member expression chain
        // <Button.Red>
        while (p.lexer.token == .t_dot) {
            try p.lexer.nextInsideJSXElement();
            const member_range = p.lexer.range();
            const member = p.lexer.identifier;
            try p.lexer.expectInsideJSXElement(.t_identifier);

            // Dashes are not allowed in member expression chains
            if (strings.indexOfChar(member, '-')) |index| {
                try p.log.addError(p.source, logger.Loc{ .start = member_range.loc.start + @as(i32, @intCast(index)) }, "Unexpected \"-\"");
                return error.SyntaxError;
            }

            // Accumulate the dotted name, e.g. "Button" -> "Button.Red".
            var _name = try p.allocator.alloc(u8, name.len + 1 + member.len);
            bun.copy(u8, _name, name);
            _name[name.len] = '.';
            bun.copy(u8, _name[name.len + 1 .. _name.len], member);
            name = _name;
            // Extend the range to cover the newest member.
            tag_range.len = member_range.loc.start + member_range.len - tag_range.loc.start;
            tag = p.newExpr(E.Dot{ .target = tag, .name = member, .name_loc = member_range.loc }, loc);
        }

        return JSXTag{ .data = Data{ .tag = tag }, .range = tag_range, .name = name };
    }
};
|
||
|
||
pub const TypeScript = struct {
|
||
// This function is taken from the official TypeScript compiler source code:
|
||
// https://github.com/microsoft/TypeScript/blob/master/src/compiler/parser.ts
|
||
/// After tentatively parsing `foo<T, ...>`, decide from the next token
/// whether it was really a type argument list (vs. comparison operators).
pub fn canFollowTypeArgumentsInExpression(p: anytype) bool {
    return switch (p.lexer.token) {
        // These are the only tokens that can legally follow a type argument list. So we
        // definitely want to treat them as type arg lists.
        .t_open_paren, // foo<x>(
        .t_no_substitution_template_literal, // foo<T> `...`
        // foo<T> `...${100}...`
        .t_template_head,
        => true,

        // A type argument list followed by `<` never makes sense, and a type argument list followed
        // by `>` is ambiguous with a (re-scanned) `>>` operator, so we disqualify both. Also, in
        // this context, `+` and `-` are unary operators, not binary operators.
        .t_less_than,
        .t_greater_than,
        .t_plus,
        .t_minus,
        // TypeScript always sees "t_greater_than" instead of these tokens since
        // their scanner works a little differently than our lexer. So since
        // "t_greater_than" is forbidden above, we also forbid these too.
        .t_greater_than_equals,
        .t_greater_than_greater_than,
        .t_greater_than_greater_than_equals,
        .t_greater_than_greater_than_greater_than,
        .t_greater_than_greater_than_greater_than_equals,
        => false,

        // We favor the type argument list interpretation when it is immediately followed by
        // a line break, a binary operator, or something that can't start an expression.
        else => p.lexer.has_newline_before or isBinaryOperator(p) or !isStartOfExpression(p),
    };
}
|
||
|
||
/// Serialized type metadata for TypeScript's `emitDecoratorMetadata`:
/// an approximation of a type annotation, reduced to the runtime
/// constructor it serializes to (Object, String, Number, ...).
pub const Metadata = union(enum) {
    // No type computed yet (or intentionally replaced; see finish/merge).
    m_none: void,

    m_never: void,
    m_unknown: void,
    m_any: void,
    m_void: void,
    m_null: void,
    m_undefined: void,
    m_function: void,
    m_array: void,
    m_boolean: void,
    m_string: void,
    m_object: void,
    m_number: void,
    m_bigint: void,
    m_symbol: void,
    m_promise: void,
    // A named type reference (e.g. a class used as an annotation).
    m_identifier: Ref,
    // A dotted type reference, e.g. `A.B.C`.
    m_dot: List(Ref),

    pub const default: @This() = .m_none;

    // the logic in finishUnion, mergeUnion, finishIntersection and mergeIntersection is
    // translated from:
    // https://github.com/microsoft/TypeScript/blob/e0a324b0503be479f2b33fd2e17c6e86c94d1297/src/compiler/transformers/typeSerializer.ts#L402

    /// Return the final union type if possible, or return null to continue merging.
    ///
    /// If the current type is m_never, m_null, or m_undefined assign the current type
    /// to m_none and return null to ensure it's always replaced by the next type.
    pub fn finishUnion(current: *@This(), p: anytype) ?@This() {
        return switch (current.*) {
            .m_identifier => |ref| {
                // A reference literally named "Object" short-circuits to m_object.
                if (strings.eqlComptime(p.loadNameFromRef(ref), "Object")) {
                    return .m_object;
                }
                return null;
            },

            .m_unknown,
            .m_any,
            .m_object,
            => .m_object,

            .m_never,
            .m_null,
            .m_undefined,
            => {
                current.* = .m_none;
                return null;
            },

            else => null,
        };
    }

    /// Fold `left` into `result` for a union type: differing member types
    /// collapse to m_object, except never/undefined/null which yield to the
    /// other side. m_none on the left is skipped entirely.
    pub fn mergeUnion(result: *@This(), left: @This()) void {
        if (left != .m_none) {
            if (std.meta.activeTag(result.*) != std.meta.activeTag(left)) {
                result.* = switch (result.*) {
                    .m_never,
                    .m_undefined,
                    .m_null,
                    => left,

                    else => .m_object,
                };
            } else {
                switch (result.*) {
                    .m_identifier => |ref| {
                        // Same tag but different named types: degrade to Object.
                        if (!ref.eql(left.m_identifier)) {
                            result.* = .m_object;
                        }
                    },
                    else => {},
                }
            }
        } else {
            // always take the next value if left is m_none
        }
    }

    /// Return the final intersection type if possible, or return null to continue merging.
    ///
    /// If the current type is m_unknown, m_null, or m_undefined assign the current type
    /// to m_none and return null to ensure it's always replaced by the next type.
    pub fn finishIntersection(current: *@This(), p: anytype) ?@This() {
        return switch (current.*) {
            .m_identifier => |ref| {
                // A reference literally named "Object" short-circuits to m_object.
                if (strings.eqlComptime(p.loadNameFromRef(ref), "Object")) {
                    return .m_object;
                }
                return null;
            },

            // ensure m_never is the final type
            .m_never => .m_never,

            .m_any,
            .m_object,
            => .m_object,

            .m_unknown,
            .m_null,
            .m_undefined,
            => {
                current.* = .m_none;
                return null;
            },

            else => null,
        };
    }

    /// Fold `left` into `result` for an intersection type. Mirrors
    /// `mergeUnion` but with intersection rules: m_never dominates, and
    /// unknown/undefined/null yield to the other side.
    pub fn mergeIntersection(result: *@This(), left: @This()) void {
        if (left != .m_none) {
            if (std.meta.activeTag(result.*) != std.meta.activeTag(left)) {
                result.* = switch (result.*) {
                    .m_unknown,
                    .m_undefined,
                    .m_null,
                    => left,

                    // ensure m_never is the final type
                    .m_never => .m_never,

                    else => .m_object,
                };
            } else {
                switch (result.*) {
                    .m_identifier => |ref| {
                        // Same tag but different named types: degrade to Object.
                        if (!ref.eql(left.m_identifier)) {
                            result.* = .m_object;
                        }
                    },
                    else => {},
                }
            }
        } else {
            // make sure intersection of only m_unknown serializes to "undefined"
            // instead of "Object"
            if (result.* == .m_unknown) {
                result.* = .m_undefined;
            }
        }
    }
};
|
||
|
||
/// In a .tsx file, `<T,>(...) => ...` and `<T extends X>(...) => ...` are
/// generic arrow functions, not JSX. Look ahead (then restore the lexer) to
/// disambiguate after seeing `<`.
pub fn isTSArrowFnJSX(p: anytype) !bool {
    // Snapshot the lexer so look-ahead is side-effect free.
    const old_lexer = p.lexer;

    try p.lexer.next();
    // Look ahead to see if this should be an arrow function instead
    var is_ts_arrow_fn = false;

    // TypeScript 5.0's `const` type parameter modifier: `<const T>`.
    if (p.lexer.token == .t_const) {
        try p.lexer.next();
    }
    if (p.lexer.token == .t_identifier) {
        try p.lexer.next();
        if (p.lexer.token == .t_comma) {
            // `<T,` can only be a type parameter list.
            is_ts_arrow_fn = true;
        } else if (p.lexer.token == .t_extends) {
            try p.lexer.next();
            // `<T extends X` is a constrained type parameter, unless what
            // follows makes it look like JSX (`=` attr or `>` close).
            is_ts_arrow_fn = p.lexer.token != .t_equals and p.lexer.token != .t_greater_than;
        }
    }

    // Restore the lexer
    p.lexer.restore(&old_lexer);
    return is_ts_arrow_fn;
}
|
||
|
||
// This function is taken from the official TypeScript compiler source code:
|
||
// https://github.com/microsoft/TypeScript/blob/master/src/compiler/parser.ts
|
||
/// Whether the current token is a binary operator (including the contextual
/// `as` / `satisfies` keywords, and `in` only where `in` is allowed).
fn isBinaryOperator(p: anytype) bool {
    return switch (p.lexer.token) {
        .t_in => p.allow_in,

        .t_question_question,
        .t_bar_bar,
        .t_ampersand_ampersand,
        .t_bar,
        .t_caret,
        .t_ampersand,
        .t_equals_equals,
        .t_exclamation_equals,
        .t_equals_equals_equals,
        .t_exclamation_equals_equals,
        .t_less_than,
        .t_greater_than,
        .t_less_than_equals,
        .t_greater_than_equals,
        .t_instanceof,
        .t_less_than_less_than,
        .t_greater_than_greater_than,
        .t_greater_than_greater_than_greater_than,
        .t_plus,
        .t_minus,
        .t_asterisk,
        .t_slash,
        .t_percent,
        .t_asterisk_asterisk,
        => true,
        .t_identifier => p.lexer.isContextualKeyword("as") or p.lexer.isContextualKeyword("satisfies"),
        else => false,
    };
}
|
||
|
||
// This function is taken from the official TypeScript compiler source code:
|
||
// https://github.com/microsoft/TypeScript/blob/master/src/compiler/parser.ts
|
||
/// Whether the current token can begin a left-hand-side expression
/// (literals, `this`/`super`, grouping, function/class, `new`, regex, etc.).
fn isStartOfLeftHandSideExpression(p: anytype) bool {
    return switch (p.lexer.token) {
        .t_this,
        .t_super,
        .t_null,
        .t_true,
        .t_false,
        .t_numeric_literal,
        .t_big_integer_literal,
        .t_string_literal,
        .t_no_substitution_template_literal,
        .t_template_head,
        .t_open_paren,
        .t_open_bracket,
        .t_open_brace,
        .t_function,
        .t_class,
        .t_new,
        // `/` and `/=` re-scan as a regex literal in expression position.
        .t_slash,
        .t_slash_equals,
        .t_identifier,
        => true,
        // `import` starts an expression only as `import(`, `import<`, or `import.`.
        .t_import => lookAheadNextTokenIsOpenParenOrLessThanOrDot(p),
        else => isIdentifier(p),
    };
}
|
||
|
||
/// Peek at the token after the current one without observable side effects:
/// logging is suppressed and the lexer is restored before returning.
fn lookAheadNextTokenIsOpenParenOrLessThanOrDot(p: anytype) bool {
    const old_lexer = p.lexer;
    const old_log_disabled = p.lexer.is_log_disabled;
    p.lexer.is_log_disabled = true;
    defer {
        p.lexer.restore(&old_lexer);
        p.lexer.is_log_disabled = old_log_disabled;
    }
    // Errors during the speculative scan are intentionally ignored.
    p.lexer.next() catch {};

    return switch (p.lexer.token) {
        .t_open_paren, .t_less_than, .t_dot => true,
        else => false,
    };
}
|
||
|
||
// This function is taken from the official TypeScript compiler source code:
|
||
// https://github.com/microsoft/TypeScript/blob/master/src/compiler/parser.ts
|
||
/// Whether the current token may be treated as an identifier, taking into
/// account that "yield" and "await" are reserved in generator/async contexts.
fn isIdentifier(p: anytype) bool {
    if (p.lexer.token != .t_identifier) return false;

    // In a [yield] context, 'yield' is a keyword rather than an identifier.
    if (p.fn_or_arrow_data_parse.allow_yield != .allow_ident and strings.eqlComptime(p.lexer.identifier, "yield")) {
        return false;
    }

    // In an [await] context, 'await' is a keyword rather than an identifier.
    if (p.fn_or_arrow_data_parse.allow_await != .allow_ident and strings.eqlComptime(p.lexer.identifier, "await")) {
        return false;
    }

    return true;
}
|
||
|
||
/// Whether the current token can begin an expression: any LHS-expression
/// starter, a unary/prefix operator, or (for error tolerance) a binary
/// operator.
fn isStartOfExpression(p: anytype) bool {
    if (isStartOfLeftHandSideExpression(p))
        return true;

    switch (p.lexer.token) {
        .t_plus,
        .t_minus,
        .t_tilde,
        .t_exclamation,
        .t_delete,
        .t_typeof,
        .t_void,
        .t_plus_plus,
        .t_minus_minus,
        .t_less_than,
        .t_private_identifier,
        .t_at,
        => return true,
        else => {
            if (p.lexer.token == .t_identifier and (strings.eqlComptime(p.lexer.identifier, "await") or strings.eqlComptime(p.lexer.identifier, "yield"))) {
                // Yield/await always starts an expression. Either it is an identifier (in which case
                // it is definitely an expression). Or it's a keyword (either because we're in
                // a generator or async function, or in strict mode (or both)) and it started a yield or await expression.
                return true;
            }

            // Error tolerance. If we see the start of some binary operator, we consider
            // that the start of an expression. That way we'll parse out a missing identifier,
            // give a good message about an identifier being missing, and then consume the
            // rest of the binary expression.
            if (isBinaryOperator(p)) {
                return true;
            }

            return isIdentifier(p);
        },
    }

    // Never reached: every switch prong above returns.
    unreachable;
}
|
||
|
||
/// Classification helpers for TypeScript contextual keywords that appear in
/// identifier position.
pub const Identifier = struct {
    /// TypeScript statement-starting contextual keywords
    /// (`type`, `namespace`, `abstract`, `module`, `interface`, `declare`).
    pub const StmtIdentifier = enum {
        s_type,

        s_namespace,

        s_abstract,

        s_module,

        s_interface,

        s_declare,
    };
    /// Map an identifier to its statement keyword, switching on length first
    /// so only same-length candidates are string-compared. Note that
    /// "interface" and "namespace" share a length (9) and are handled in the
    /// same prong.
    pub fn forStr(str: string) ?StmtIdentifier {
        switch (str.len) {
            "type".len => return if (strings.eqlComptimeIgnoreLen(str, "type"))
                .s_type
            else
                null,
            "interface".len => {
                if (strings.eqlComptime(str, "interface")) {
                    return .s_interface;
                } else if (strings.eqlComptime(str, "namespace")) {
                    return .s_namespace;
                } else {
                    return null;
                }
            },
            "abstract".len => {
                if (strings.eqlComptime(str, "abstract")) {
                    return .s_abstract;
                } else {
                    return null;
                }
            },
            "declare".len => {
                if (strings.eqlComptime(str, "declare")) {
                    return .s_declare;
                } else {
                    return null;
                }
            },
            "module".len => {
                if (strings.eqlComptime(str, "module")) {
                    return .s_module;
                } else {
                    return null;
                }
            },
            else => return null,
        }
    }
    /// Identifier-position keywords inside TypeScript type syntax.
    pub const IMap = bun.ComptimeStringMap(Kind, .{
        .{ "unique", .unique },
        .{ "abstract", .abstract },
        .{ "asserts", .asserts },

        .{ "keyof", .prefix_keyof },
        .{ "readonly", .prefix_readonly },

        .{ "any", .primitive_any },
        .{ "never", .primitive_never },
        .{ "unknown", .primitive_unknown },
        .{ "undefined", .primitive_undefined },
        .{ "object", .primitive_object },
        .{ "number", .primitive_number },
        .{ "string", .primitive_string },
        .{ "boolean", .primitive_boolean },
        .{ "bigint", .primitive_bigint },
        .{ "symbol", .primitive_symbol },

        .{ "infer", .infer },
    });
    pub const Kind = enum {
        normal,
        unique,
        abstract,
        asserts,
        prefix_keyof,
        prefix_readonly,
        primitive_any,
        primitive_never,
        primitive_unknown,
        primitive_undefined,
        primitive_object,
        primitive_number,
        primitive_string,
        primitive_boolean,
        primitive_bigint,
        primitive_symbol,
        infer,
    };
};
|
||
|
||
/// Flags for skipping a TypeScript type annotation; combined into a bitset.
pub const SkipTypeOptions = enum {
    is_return_type,
    is_index_signature,
    allow_tuple_labels,
    disallow_conditional_types,

    pub const Bitset = std.enums.EnumSet(@This());
    pub const empty = Bitset.initEmpty();
};
|
||
};
|
||
|
||
/// Post-visit pass over a file's top-level statements that:
///   * trims unused imports (and, in TypeScript, whole type-only import
///     statements),
///   * registers named imports/exports with the parser (`p.named_imports`,
///     `p.recordExport`, ...),
///   * rewrites `export default` to a runtime call when targeting CommonJS,
///   * optionally funnels every statement through the HMR conversion context.
/// The filtered statement list is returned via the `stmts` field.
pub const ImportScanner = struct {
    /// Statements that survived the scan (only populated when
    /// `hot_module_reloading_transformations` is false).
    stmts: []Stmt = &.{},
    /// True if at least one TypeScript `import =` statement was kept.
    kept_import_equals: bool = false,
    /// True if at least one TypeScript `import =` statement was removed.
    removed_import_equals: bool = false,

    /// Scans (and filters, in place) `stmts`. `P` is the concrete parser
    /// type; `p` is mutated throughout (symbols, import records, named
    /// imports, exports). Returns a scanner whose `stmts` slice aliases the
    /// front of the input `stmts` buffer.
    pub fn scan(
        comptime P: type,
        p: *P,
        stmts: []Stmt,
        will_transform_to_common_js: bool,
        comptime hot_module_reloading_transformations: bool,
        hot_module_reloading_context: if (hot_module_reloading_transformations) *ConvertESMExportsForHmr else void,
    ) !ImportScanner {
        var scanner = ImportScanner{};
        var stmts_end: usize = 0;
        const allocator = p.allocator;
        const is_typescript_enabled: bool = comptime P.parser_features.typescript;

        for (stmts) |_stmt| {
            var stmt = _stmt; // copy
            switch (stmt.data) {
                .s_import => |import_ptr| {
                    // Work on a copy; write it back on every exit path.
                    var st = import_ptr.*;
                    defer import_ptr.* = st;

                    const record: *ImportRecord = &p.import_records.items[st.import_record_index];

                    // Macro imports are evaluated at bundle/transpile time;
                    // drop the statement from the runtime output entirely.
                    if (record.path.isMacro()) {
                        record.is_unused = true;
                        record.path.is_disabled = true;
                        continue;
                    }

                    // The official TypeScript compiler always removes unused imported
                    // symbols. However, we deliberately deviate from the official
                    // TypeScript compiler's behavior doing this in a specific scenario:
                    // we are not bundling, symbol renaming is off, and the tsconfig.json
                    // "importsNotUsedAsValues" setting is present and is not set to
                    // "remove".
                    //
                    // This exists to support the use case of compiling partial modules for
                    // compile-to-JavaScript languages such as Svelte. These languages try
                    // to reference imports in ways that are impossible for esbuild to know
                    // about when esbuild is only given a partial module to compile. Here
                    // is an example of some Svelte code that might use esbuild to convert
                    // TypeScript to JavaScript:
                    //
                    //   <script lang="ts">
                    //     import Counter from './Counter.svelte';
                    //     export let name: string = 'world';
                    //   </script>
                    //   <main>
                    //     <h1>Hello {name}!</h1>
                    //     <Counter />
                    //   </main>
                    //
                    // Tools that use esbuild to compile TypeScript code inside a Svelte
                    // file like this only give esbuild the contents of the <script> tag.
                    // These tools work around this missing import problem when using the
                    // official TypeScript compiler by hacking the TypeScript AST to
                    // remove the "unused import" flags. This isn't possible in esbuild
                    // because esbuild deliberately does not expose an AST manipulation
                    // API for performance reasons.
                    //
                    // We deviate from the TypeScript compiler's behavior in this specific
                    // case because doing so is useful for these compile-to-JavaScript
                    // languages and is benign in other cases. The rationale is as follows:
                    //
                    //   * If "importsNotUsedAsValues" is absent or set to "remove", then
                    //     we don't know if these imports are values or types. It's not
                    //     safe to keep them because if they are types, the missing imports
                    //     will cause run-time failures because there will be no matching
                    //     exports. It's only safe keep imports if "importsNotUsedAsValues"
                    //     is set to "preserve" or "error" because then we can assume that
                    //     none of the imports are types (since the TypeScript compiler
                    //     would generate an error in that case).
                    //
                    //   * If we're bundling, then we know we aren't being used to compile
                    //     a partial module. The parser is seeing the entire code for the
                    //     module so it's safe to remove unused imports. And also we don't
                    //     want the linker to generate errors about missing imports if the
                    //     imported file is also in the bundle.
                    //
                    //   * If identifier minification is enabled, then using esbuild as a
                    //     partial-module transform library wouldn't work anyway because
                    //     the names wouldn't match. And that means we're minifying so the
                    //     user is expecting the output to be as small as possible. So we
                    //     should omit unused imports.
                    //
                    // NOTE(review): `did_remove_star_loc` is written below but never read
                    // within this function as visible here — TODO confirm it is still needed.
                    var did_remove_star_loc = false;
                    const keep_unused_imports = !p.options.features.trim_unused_imports;
                    // TypeScript always trims unused imports. This is important for
                    // correctness since some imports might be fake (only in the type
                    // system and used for type-only imports).
                    if (!keep_unused_imports) {
                        var found_imports = false;
                        var is_unused_in_typescript = true;

                        // Default import clause: `import Foo from "..."`.
                        if (st.default_name) |default_name| {
                            found_imports = true;
                            const symbol = p.symbols.items[default_name.ref.?.innerIndex()];

                            // TypeScript has a separate definition of unused
                            if (is_typescript_enabled and p.ts_use_counts.items[default_name.ref.?.innerIndex()] != 0) {
                                is_unused_in_typescript = false;
                            }

                            // Remove the symbol if it's never used outside a dead code region
                            if (symbol.use_count_estimate == 0) {
                                st.default_name = null;
                            }
                        }

                        // Remove the star import if it's unused
                        if (st.star_name_loc) |_| {
                            found_imports = true;
                            const symbol = p.symbols.items[st.namespace_ref.innerIndex()];

                            // TypeScript has a separate definition of unused
                            if (is_typescript_enabled and p.ts_use_counts.items[st.namespace_ref.innerIndex()] != 0) {
                                is_unused_in_typescript = false;
                            }

                            // Remove the symbol if it's never used outside a dead code region
                            if (symbol.use_count_estimate == 0) {
                                // Make sure we don't remove this if it was used for a property
                                // access while bundling
                                var has_any = false;

                                if (p.import_items_for_namespace.get(st.namespace_ref)) |entry| {
                                    if (entry.count() > 0) {
                                        has_any = true;
                                    }
                                }

                                if (!has_any) {
                                    st.star_name_loc = null;
                                    did_remove_star_loc = true;
                                }
                            }
                        }

                        // Remove items if they are unused
                        if (st.items.len > 0) {
                            found_imports = true;
                            // Compact kept items to the front of the slice.
                            var items_end: usize = 0;
                            for (st.items) |item| {
                                const ref = item.name.ref.?;
                                const symbol: Symbol = p.symbols.items[ref.innerIndex()];

                                // TypeScript has a separate definition of unused
                                if (is_typescript_enabled and p.ts_use_counts.items[ref.innerIndex()] != 0) {
                                    is_unused_in_typescript = false;
                                }

                                // Remove the symbol if it's never used outside a dead code region
                                if (symbol.use_count_estimate != 0) {
                                    st.items[items_end] = item;
                                    items_end += 1;
                                }
                            }

                            st.items = st.items[0..items_end];
                        }

                        // -- Original Comment --
                        // Omit this statement if we're parsing TypeScript and all imports are
                        // unused. Note that this is distinct from the case where there were
                        // no imports at all (e.g. "import 'foo'"). In that case we want to keep
                        // the statement because the user is clearly trying to import the module
                        // for side effects.
                        //
                        // This culling is important for correctness when parsing TypeScript
                        // because a) the TypeScript compiler does this and we want to match it
                        // and b) this may be a fake module that only exists in the type system
                        // and doesn't actually exist in reality.
                        //
                        // We do not want to do this culling in JavaScript though because the
                        // module may have side effects even if all imports are unused.
                        // -- Original Comment --

                        // jarred: I think, in this project, we want this behavior, even in JavaScript.
                        // I think this would be a big performance improvement.
                        // The less you import, the less code you transpile.
                        // Side-effect imports are nearly always done through identifier-less imports
                        // e.g. `import 'fancy-stylesheet-thing/style.css';`
                        // This is a breaking change though. We can make it an option with some guardrail
                        // so maybe if it errors, it shows a suggestion "retry without trimming unused imports"
                        if ((is_typescript_enabled and found_imports and is_unused_in_typescript and !p.options.preserve_unused_imports_ts) or
                            (!is_typescript_enabled and p.options.features.trim_unused_imports and found_imports and st.star_name_loc == null and st.items.len == 0 and st.default_name == null))
                        {
                            // internal imports are presumed to be always used
                            // require statements cannot be stripped
                            if (!record.is_internal and !record.was_originally_require) {
                                record.is_unused = true;
                                continue;
                            }
                        }
                    }

                    const namespace_ref = st.namespace_ref;
                    const convert_star_to_clause = !p.options.bundle and (p.symbols.items[namespace_ref.innerIndex()].use_count_estimate == 0);

                    if (convert_star_to_clause and !keep_unused_imports) {
                        st.star_name_loc = null;
                    }

                    record.contains_default_alias = record.contains_default_alias or st.default_name != null;

                    const existing_items: ImportItemForNamespaceMap = p.import_items_for_namespace.get(namespace_ref) orelse
                        ImportItemForNamespaceMap.init(allocator);

                    if (p.options.bundle) {
                        // Bundler path: register every alias as a named import so
                        // the linker can resolve (and warn about) them.
                        if (st.star_name_loc != null and existing_items.count() > 0) {
                            const sorted = try allocator.alloc(string, existing_items.count());
                            defer allocator.free(sorted);
                            for (sorted, existing_items.keys()) |*result, alias| {
                                result.* = alias;
                            }
                            strings.sortDesc(sorted);
                            p.named_imports.ensureUnusedCapacity(p.allocator, sorted.len) catch bun.outOfMemory();

                            // Create named imports for these property accesses. This will
                            // cause missing imports to generate useful warnings.
                            //
                            // It will also improve bundling efficiency for internal imports
                            // by still converting property accesses off the namespace into
                            // bare identifiers even if the namespace is still needed.
                            for (sorted) |alias| {
                                const item = existing_items.get(alias).?;
                                p.named_imports.put(
                                    p.allocator,
                                    item.ref.?,
                                    js_ast.NamedImport{
                                        .alias = alias,
                                        .alias_loc = item.loc,
                                        .namespace_ref = namespace_ref,
                                        .import_record_index = st.import_record_index,
                                    },
                                ) catch bun.outOfMemory();

                                const name: LocRef = item;
                                const name_ref = name.ref.?;

                                // Make sure the printer prints this as a property access
                                var symbol: *Symbol = &p.symbols.items[name_ref.innerIndex()];

                                symbol.namespace_alias = G.NamespaceAlias{
                                    .namespace_ref = namespace_ref,
                                    .alias = alias,
                                    .import_record_index = st.import_record_index,
                                    .was_originally_property_access = st.star_name_loc != null and existing_items.contains(symbol.original_name),
                                };

                                // Also record these automatically-generated top-level namespace alias symbols
                                p.declared_symbols.append(p.allocator, .{
                                    .ref = name_ref,
                                    .is_top_level = true,
                                }) catch unreachable;
                            }
                        }

                        p.named_imports.ensureUnusedCapacity(
                            p.allocator,
                            st.items.len + @as(usize, @intFromBool(st.default_name != null)) + @as(usize, @intFromBool(st.star_name_loc != null)),
                        ) catch bun.outOfMemory();

                        if (st.star_name_loc) |loc| {
                            record.contains_import_star = true;
                            p.named_imports.putAssumeCapacity(
                                namespace_ref,
                                js_ast.NamedImport{
                                    .alias_is_star = true,
                                    .alias = "",
                                    .alias_loc = loc,
                                    .namespace_ref = Ref.None,
                                    .import_record_index = st.import_record_index,
                                },
                            );
                        }

                        if (st.default_name) |default| {
                            record.contains_default_alias = true;
                            p.named_imports.putAssumeCapacity(
                                default.ref.?,
                                .{
                                    .alias = "default",
                                    .alias_loc = default.loc,
                                    .namespace_ref = namespace_ref,
                                    .import_record_index = st.import_record_index,
                                },
                            );
                        }

                        for (st.items) |item| {
                            const name: LocRef = item.name;
                            const name_ref = name.ref.?;

                            p.named_imports.putAssumeCapacity(
                                name_ref,
                                js_ast.NamedImport{
                                    .alias = item.alias,
                                    .alias_loc = name.loc,
                                    .namespace_ref = namespace_ref,
                                    .import_record_index = st.import_record_index,
                                },
                            );
                        }
                    } else {
                        // ESM requires live bindings
                        // CommonJS does not require live bindings
                        // We load ESM in browsers & in Bun.js
                        // We have to simulate live bindings for cases where the code is bundled
                        // We do not know at this stage whether or not the import statement is bundled
                        // This keeps track of the `namespace_alias` incase, at printing time, we determine that we should print it with the namespace
                        for (st.items) |item| {
                            record.contains_default_alias = record.contains_default_alias or strings.eqlComptime(item.alias, "default");

                            const name: LocRef = item.name;
                            const name_ref = name.ref.?;

                            try p.named_imports.put(p.allocator, name_ref, js_ast.NamedImport{
                                .alias = item.alias,
                                .alias_loc = name.loc,
                                .namespace_ref = namespace_ref,
                                .import_record_index = st.import_record_index,
                            });

                            // Make sure the printer prints this as a property access
                            var symbol: *Symbol = &p.symbols.items[name_ref.innerIndex()];
                            if (record.contains_import_star or st.star_name_loc != null)
                                symbol.namespace_alias = G.NamespaceAlias{
                                    .namespace_ref = namespace_ref,
                                    .alias = item.alias,
                                    .import_record_index = st.import_record_index,
                                    .was_originally_property_access = st.star_name_loc != null and existing_items.contains(symbol.original_name),
                                };
                        }

                        if (record.was_originally_require) {
                            var symbol = &p.symbols.items[namespace_ref.innerIndex()];
                            symbol.namespace_alias = G.NamespaceAlias{
                                .namespace_ref = namespace_ref,
                                .alias = "",
                                .import_record_index = st.import_record_index,
                                .was_originally_property_access = false,
                            };
                        }
                    }

                    try p.import_records_for_current_part.append(allocator, st.import_record_index);

                    record.contains_import_star = record.contains_import_star or st.star_name_loc != null;
                    record.contains_default_alias = record.contains_default_alias or st.default_name != null;

                    for (st.items) |*item| {
                        record.contains_default_alias = record.contains_default_alias or strings.eqlComptime(item.alias, "default");
                        record.contains_es_module_alias = record.contains_es_module_alias or strings.eqlComptime(item.alias, "__esModule");
                    }
                },

                .s_function => |st| {
                    // Exported functions must be named so the export can be recorded.
                    if (st.func.flags.contains(.is_export)) {
                        if (st.func.name) |name| {
                            const original_name = p.symbols.items[name.ref.?.innerIndex()].original_name;
                            try p.recordExport(name.loc, original_name, name.ref.?);
                        } else {
                            try p.log.addRangeError(p.source, logger.Range{ .loc = st.func.open_parens_loc, .len = 2 }, "Exported functions must have a name");
                        }
                    }
                },

                .s_class => |st| {
                    // Exported classes must be named so the export can be recorded.
                    if (st.is_export) {
                        if (st.class.class_name) |name| {
                            try p.recordExport(name.loc, p.symbols.items[name.ref.?.innerIndex()].original_name, name.ref.?);
                        } else {
                            try p.log.addRangeError(p.source, logger.Range{ .loc = st.class.body_loc, .len = 0 }, "Exported classes must have a name");
                        }
                    }
                },

                .s_local => |st| {
                    if (st.is_export) {
                        for (st.decls.slice()) |decl| {
                            p.recordExportedBinding(decl.binding);
                        }
                    }

                    // Remove unused import-equals statements, since those likely
                    // correspond to types instead of values
                    if (st.was_ts_import_equals and !st.is_export and st.decls.len > 0) {
                        var decl = st.decls.ptr[0];

                        // Skip to the underlying reference
                        var value = decl.value;
                        if (decl.value != null) {
                            while (true) {
                                if (@as(Expr.Tag, value.?.data) == .e_dot) {
                                    value = value.?.data.e_dot.target;
                                } else {
                                    break;
                                }
                            }
                        }

                        // Is this an identifier reference and not a require() call?
                        if (value) |val| {
                            if (@as(Expr.Tag, val.data) == .e_identifier) {
                                // Is this import statement unused?
                                if (@as(Binding.Tag, decl.binding.data) == .b_identifier and p.symbols.items[decl.binding.data.b_identifier.ref.innerIndex()].use_count_estimate == 0) {
                                    p.ignoreUsage(val.data.e_identifier.ref);

                                    scanner.removed_import_equals = true;
                                    continue;
                                } else {
                                    scanner.kept_import_equals = true;
                                }
                            }
                        }
                    }

                    // when bundling, all top-level variables become var
                    // TODO(@paperdave): we already do this earlier in visiting?
                    if (!hot_module_reloading_transformations and p.options.bundle and !st.kind.isUsing()) {
                        st.kind = .k_var;
                    }
                },

                .s_export_default => |st| {
                    // This is defer'd so that we still record export default for identifiers
                    defer {
                        if (st.default_name.ref) |ref| {
                            p.recordExport(st.default_name.loc, "default", ref) catch {};
                        }
                    }

                    // Rewrite this export to be:
                    // exports.default =
                    // But only if it's anonymous
                    if (!hot_module_reloading_transformations and will_transform_to_common_js and P != bun.bundle_v2.AstBuilder) {
                        const expr = st.value.toExpr();
                        var export_default_args = try p.allocator.alloc(Expr, 2);
                        export_default_args[0] = p.@"module.exports"(expr.loc);
                        export_default_args[1] = expr;
                        stmt = p.s(S.SExpr{ .value = p.callRuntime(expr.loc, "__exportDefault", export_default_args) }, expr.loc);
                    }
                },

                .s_export_clause => |st| {
                    // "export { a, b as c }"
                    for (st.items) |item| {
                        try p.recordExport(item.alias_loc, item.alias, item.name.ref.?);
                    }
                },

                .s_export_star => |st| {
                    try p.import_records_for_current_part.append(allocator, st.import_record_index);

                    if (st.alias) |alias| {
                        // "export * as ns from 'path'"
                        try p.named_imports.put(p.allocator, st.namespace_ref, js_ast.NamedImport{
                            .alias = null,
                            .alias_is_star = true,
                            .alias_loc = alias.loc,
                            .namespace_ref = Ref.None,
                            .import_record_index = st.import_record_index,
                            .is_exported = true,
                        });
                        try p.recordExport(alias.loc, alias.original_name, st.namespace_ref);
                        var record = &p.import_records.items[st.import_record_index];
                        record.contains_import_star = true;
                    } else {
                        // "export * from 'path'"
                        try p.export_star_import_records.append(allocator, st.import_record_index);
                    }
                },

                .s_export_from => |st| {
                    try p.import_records_for_current_part.append(allocator, st.import_record_index);
                    p.named_imports.ensureUnusedCapacity(p.allocator, st.items.len) catch unreachable;
                    for (st.items) |item| {
                        const ref = item.name.ref orelse p.panic("Expected export from item to have a name {any}", .{st});
                        // Note that the imported alias is not item.Alias, which is the
                        // exported alias. This is somewhat confusing because each
                        // SExportFrom statement is basically SImport + SExportClause in one.
                        try p.named_imports.put(p.allocator, ref, js_ast.NamedImport{
                            .alias_is_star = false,
                            .alias = item.original_name,
                            .alias_loc = item.name.loc,
                            .namespace_ref = st.namespace_ref,
                            .import_record_index = st.import_record_index,
                            .is_exported = true,
                        });
                        try p.recordExport(item.name.loc, item.alias, ref);

                        var record = &p.import_records.items[st.import_record_index];
                        if (strings.eqlComptime(item.original_name, "default")) {
                            record.contains_default_alias = true;
                        } else if (strings.eqlComptime(item.original_name, "__esModule")) {
                            record.contains_es_module_alias = true;
                        }
                    }
                },
                else => {},
            }

            // Either hand the (possibly rewritten) statement to the HMR
            // converter, or compact it back into the input buffer.
            if (hot_module_reloading_transformations) {
                try hot_module_reloading_context.convertStmt(p, stmt);
            } else {
                stmts[stmts_end] = stmt;
                stmts_end += 1;
            }
        }

        if (!hot_module_reloading_transformations)
            scanner.stmts = stmts[0..stmts_end];

        return scanner;
    }
};
|
||
|
||
/// We must prevent collisions from generated names with user's names.
///
/// When transpiling for the runtime, we want to avoid adding a pass over all
/// the symbols in the file (the bundler does that, since with multiple files
/// user symbols from different files may collide with each other).
///
/// Instead, append a short hash of the name to the name itself at comptime,
/// making a collision with a user-written identifier extremely unlikely.
pub inline fn generatedSymbolName(name: []const u8) []const u8 {
    comptime {
        // Derive a stable suffix from the name itself (Wyhash, truncated).
        const digest = std.hash.Wyhash.hash(0, name);
        const suffix = std.fmt.comptimePrint("_{}", .{bun.fmt.truncatedHash32(@intCast(digest))});
        return name ++ suffix;
    }
}
|
||
|
||
pub const SideEffects = enum(u1) {
|
||
could_have_side_effects,
|
||
no_side_effects,
|
||
|
||
/// Result of a static evaluation query (e.g. `toBoolean`).
pub const Result = struct {
    side_effects: SideEffects,
    /// Whether the value could be determined statically. When false,
    /// `value` is meaningless.
    ok: bool = false,
    /// The statically-known result (only valid when `ok` is true).
    value: bool = false,
};
|
||
|
||
/// Whether a strict equality test between the two operands can be relaxed
/// to a loose one without changing the result: both sides must have the
/// same statically-known primitive type, and that type must actually be
/// known (neither `.unknown` nor `.mixed`).
pub fn canChangeStrictToLoose(lhs: Expr.Data, rhs: Expr.Data) bool {
    const lhs_primitive = lhs.knownPrimitive();
    const rhs_primitive = rhs.knownPrimitive();
    if (lhs_primitive != rhs_primitive) return false;
    return switch (lhs_primitive) {
        .unknown, .mixed => false,
        else => true,
    };
}
|
||
|
||
/// Simplifies an expression that is only used in a boolean context (e.g. an
/// `if` condition), mutating nested `e_unary` nodes in place. Returns the
/// (possibly replaced) expression. No-op unless dead-code elimination is on.
pub fn simplifyBoolean(p: anytype, expr: Expr) Expr {
    if (!p.options.features.dead_code_elimination) return expr;
    switch (expr.data) {
        .e_unary => |e| {
            if (e.op == .un_not) {
                // "!!a" => "a"
                if (e.value.data == .e_unary and e.value.data.e_unary.op == .un_not) {
                    return simplifyBoolean(p, e.value.data.e_unary.value);
                }

                // Recurse into the operand of a single "!" in place.
                e.value = simplifyBoolean(p, e.value);
            }
        },
        .e_binary => |e| {
            switch (e.op) {
                .bin_logical_and => {
                    const effects = SideEffects.toBoolean(p, e.right.data);
                    if (effects.ok and effects.value and effects.side_effects == .no_side_effects) {
                        // "if (anything && truthyNoSideEffects)" => "if (anything)"
                        return e.left;
                    }
                },
                .bin_logical_or => {
                    const effects = SideEffects.toBoolean(p, e.right.data);
                    if (effects.ok and !effects.value and effects.side_effects == .no_side_effects) {
                        // "if (anything || falsyNoSideEffects)" => "if (anything)"
                        return e.left;
                    }
                },
                else => {},
            }
        },
        else => {},
    }

    // Nothing to simplify.
    return expr;
}
|
||
|
||
// Re-exported aliases so callers can write `SideEffects.toNumber(...)` /
// `SideEffects.typeof(...)`; both delegate to the `Expr.Data` helpers.
pub const toNumber = Expr.Data.toNumber;
pub const typeof = Expr.Data.toTypeof;
|
||
|
||
/// Whether this expression node is a primitive literal that is safe to
/// reorder relative to other expressions (evaluating it has no observable
/// effect and no dependence on evaluation order).
pub fn isPrimitiveToReorder(data: Expr.Data) bool {
    switch (data) {
        .e_null,
        .e_undefined,
        .e_string,
        .e_boolean,
        .e_number,
        .e_big_int,
        .e_inlined_enum,
        .e_require_main,
        => return true,
        else => return false,
    }
}
|
||
|
||
/// Simplifies an expression whose value is not used (only its side effects
/// matter). Returns `null` when the entire expression can be removed, or a
/// (possibly smaller) replacement expression otherwise. May mutate the AST
/// node in place. No-op unless dead-code elimination is enabled.
///
/// This is a port of esbuild's `simplifyUnusedExpr`. Two fixes vs the
/// previous version, restoring the upstream semantics:
///   * `.e_array` spread case: the compacted items were assigned and the
///     function returned *inside* the compaction loop, truncating the array
///     after examining only its first element. The assignment/return now
///     happen after the loop.
///   * `.e_object` spread case: the loop stored the unmodified property
///     instead of the locally-simplified copy, so the simplification of
///     property values was silently discarded.
pub fn simplifyUnusedExpr(p: anytype, expr: Expr) ?Expr {
    if (!p.options.features.dead_code_elimination) return expr;
    switch (expr.data) {
        // Pure literals and function values: evaluating them has no effect.
        .e_null,
        .e_undefined,
        .e_missing,
        .e_boolean,
        .e_number,
        .e_big_int,
        .e_string,
        .e_this,
        .e_reg_exp,
        .e_function,
        .e_arrow,
        .e_import_meta,
        .e_inlined_enum,
        => {
            return null;
        },

        .e_dot => |dot| {
            // Property accesses marked side-effect-free can be dropped.
            if (dot.can_be_removed_if_unused) {
                return null;
            }
        },
        .e_identifier => |ident| {
            if (ident.must_keep_due_to_with_stmt) {
                return expr;
            }

            // Reading a bound identifier can't throw; reading an unbound one
            // can (ReferenceError), so only drop the former.
            if (ident.can_be_removed_if_unused or p.symbols.items[ident.ref.innerIndex()].kind != .unbound) {
                return null;
            }
        },
        .e_if => |__if__| {
            __if__.yes = simplifyUnusedExpr(p, __if__.yes) orelse __if__.yes.toEmpty();
            __if__.no = simplifyUnusedExpr(p, __if__.no) orelse __if__.no.toEmpty();

            // "foo() ? 1 : 2" => "foo()"
            if (__if__.yes.isEmpty() and __if__.no.isEmpty()) {
                return simplifyUnusedExpr(p, __if__.test_);
            }

            // "foo() ? 1 : bar()" => "foo() || bar()"
            if (__if__.yes.isEmpty()) {
                return Expr.joinWithLeftAssociativeOp(
                    .bin_logical_or,
                    __if__.test_,
                    __if__.no,
                    p.allocator,
                );
            }

            // "foo() ? bar() : 2" => "foo() && bar()"
            if (__if__.no.isEmpty()) {
                return Expr.joinWithLeftAssociativeOp(
                    .bin_logical_and,
                    __if__.test_,
                    __if__.yes,
                    p.allocator,
                );
            }
        },
        .e_unary => |un| {
            // These operators must not have any type conversions that can execute code
            // such as "toString" or "valueOf". They must also never throw any exceptions.
            switch (un.op) {
                .un_void, .un_not => {
                    return simplifyUnusedExpr(p, un.value);
                },
                .un_typeof => {
                    // "typeof x" must not be transformed into just "x" since doing so could
                    // cause an exception to be thrown. Instead we can just remove it since
                    // "typeof x" is special-cased in the standard to never throw.
                    if (std.meta.activeTag(un.value.data) == .e_identifier) {
                        return null;
                    }

                    return simplifyUnusedExpr(p, un.value);
                },

                else => {},
            }
        },

        inline .e_call, .e_new => |call| {
            // A call that has been marked "__PURE__" can be removed if all arguments
            // can be removed. The annotation causes us to ignore the target.
            if (call.can_be_unwrapped_if_unused) {
                if (call.args.len > 0) {
                    return Expr.joinAllWithCommaCallback(call.args.slice(), @TypeOf(p), p, comptime simplifyUnusedExpr, p.allocator);
                } else {
                    return Expr.empty;
                }
            }
        },

        .e_binary => |bin| {
            switch (bin.op) {
                // These operators must not have any type conversions that can execute code
                // such as "toString" or "valueOf". They must also never throw any exceptions.
                .bin_strict_eq,
                .bin_strict_ne,
                .bin_comma,
                => return simplifyUnusedBinaryCommaExpr(p, expr),

                // We can simplify "==" and "!=" even though they can call "toString" and/or
                // "valueOf" if we can statically determine that the types of both sides are
                // primitives. In that case there won't be any chance for user-defined
                // "toString" and/or "valueOf" to be called.
                .bin_loose_eq,
                .bin_loose_ne,
                => {
                    if (isPrimitiveWithSideEffects(bin.left.data) and isPrimitiveWithSideEffects(bin.right.data)) {
                        return Expr.joinWithComma(simplifyUnusedExpr(p, bin.left) orelse bin.left.toEmpty(), simplifyUnusedExpr(p, bin.right) orelse bin.right.toEmpty(), p.allocator);
                    }
                },

                .bin_logical_and, .bin_logical_or, .bin_nullish_coalescing => {
                    bin.right = simplifyUnusedExpr(p, bin.right) orelse bin.right.toEmpty();
                    // Preserve short-circuit behavior: the left expression is only unused if
                    // the right expression can be completely removed. Otherwise, the left
                    // expression is important for the branch.
                    if (bin.right.isEmpty())
                        return simplifyUnusedExpr(p, bin.left);
                },

                else => {},
            }
        },

        .e_object => {
            // Objects with "..." spread expressions can't be unwrapped because the
            // "..." triggers code evaluation via getters. In that case, just trim
            // the other items instead and leave the object expression there.
            var properties_slice = expr.data.e_object.properties.slice();
            var end: usize = 0;
            for (properties_slice) |spread| {
                end = 0;
                if (spread.kind == .spread) {
                    // Spread properties must always be evaluated
                    for (properties_slice) |prop_| {
                        var prop = prop_;
                        if (prop_.kind != .spread) {
                            const value = simplifyUnusedExpr(p, prop.value.?);
                            if (value != null) {
                                prop.value = value;
                            } else if (!prop.flags.contains(.is_computed)) {
                                // Non-computed key with removable value: drop the property.
                                continue;
                            } else {
                                // Keep a computed key (it may run code); give it a
                                // trivial value.
                                prop.value = p.newExpr(E.Number{ .value = 0.0 }, prop.value.?.loc);
                            }
                        }

                        // Store the simplified copy (previously the unmodified
                        // `prop_` was stored, discarding the work above).
                        properties_slice[end] = prop;
                        end += 1;
                    }

                    properties_slice = properties_slice[0..end];
                    expr.data.e_object.properties = G.Property.List.init(properties_slice);
                    return expr;
                }
            }

            var result = Expr.init(E.Missing, E.Missing{}, expr.loc);

            // Otherwise, the object can be completely removed. We only need to keep any
            // object properties with side effects. Apply this simplification recursively.
            for (properties_slice) |prop| {
                if (prop.flags.contains(.is_computed)) {
                    // Make sure "ToString" is still evaluated on the key
                    result = result.joinWithComma(
                        p.newExpr(
                            E.Binary{
                                .op = .bin_add,
                                .left = prop.key.?,
                                .right = p.newExpr(E.String{}, prop.key.?.loc),
                            },
                            prop.key.?.loc,
                        ),
                        p.allocator,
                    );
                }
                result = result.joinWithComma(
                    simplifyUnusedExpr(p, prop.value.?) orelse prop.value.?.toEmpty(),
                    p.allocator,
                );
            }

            return result;
        },
        .e_array => {
            var items = expr.data.e_array.items.slice();

            // Arrays with "..." spread expressions can't be unwrapped because the
            // spread triggers iteration (which can run arbitrary code). In that
            // case, just drop the "missing" holes and keep the array.
            for (items) |item| {
                if (item.data == .e_spread) {
                    var end: usize = 0;
                    for (items) |item_| {
                        if (item_.data != .e_missing) {
                            items[end] = item_;
                            end += 1;
                        }
                    }

                    // Assign/return AFTER the compaction loop (previously this was
                    // inside it, truncating the array after the first element).
                    expr.data.e_array.items = ExprNodeList.init(items[0..end]);
                    return expr;
                }
            }

            // Otherwise, the array can be completely removed. We only need to keep any
            // array items with side effects. Apply this simplification recursively.
            return Expr.joinAllWithCommaCallback(
                items,
                @TypeOf(p),
                p,
                comptime simplifyUnusedExpr,
                p.allocator,
            );
        },

        else => {},
    }

    return expr;
}
|
||
|
||
/// Stack frame used by `simplifyUnusedBinaryCommaExpr` to walk a deeply
/// left-nested binary chain with an explicit stack instead of recursion.
const BinaryExpressionSimplifyVisitor = struct {
    bin: *E.Binary,
};
|
||
|
||
/// Simplifies an unused binary expression whose operator is `===`, `!==`,
/// or `,` — operators that perform no observable type conversion — by
/// joining the simplified left and right operands with commas. Uses an
/// explicit stack (`p.binary_expression_simplify_stack`) to avoid stack
/// overflow on deeply left-nested chains. Returns `Expr.empty` when the
/// whole expression reduces to nothing.
fn simplifyUnusedBinaryCommaExpr(p: anytype, expr: Expr) ?Expr {
    if (Environment.allow_assert) {
        assert(expr.data == .e_binary);
        assert(switch (expr.data.e_binary.op) {
            .bin_strict_eq,
            .bin_strict_ne,
            .bin_comma,
            => true,
            else => false,
        });
    }
    // Reuse the shared stack; restore its previous length on exit so nested
    // invocations compose.
    const stack: *std.ArrayList(BinaryExpressionSimplifyVisitor) = &p.binary_expression_simplify_stack;
    const stack_bottom = stack.items.len;
    defer stack.shrinkRetainingCapacity(stack_bottom);

    stack.append(.{ .bin = expr.data.e_binary }) catch bun.outOfMemory();

    // Build stack up of expressions
    var left: Expr = expr.data.e_binary.left;
    while (left.data.as(.e_binary)) |left_bin| {
        switch (left_bin.op) {
            .bin_strict_eq,
            .bin_strict_ne,
            .bin_comma,
            => {
                stack.append(.{ .bin = left_bin }) catch bun.outOfMemory();
                left = left_bin.left;
            },
            else => break,
        }
    }

    // Ride the stack downwards
    var i = stack.items.len;
    var result = simplifyUnusedExpr(p, left) orelse Expr.empty;
    while (i > stack_bottom) {
        i -= 1;
        const top = stack.items[i];
        const visited_right = simplifyUnusedExpr(p, top.bin.right) orelse Expr.empty;
        result = result.joinWithComma(visited_right, p.allocator);
    }

    return if (result.isMissing()) Expr.empty else result;
}
|
||
|
||
/// Recursively collects every identifier bound by `binding` (including
/// those nested inside array/object destructuring patterns), appending one
/// value-less declaration per identifier to `decls`.
fn findIdentifiers(binding: Binding, decls: *std.ArrayList(G.Decl)) void {
    switch (binding.data) {
        .b_identifier => decls.append(.{ .binding = binding }) catch unreachable,
        .b_array => |array_pattern| for (array_pattern.items) |element| {
            findIdentifiers(element.binding, decls);
        },
        .b_object => |object_pattern| for (object_pattern.properties) |property| {
            findIdentifiers(property.value, decls);
        },
        else => {},
    }
}
|
||
|
||
// If this is in a dead branch, then we want to trim as much dead code as we
// can. Everything can be trimmed except for hoisted declarations ("var" and
// "function"), which affect the parent scope. For example:
//
//   function foo() {
//     if (false) { var x; }
//     x = 1;
//   }
//
// We can't trim the entire branch as dead or calling foo() will incorrectly
// assign to a global variable instead.

/// Returns true if `stmt` must be kept even though it sits in dead control
/// flow (because it hoists declarations into the parent scope). May strip
/// `var` declarations down to bare identifier bindings in place. Always
/// returns true when dead-code elimination is disabled.
pub fn shouldKeepStmtInDeadControlFlow(p: anytype, stmt: Stmt, allocator: Allocator) bool {
    if (!p.options.features.dead_code_elimination) return true;
    switch (stmt.data) {
        // Omit these statements entirely
        .s_empty, .s_expr, .s_throw, .s_return, .s_break, .s_continue, .s_class, .s_debugger => return false,

        .s_local => |local| {
            if (local.kind != .k_var) {
                // Omit these statements entirely
                return false;
            }

            // Omit everything except the identifiers

            // common case: single var foo = blah, don't need to allocate
            if (local.decls.len == 1 and local.decls.ptr[0].binding.data == .b_identifier) {
                const prev = local.decls.ptr[0];
                // Keep the binding but drop the initializer (the value is dead).
                stmt.data.s_local.decls.ptr[0] = G.Decl{ .binding = prev.binding };
                return true;
            }

            // Destructuring patterns: collect every bound identifier and
            // replace the declaration list with value-less bindings.
            var decls = std.ArrayList(G.Decl).initCapacity(allocator, local.decls.len) catch unreachable;
            for (local.decls.slice()) |decl| {
                findIdentifiers(decl.binding, &decls);
            }

            local.decls.update(decls);
            return true;
        },

        .s_block => |block| {
            // Keep the block if any nested statement hoists.
            for (block.stmts) |child| {
                if (shouldKeepStmtInDeadControlFlow(p, child, allocator)) {
                    return true;
                }
            }

            return false;
        },

        .s_if => |_if_| {
            if (shouldKeepStmtInDeadControlFlow(p, _if_.yes, allocator)) {
                return true;
            }

            const no = _if_.no orelse return false;

            return shouldKeepStmtInDeadControlFlow(p, no, allocator);
        },

        .s_while => {
            return shouldKeepStmtInDeadControlFlow(p, stmt.data.s_while.body, allocator);
        },

        .s_do_while => {
            return shouldKeepStmtInDeadControlFlow(p, stmt.data.s_do_while.body, allocator);
        },

        .s_for => |__for__| {
            if (__for__.init) |init_| {
                if (shouldKeepStmtInDeadControlFlow(p, init_, allocator)) {
                    return true;
                }
            }

            return shouldKeepStmtInDeadControlFlow(p, __for__.body, allocator);
        },

        .s_for_in => |__for__| {
            return shouldKeepStmtInDeadControlFlow(p, __for__.init, allocator) or shouldKeepStmtInDeadControlFlow(p, __for__.body, allocator);
        },

        .s_for_of => |__for__| {
            return shouldKeepStmtInDeadControlFlow(p, __for__.init, allocator) or shouldKeepStmtInDeadControlFlow(p, __for__.body, allocator);
        },

        .s_label => |label| {
            return shouldKeepStmtInDeadControlFlow(p, label.stmt, allocator);
        },
        // Conservative default: anything not handled above is kept.
        else => return true,
    }
}
|
||
|
||
// Returns true if this expression is known to result in a primitive value (i.e.
// null, undefined, boolean, number, bigint, or string), even if the expression
// cannot be removed due to side effects.
pub fn isPrimitiveWithSideEffects(data: Expr.Data) bool {
    return switch (data) {
        // Literal primitives (inlined enum values are numbers/strings).
        .e_null,
        .e_undefined,
        .e_boolean,
        .e_number,
        .e_big_int,
        .e_string,
        .e_inlined_enum,
        => true,

        .e_unary => |e| switch (e.op) {
            // number or bigint
            .un_pos,
            .un_neg,
            .un_cpl,
            .un_pre_dec,
            .un_pre_inc,
            .un_post_dec,
            .un_post_inc,
            // boolean
            .un_not,
            .un_delete,
            // undefined
            .un_void,
            // string
            .un_typeof,
            => true,
            else => false,
        },

        .e_binary => |e| switch (e.op) {
            // boolean
            .bin_lt,
            .bin_le,
            .bin_gt,
            .bin_ge,
            .bin_in,
            .bin_instanceof,
            .bin_loose_eq,
            .bin_loose_ne,
            .bin_strict_eq,
            .bin_strict_ne,
            // string, number, or bigint
            .bin_add,
            .bin_add_assign,
            // number or bigint
            .bin_sub,
            .bin_mul,
            .bin_div,
            .bin_rem,
            .bin_pow,
            .bin_sub_assign,
            .bin_mul_assign,
            .bin_div_assign,
            .bin_rem_assign,
            .bin_pow_assign,
            .bin_shl,
            .bin_shr,
            .bin_u_shr,
            .bin_shl_assign,
            .bin_shr_assign,
            .bin_u_shr_assign,
            .bin_bitwise_or,
            .bin_bitwise_and,
            .bin_bitwise_xor,
            .bin_bitwise_or_assign,
            .bin_bitwise_and_assign,
            .bin_bitwise_xor_assign,
            => true,

            // These always return one of the arguments unmodified, so the
            // result is only primitive when both operands are primitive.
            .bin_logical_and,
            .bin_logical_or,
            .bin_nullish_coalescing,
            .bin_logical_and_assign,
            .bin_logical_or_assign,
            .bin_nullish_coalescing_assign,
            => isPrimitiveWithSideEffects(e.left.data) and isPrimitiveWithSideEffects(e.right.data),

            // A comma expression evaluates to its right-hand side.
            .bin_comma => isPrimitiveWithSideEffects(e.right.data),

            else => false,
        },

        // A ternary is primitive only if both possible results are primitive.
        .e_if => |e| isPrimitiveWithSideEffects(e.yes.data) and isPrimitiveWithSideEffects(e.no.data),

        else => false,
    };
}
|
||
|
||
pub const toTypeOf = Expr.Data.typeof;
|
||
|
||
/// Statically determines whether `exp` evaluates to null/undefined.
/// `.ok = true` means the answer is known; `.value` is then whether the
/// expression is null-or-undefined. `.side_effects` reports whether the
/// expression may still need to be kept for its side effects.
pub fn toNullOrUndefined(p: anytype, exp: Expr.Data) Result {
    if (!p.options.features.dead_code_elimination) {
        // value should not be read if ok is false, all existing calls to this function already adhere to this
        return Result{ .ok = false, .value = undefined, .side_effects = .could_have_side_effects };
    }
    switch (exp) {
        // Never null or undefined
        .e_boolean, .e_number, .e_string, .e_reg_exp, .e_function, .e_arrow, .e_big_int => {
            return Result{ .value = false, .side_effects = .no_side_effects, .ok = true };
        },

        // Never null or undefined, but evaluating the literal's contents
        // (spreads, computed keys, class bodies) can have side effects.
        .e_object, .e_array, .e_class => {
            return Result{ .value = false, .side_effects = .could_have_side_effects, .ok = true };
        },

        // always a null or undefined
        .e_null, .e_undefined => {
            return Result{ .value = true, .side_effects = .no_side_effects, .ok = true };
        },

        .e_unary => |e| {
            switch (e.op) {
                // Always number or bigint
                .un_pos,
                .un_neg,
                .un_cpl,
                .un_pre_dec,
                .un_pre_inc,
                .un_post_dec,
                .un_post_inc,

                // Always boolean
                .un_not,
                .un_typeof,
                .un_delete,
                => {
                    return Result{ .ok = true, .value = false, .side_effects = SideEffects.could_have_side_effects };
                },

                // Always undefined
                .un_void => {
                    return Result{ .value = true, .side_effects = .could_have_side_effects, .ok = true };
                },

                else => {},
            }
        },

        .e_binary => |e| {
            switch (e.op) {
                // always string or number or bigint
                .bin_add,
                .bin_add_assign,
                // always number or bigint
                .bin_sub,
                .bin_mul,
                .bin_div,
                .bin_rem,
                .bin_pow,
                .bin_sub_assign,
                .bin_mul_assign,
                .bin_div_assign,
                .bin_rem_assign,
                .bin_pow_assign,
                .bin_shl,
                .bin_shr,
                .bin_u_shr,
                .bin_shl_assign,
                .bin_shr_assign,
                .bin_u_shr_assign,
                .bin_bitwise_or,
                .bin_bitwise_and,
                .bin_bitwise_xor,
                .bin_bitwise_or_assign,
                .bin_bitwise_and_assign,
                .bin_bitwise_xor_assign,
                // always boolean
                .bin_lt,
                .bin_le,
                .bin_gt,
                .bin_ge,
                .bin_in,
                .bin_instanceof,
                .bin_loose_eq,
                .bin_loose_ne,
                .bin_strict_eq,
                .bin_strict_ne,
                => {
                    return Result{ .ok = true, .value = false, .side_effects = SideEffects.could_have_side_effects };
                },

                // A comma expression evaluates to its right-hand side, but the
                // left-hand side may have side effects.
                .bin_comma => {
                    const res = toNullOrUndefined(p, e.right.data);
                    if (res.ok) {
                        return Result{ .ok = true, .value = res.value, .side_effects = SideEffects.could_have_side_effects };
                    }
                },
                else => {},
            }
        },
        // Inlined enum values wrap the underlying literal; recurse into it.
        .e_inlined_enum => |inlined| {
            return toNullOrUndefined(p, inlined.value.data);
        },
        else => {},
    }

    // Unknown: caller must not read `.value`.
    return Result{ .ok = false, .value = false, .side_effects = SideEffects.could_have_side_effects };
}
|
||
|
||
/// Statically determines the truthiness of `exp` when possible.
/// `.ok = true` means the boolean value is known; `.value` is that value.
/// `.side_effects` reports whether evaluating the expression could still
/// have observable side effects (and thus must be kept even if the value
/// itself is known).
pub fn toBoolean(p: anytype, exp: Expr.Data) Result {
    if (!p.options.features.dead_code_elimination) {
        // value should not be read if ok is false, all existing calls to this function already adhere to this
        return Result{ .ok = false, .value = undefined, .side_effects = .could_have_side_effects };
    }
    switch (exp) {
        // null and undefined are always falsy.
        .e_null, .e_undefined => {
            return Result{ .ok = true, .value = false, .side_effects = .no_side_effects };
        },
        .e_boolean => |e| {
            return Result{ .ok = true, .value = e.value, .side_effects = .no_side_effects };
        },
        // A number is falsy only when it is 0, -0, or NaN.
        .e_number => |e| {
            return Result{ .ok = true, .value = e.value != 0.0 and !std.math.isNan(e.value), .side_effects = .no_side_effects };
        },
        // A BigInt is falsy only when it is 0n.
        .e_big_int => |e| {
            return Result{ .ok = true, .value = !strings.eqlComptime(e.value, "0"), .side_effects = .no_side_effects };
        },
        // A string is truthy iff it is non-empty.
        .e_string => |e| {
            return Result{ .ok = true, .value = e.isPresent(), .side_effects = .no_side_effects };
        },
        // Function, arrow, and regexp literals are always truthy objects.
        .e_function, .e_arrow, .e_reg_exp => {
            return Result{ .ok = true, .value = true, .side_effects = .no_side_effects };
        },
        // Object/array/class literals are truthy, but evaluating their
        // contents can have side effects.
        .e_object, .e_array, .e_class => {
            return Result{ .ok = true, .value = true, .side_effects = .could_have_side_effects };
        },
        .e_unary => |e_| {
            switch (e_.op) {
                // "void x" is always undefined (falsy).
                .un_void => {
                    return Result{ .ok = true, .value = false, .side_effects = .could_have_side_effects };
                },
                .un_typeof => {
                    // typeof is never an empty string, so it is always truthy.

                    return Result{ .ok = true, .value = true, .side_effects = .could_have_side_effects };
                },
                // "!x" flips the known truthiness of "x".
                .un_not => {
                    var result = toBoolean(p, e_.value.data);
                    if (result.ok) {
                        result.value = !result.value;
                        return result;
                    }
                },
                else => {},
            }
        },
        .e_binary => |e_| {
            switch (e_.op) {
                .bin_logical_or => {
                    // "anything || truthy" is truthy
                    const result = toBoolean(p, e_.right.data);
                    if (result.value and result.ok) {
                        return Result{ .ok = true, .value = true, .side_effects = .could_have_side_effects };
                    }
                },
                .bin_logical_and => {
                    // "anything && falsy" is falsy
                    const result = toBoolean(p, e_.right.data);
                    if (!result.value and result.ok) {
                        return Result{ .ok = true, .value = false, .side_effects = .could_have_side_effects };
                    }
                },
                .bin_comma => {
                    // "anything, truthy/falsy" is truthy/falsy
                    var result = toBoolean(p, e_.right.data);
                    if (result.ok) {
                        result.side_effects = .could_have_side_effects;
                        return result;
                    }
                },
                // Numeric comparisons fold when both sides are known finite
                // numbers (finiteness rules out NaN-comparison surprises).
                .bin_gt => {
                    if (e_.left.data.toFiniteNumber()) |left_num| {
                        if (e_.right.data.toFiniteNumber()) |right_num| {
                            return Result{ .ok = true, .value = left_num > right_num, .side_effects = .no_side_effects };
                        }
                    }
                },
                .bin_lt => {
                    if (e_.left.data.toFiniteNumber()) |left_num| {
                        if (e_.right.data.toFiniteNumber()) |right_num| {
                            return Result{ .ok = true, .value = left_num < right_num, .side_effects = .no_side_effects };
                        }
                    }
                },
                .bin_le => {
                    if (e_.left.data.toFiniteNumber()) |left_num| {
                        if (e_.right.data.toFiniteNumber()) |right_num| {
                            return Result{ .ok = true, .value = left_num <= right_num, .side_effects = .no_side_effects };
                        }
                    }
                },
                .bin_ge => {
                    if (e_.left.data.toFiniteNumber()) |left_num| {
                        if (e_.right.data.toFiniteNumber()) |right_num| {
                            return Result{ .ok = true, .value = left_num >= right_num, .side_effects = .no_side_effects };
                        }
                    }
                },
                else => {},
            }
        },
        // Inlined enum values wrap the underlying literal; recurse into it.
        .e_inlined_enum => |inlined| {
            return toBoolean(p, inlined.value.data);
        },
        else => {},
    }

    // Unknown: caller must not rely on `.value`.
    return Result{ .ok = false, .value = false, .side_effects = SideEffects.could_have_side_effects };
}
|
||
};
|
||
|
||
/// Result of parsing something that may be either an expression or a
/// `let` statement (the "let" keyword is ambiguous in JavaScript).
const ExprOrLetStmt = struct {
    stmt_or_expr: js_ast.StmtOrExpr,
    // Declarations when this turned out to be a `let` statement; empty otherwise.
    decls: []G.Decl = &([_]G.Decl{}),
};

/// Whether a function being parsed appears in statement or expression position.
const FunctionKind = enum { stmt, expr };
|
||
|
||
/// Classifies an identifier that may prefix an expression with special
/// meaning ("async", "await", "yield") so the parser can disambiguate
/// e.g. `async () => {}` from a plain identifier named "async".
const AsyncPrefixExpression = enum(u2) {
    none,
    is_yield,
    is_async,
    is_await,

    const map = bun.ComptimeStringMap(AsyncPrefixExpression, .{
        .{ "yield", .is_yield },
        .{ "await", .is_await },
        .{ "async", .is_async },
    });

    /// Maps an identifier's text to its prefix kind, or `.none` if it is
    /// not one of the three special prefixes.
    pub fn find(ident: string) AsyncPrefixExpression {
        return map.get(ident) orelse .none;
    }
};
|
||
|
||
/// Context flags passed down when visiting an identifier reference.
const IdentifierOpts = packed struct {
    // Non-none when the identifier is the target of an assignment.
    assign_target: js_ast.AssignTarget = js_ast.AssignTarget.none,
    // True when the identifier is the operand of a `delete` expression.
    is_delete_target: bool = false,
    // True when this node was an identifier before any rewriting took place.
    was_originally_identifier: bool = false,
    // True when the identifier is being called, e.g. `foo()`.
    is_call_target: bool = false,
};
|
||
|
||
/// Returns true when the statement's meaning depends on which block scope it
/// lives in (e.g. `let`/`const` declarations, class/function declarations),
/// and false for statements that can be moved across block boundaries freely.
fn statementCaresAboutScope(stmt: Stmt) bool {
    switch (stmt.data) {
        .s_block,
        .s_empty,
        .s_debugger,
        .s_expr,
        .s_if,
        .s_for,
        .s_for_in,
        .s_for_of,
        .s_do_while,
        .s_while,
        .s_with,
        .s_try,
        .s_switch,
        .s_return,
        .s_throw,
        .s_break,
        .s_continue,
        .s_directive,
        .s_label,
        => return false,

        // "var" hoists to the function scope, so it doesn't care about the
        // block it appears in; let/const/using declarations do.
        .s_local => |local| return local.kind != .k_var,

        else => return true,
    }
}
|
||
|
||
/// Context passed *down* into expression visiting (paired with `ExprOut`,
/// which flows back up).
const ExprIn = struct {
    // This tells us if there are optional chain expressions (EDot, EIndex, or
    // ECall) that are chained on to this expression. Because of the way the AST
    // works, chaining expressions on to this expression means they are our
    // parent expressions.
    //
    // Some examples:
    //
    //   a?.b.c  // EDot
    //   a?.b[c] // EIndex
    //   a?.b()  // ECall
    //
    // Note that this is false if our parent is a node with a OptionalChain
    // value of OptionalChainStart. That means it's the start of a new chain, so
    // it's not considered part of this one.
    //
    // Some examples:
    //
    //   a?.b?.c   // EDot
    //   a?.b?.[c] // EIndex
    //   a?.b?.()  // ECall
    //
    // Also note that this is false if our parent is a node with a OptionalChain
    // value of OptionalChainNone. That means it's outside parentheses, which
    // means it's no longer part of the chain.
    //
    // Some examples:
    //
    //   (a?.b).c  // EDot
    //   (a?.b)[c] // EIndex
    //   (a?.b)()  // ECall
    //
    has_chain_parent: bool = false,

    // If our parent is an ECall node with an OptionalChain value of
    // OptionalChainStart, then we will need to store the value for the "this" of
    // that call somewhere if the current expression is an optional chain that
    // ends in a property access. That's because the value for "this" will be
    // used twice: once for the inner optional chain and once for the outer
    // optional chain.
    //
    // Example:
    //
    //   // Original
    //   a?.b?.();
    //
    //   // Lowered
    //   var _a;
    //   (_a = a == null ? void 0 : a.b) == null ? void 0 : _a.call(a);
    //
    // In the example above we need to store "a" as the value for "this" so we
    // can substitute it back in when we call "_a" if "_a" is indeed present.
    // See also "thisArgFunc" and "thisArgWrapFunc" in "exprOut".
    store_this_arg_for_parent_optional_chain: bool = false,

    // Certain substitutions of identifiers are disallowed for assignment targets.
    // For example, we shouldn't transform "undefined = 1" into "void 0 = 1". This
    // isn't something real-world code would do but it matters for conformance
    // tests.
    assign_target: js_ast.AssignTarget = js_ast.AssignTarget.none,

    // Currently this is only used when unwrapping a call to `require()`
    // with `__toESM()`.
    is_immediately_assigned_to_decl: bool = false,

    // NOTE(review): semantics inferred from the name only — appears to allow
    // replacing a method-call property access with `undefined`; confirm at the
    // use sites before relying on this description.
    property_access_for_method_call_maybe_should_replace_with_undefined: bool = false,
};
|
||
|
||
const Tup = std.meta.Tuple;
|
||
|
||
// This function exists to tie all of these checks together in one place.
// It can sometimes show up on benchmarks as a small thing, so the comparisons
// use comptime-length-specialized equality.
fn isEvalOrArguments(name: string) bool {
    if (strings.eqlComptime(name, "eval")) return true;
    return strings.eqlComptime(name, "arguments");
}
|
||
|
||
/// Options for prepending temporary-ref declarations to a statement list.
const PrependTempRefsOpts = struct {
    // Location of the enclosing function body, when there is one.
    fn_body_loc: ?logger.Loc = null,
    kind: StmtsKind = StmtsKind.none,
};
|
||
|
||
/// What kind of statement list is being visited; affects scoping/hoisting
/// decisions made while visiting statements.
pub const StmtsKind = enum {
    none,
    loop_body,
    switch_stmt,
    fn_body,
};
|
||
|
||
/// Aborts the process with a "not implemented" panic. Used as a placeholder
/// for parser paths that have not been ported yet.
fn notimpl() noreturn {
    Output.panic("Not implemented yet!!", .{});
}
|
||
|
||
/// Holds the result of converting between expression and binding forms;
/// at most one of the two fields is set.
const ExprBindingTuple = struct {
    expr: ?ExprNodeIndex = null,
    binding: ?Binding = null,
};

/// A generated temporary variable, optionally with an initializer value.
const TempRef = struct {
    ref: Ref,
    value: ?Expr = null,
};
|
||
|
||
/// Records a call (or `new`) whose target is an import namespace object,
/// so a warning/transform can be applied later.
const ImportNamespaceCallOrConstruct = struct {
    ref: Ref,
    // True when used with `new` rather than a plain call.
    is_construct: bool = false,
};

/// Tracks a `.then(...)`/`.catch(...)` chain while visiting, used to detect
/// patterns like `import(...).then(a, b)` and whether a rejection handler
/// exists anywhere in the chain.
const ThenCatchChain = struct {
    next_target: js_ast.Expr.Data,
    // True when `.then()` received two arguments (fulfilled + rejected).
    has_multiple_args: bool = false,
    // True when a `.catch()` (or a two-arg `.then()`) appears in the chain.
    has_catch: bool = false,
};
|
||
|
||
/// An import path string as parsed from source, before resolution.
const ParsedPath = struct {
    loc: logger.Loc,
    text: string,
    // True when the import is flagged as a macro import.
    is_macro: bool,
    import_tag: ImportRecord.Tag = .none,
};
|
||
|
||
/// Syntax features that are restricted or forbidden in strict mode; used to
/// produce the appropriate strict-mode diagnostics.
const StrictModeFeature = enum {
    with_statement,
    delete_bare_name,
    for_in_var_init,
    eval_or_arguments,
    reserved_word,
    legacy_octal_literal,
    legacy_octal_escape,
    if_else_function_stmt,
};
|
||
|
||
const Map = std.AutoHashMapUnmanaged;
|
||
|
||
const List = std.ArrayListUnmanaged;
|
||
const ListManaged = std.ArrayList;
|
||
/// A source location where a binding pattern is invalid, deferred until we
/// know whether the construct really is a binding pattern (see DeferredErrors).
const InvalidLoc = struct {
    loc: logger.Loc,
    kind: Tag = Tag.unknown,

    pub const Tag = enum {
        spread,
        parentheses,
        getter,
        setter,
        method,
        unknown,
    };

    /// Reports the deferred error for this location to `log` with a message
    /// chosen by the tag.
    pub fn addError(loc: InvalidLoc, log: *logger.Log, source: *const logger.Source) void {
        // Error reporting is off the hot path.
        @setCold(true);
        const text = switch (loc.kind) {
            .spread => "Unexpected trailing comma after rest element",
            .parentheses => "Unexpected parentheses in binding pattern",
            .getter => "Unexpected getter in binding pattern",
            .setter => "Unexpected setter in binding pattern",
            .method => "Unexpected method in binding pattern",
            .unknown => "Invalid binding pattern",
        };
        log.addError(source, loc.loc, text) catch unreachable;
    }
};
|
||
const LocList = ListManaged(InvalidLoc);
|
||
const StmtList = ListManaged(Stmt);
|
||
|
||
// This hash table is used every time we parse function args.
// Rather than allocating a new hash table each time, we can just reuse the
// previous allocation via the object pool below.

const StringVoidMap = struct {
    allocator: Allocator,
    map: bun.StringHashMapUnmanaged(void) = bun.StringHashMapUnmanaged(void){},

    /// Returns true if the map already contained the given key.
    /// The key is inserted when it was not already present.
    pub fn getOrPutContains(this: *StringVoidMap, key: string) bool {
        const entry = this.map.getOrPut(this.allocator, key) catch unreachable;
        return entry.found_existing;
    }

    /// Returns true if the key is present, without modifying the map.
    pub fn contains(this: *StringVoidMap, key: string) bool {
        return this.map.contains(key);
    }

    fn init(allocator: Allocator) anyerror!StringVoidMap {
        return StringVoidMap{ .allocator = allocator };
    }

    /// Clears entries while keeping capacity so the allocation is reusable.
    pub fn reset(this: *StringVoidMap) void {
        // We must reset or the hash table will contain invalid pointers
        this.map.clearRetainingCapacity();
    }

    /// Borrows a pooled instance (allocating one if the pool is empty).
    pub inline fn get(allocator: Allocator) *Node {
        return Pool.get(allocator);
    }

    /// Returns a pooled instance for reuse.
    pub inline fn release(node: *Node) void {
        Pool.release(node);
    }

    pub const Pool = ObjectPool(StringVoidMap, init, true, 32);
    pub const Node = Pool.Node;
};
|
||
const RefCtx = @import("./ast/base.zig").RefCtx;
|
||
const SymbolUseMap = js_ast.Part.SymbolUseMap;
|
||
const SymbolPropertyUseMap = js_ast.Part.SymbolPropertyUseMap;
|
||
const StringBoolMap = bun.StringHashMapUnmanaged(bool);
|
||
const RefMap = std.HashMapUnmanaged(Ref, void, RefCtx, 80);
|
||
const RefArrayMap = std.ArrayHashMapUnmanaged(Ref, void, @import("./ast/base.zig").RefHashCtx, false);
|
||
|
||
const RefRefMap = std.HashMapUnmanaged(Ref, Ref, RefCtx, 80);
|
||
const ImportRecord = importRecord.ImportRecord;
|
||
const Flags = js_ast.Flags;
|
||
/// A scope together with the source location where it was opened; used to
/// replay scopes in source order between the parse and visit passes.
const ScopeOrder = struct {
    loc: logger.Loc,
    scope: *js_ast.Scope,
};

/// Options for parsing a parenthesized expression, which may turn out to be
/// an arrow function's parameter list.
const ParenExprOpts = struct {
    async_range: logger.Range = logger.Range.None,
    is_async: bool = false,
    force_arrow_fn: bool = false,
};
|
||
|
||
/// How "await"/"yield" may be used in the current function context:
/// as a plain identifier, as an expression keyword, or not at all.
const AwaitOrYield = enum(u3) {
    allow_ident,
    allow_expr,
    forbid_all,
};
|
||
|
||
// This is function-specific information used during parsing. It is saved and
// restored on the call stack around code that parses nested functions and
// arrow expressions.
const FnOrArrowDataParse = struct {
    async_range: logger.Range = logger.Range.None,
    // Location to point at when reporting that "async" is required here.
    needs_async_loc: logger.Loc = logger.Loc.Empty,
    allow_await: AwaitOrYield = AwaitOrYield.allow_ident,
    allow_yield: AwaitOrYield = AwaitOrYield.allow_ident,
    allow_super_call: bool = false,
    allow_super_property: bool = false,
    is_top_level: bool = false,
    is_constructor: bool = false,
    // True for TypeScript `declare function` forms (no body expected).
    is_typescript_declare: bool = false,

    has_argument_decorators: bool = false,
    has_decorators: bool = false,

    is_return_disallowed: bool = false,
    is_this_disallowed: bool = false,

    has_async_range: bool = false,
    arrow_arg_errors: DeferredArrowArgErrors = DeferredArrowArgErrors{},
    track_arrow_arg_errors: bool = false,

    // In TypeScript, forward declarations of functions have no bodies
    allow_missing_body_for_type_script: bool = false,

    // Allow TypeScript decorators in function arguments
    allow_ts_decorators: bool = false,

    /// Default context used at module top level: "await"/"yield" are both
    /// forbidden as expressions there.
    pub fn i() FnOrArrowDataParse {
        return FnOrArrowDataParse{ .allow_await = AwaitOrYield.forbid_all };
    }
};
|
||
|
||
// This is function-specific information used during visiting. It is saved and
// restored on the call stack around code that parses nested functions and
// arrow expressions.
const FnOrArrowDataVisit = struct {
    // super_index_ref: ?*Ref = null,

    is_arrow: bool = false,
    is_async: bool = false,
    is_inside_loop: bool = false,
    is_inside_switch: bool = false,
    // True at module top level, outside any function or arrow body.
    is_outside_fn_or_arrow: bool = false,

    // This is used to silence unresolvable imports due to "require" calls inside
    // a try/catch statement. The assumption is that the try/catch statement is
    // there to handle the case where the reference to "require" crashes.
    // Incremented on entering a try body, decremented on leaving it.
    try_body_count: i32 = 0,
};
|
||
|
||
// This is function-specific information used during visiting. It is saved and
// restored on the call stack around code that parses nested functions (but not
// nested arrow functions).
const FnOnlyDataVisit = struct {
    // This is a reference to the magic "arguments" variable that exists inside
    // functions in JavaScript. It will be non-nil inside functions and nil
    // otherwise.
    arguments_ref: ?Ref = null,

    // Arrow functions don't capture the value of "this" and "arguments". Instead,
    // the values are inherited from the surrounding context. If arrow functions
    // are turned into regular functions due to lowering, we will need to generate
    // local variables to capture these values so they are preserved correctly.
    this_capture_ref: ?Ref = null,
    arguments_capture_ref: ?Ref = null,

    /// This is a reference to the enclosing class name if there is one. It's used
    /// to implement "this" and "super" references. A name is automatically generated
    /// if one is missing so this will always be present inside a class body.
    class_name_ref: ?*Ref = null,

    /// If true, we're inside a static class context where "this" expressions
    /// should be replaced with the class name.
    should_replace_this_with_class_name_ref: bool = false,

    // If we're inside an async arrow function and async functions are not
    // supported, then we will have to convert that arrow function to a generator
    // function. That means references to "arguments" inside the arrow function
    // will have to reference a captured variable instead of the real variable.
    is_inside_async_arrow_fn: bool = false,

    // If false, disallow "new.target" expressions. We disallow all "new.target"
    // expressions at the top-level of the file (i.e. not inside a function or
    // a class field). Technically since CommonJS files are wrapped in a function
    // you can use "new.target" in node as an alias for "undefined" but we don't
    // support that.
    is_new_target_allowed: bool = false,

    // If false, the value for "this" is the top-level module scope "this" value.
    // That means it's "undefined" for ECMAScript modules and "exports" for
    // CommonJS modules. We track this information so that we can substitute the
    // correct value for these top-level "this" references at compile time instead
    // of passing the "this" expression through to the output and leaving the
    // interpretation up to the run-time behavior of the generated code.
    //
    // If true, the value for "this" is nested inside something (either a function
    // or a class declaration). That means the top-level module scope "this" value
    // has been shadowed and is now inaccessible.
    is_this_nested: bool = false,
};
|
||
|
||
// Due to ES6 destructuring patterns, there are many cases where it's
// impossible to distinguish between an array or object literal and a
// destructuring assignment until we hit the "=" operator later on.
// This object defers errors about being in one state or the other
// until we discover which state we're in.
const DeferredErrors = struct {
    // These are errors for expressions
    invalid_expr_default_value: ?logger.Range = null,
    invalid_expr_after_question: ?logger.Range = null,
    array_spread_feature: ?logger.Range = null,

    /// True when no deferred error has been recorded yet.
    pub fn isEmpty(self: *DeferredErrors) bool {
        if (self.invalid_expr_default_value != null) return false;
        if (self.invalid_expr_after_question != null) return false;
        return self.array_spread_feature == null;
    }

    /// Copies each recorded error into `to`, overwriting the destination
    /// slot only when this instance actually has a value for it.
    pub fn mergeInto(self: *DeferredErrors, to: *DeferredErrors) void {
        if (self.invalid_expr_default_value) |range| to.invalid_expr_default_value = range;
        if (self.invalid_expr_after_question) |range| to.invalid_expr_after_question = range;
        if (self.array_spread_feature) |range| to.array_spread_feature = range;
    }

    const None = DeferredErrors{
        .invalid_expr_default_value = null,
        .invalid_expr_after_question = null,
        .array_spread_feature = null,
    };
};
|
||
|
||
/// The parsed items of an import clause, e.g. `{ a, b as c }`.
const ImportClause = struct {
    items: []js_ast.ClauseItem = &([_]js_ast.ClauseItem{}),
    // True when the clause was written on a single line (printing hint).
    is_single_line: bool = false,
    // True when at least one item was a TypeScript type-only import.
    had_type_only_imports: bool = false,
};
|
||
|
||
/// Options threaded through while parsing an object-literal or class property.
const PropertyOpts = struct {
    async_range: logger.Range = logger.Range.None,
    declare_range: logger.Range = logger.Range.None,
    is_async: bool = false,
    is_generator: bool = false,

    // Class-related options
    is_static: bool = false,
    is_class: bool = false,
    class_has_extends: bool = false,
    allow_ts_decorators: bool = false,
    is_ts_abstract: bool = false,
    ts_decorators: []Expr = &[_]Expr{},
    has_argument_decorators: bool = false,
    has_class_decorators: bool = false,
};
|
||
|
||
/// Accumulated results of the import-scanning pass. A single instance is
/// meant to be reused across files: call `reset()` between scans so the
/// backing allocations are retained.
pub const ScanPassResult = struct {
    pub const ParsePassSymbolUse = struct { ref: Ref, used: bool = false, import_record_index: u32 };
    pub const NamespaceCounter = struct { count: u16, import_record_index: u32 };
    pub const ParsePassSymbolUsageMap = bun.StringArrayHashMap(ParsePassSymbolUse);
    import_records: ListManaged(ImportRecord),
    named_imports: js_ast.Ast.NamedImports,
    used_symbols: ParsePassSymbolUsageMap,
    import_records_to_keep: ListManaged(u32),
    approximate_newline_count: usize = 0,

    pub fn init(allocator: Allocator) ScanPassResult {
        return .{
            .import_records = ListManaged(ImportRecord).init(allocator),
            .named_imports = .{},
            .used_symbols = ParsePassSymbolUsageMap.init(allocator),
            .import_records_to_keep = ListManaged(u32).init(allocator),
            .approximate_newline_count = 0,
        };
    }

    /// Clears all per-file state while retaining capacity, so the same
    /// instance can be reused for the next file's scan.
    pub fn reset(scan_pass: *ScanPassResult) void {
        scan_pass.named_imports.clearRetainingCapacity();
        scan_pass.import_records.clearRetainingCapacity();
        scan_pass.used_symbols.clearRetainingCapacity();
        // Previously this list was not cleared here, which could carry stale
        // import-record indices from the previous file into the next scan
        // when the result object is reused.
        scan_pass.import_records_to_keep.clearRetainingCapacity();
        scan_pass.approximate_newline_count = 0;
    }
};
|
||
|
||
/// Selects the macro-context type for the current target: WASM builds have
/// no macro runtime, so an opaque nullable pointer stands in for it there.
fn MacroContextType() type {
    return if (comptime Environment.isWasm)
        ?*anyopaque
    else
        js_ast.Macro.MacroContext;
}
|
||
|
||
pub const Parser = struct {
|
||
options: Options,
|
||
lexer: js_lexer.Lexer,
|
||
log: *logger.Log,
|
||
source: *const logger.Source,
|
||
define: *Define,
|
||
allocator: Allocator,
|
||
|
||
/// Parser configuration. Note: when a change here affects runtime output,
/// `hashForRuntimeTranspiler` must account for it so cached transpiled
/// output is invalidated.
pub const Options = struct {
    jsx: options.JSX.Pragma,
    // True when parsing TypeScript syntax.
    ts: bool = false,
    keep_names: bool = true,
    ignore_dce_annotations: bool = false,
    preserve_unused_imports_ts: bool = false,
    use_define_for_class_fields: bool = false,
    suppress_warnings_about_weird_code: bool = true,
    filepath_hash_for_hmr: u32 = 0,
    features: RuntimeFeatures = .{},

    tree_shaking: bool = false,
    bundle: bool = false,
    package_version: string = "",

    macro_context: *MacroContextType() = undefined,

    warn_about_unbundled_modules: bool = true,

    module_type: options.ModuleType = .unknown,
    output_format: options.Format = .esm,

    transform_only: bool = false,

    /// Used for inlining the state of import.meta.main during visiting
    import_meta_main_value: ?bool = null,
    lower_import_meta_main_for_node_js: bool = false,

    /// When using react fast refresh or server components, the framework is
    /// able to customize what import sources are used.
    framework: ?*bun.bake.Framework = null,

    /// Folds every option that affects transpiled output into `hasher` so
    /// the runtime transpiler cache key changes whenever output would.
    pub fn hashForRuntimeTranspiler(this: *const Options, hasher: *std.hash.Wyhash, did_use_jsx: bool) void {
        bun.assert(!this.bundle);

        if (did_use_jsx) {
            if (this.jsx.parse) {
                this.jsx.hashForRuntimeTranspiler(hasher);
                // this holds the values for the jsx optimization flags, which have both been removed
                // as the optimizations break newer versions of react, see https://github.com/oven-sh/bun/issues/11025
                const jsx_optimizations = [_]bool{ false, false };
                hasher.update(std.mem.asBytes(&jsx_optimizations));
            } else {
                hasher.update("NO_JSX");
            }
        }

        if (this.ts) {
            hasher.update("TS");
        } else {
            hasher.update("NO_TS");
        }

        if (this.ignore_dce_annotations) {
            hasher.update("no_dce");
        }

        this.features.hashForRuntimeTranspiler(hasher);
    }

    // Used to determine if `joinWithComma` should be called in `visitStmts`. We do this
    // to avoid changing line numbers too much to make source mapping more readable
    pub fn runtimeMergeAdjacentExpressionStatements(this: Options) bool {
        return this.bundle;
    }

    /// Builds Options from a JSX pragma and a loader, deriving the
    /// TypeScript and JSX parse flags from the loader's file type.
    pub fn init(jsx: options.JSX.Pragma, loader: options.Loader) Options {
        var opts = Options{
            .ts = loader.isTypeScript(),
            .jsx = jsx,
        };
        opts.jsx.parse = loader.isJSX();
        return opts;
    }
};
|
||
|
||
/// Runs the import-scanning pass, selecting the scanner variant that matches
/// the enabled syntax (TypeScript and/or JSX). Results are accumulated into
/// `scan_pass`.
pub fn scanImports(self: *Parser, scan_pass: *ScanPassResult) anyerror!void {
    const is_ts = self.options.ts;
    const is_jsx = self.options.jsx.parse;
    if (is_ts and is_jsx) {
        return try self._scanImports(TSXImportScanner, scan_pass);
    }
    if (is_ts) {
        return try self._scanImports(TypeScriptImportScanner, scan_pass);
    }
    if (is_jsx) {
        return try self._scanImports(JSXImportScanner, scan_pass);
    }
    return try self._scanImports(JavaScriptImportScanner, scan_pass);
}
|
||
|
||
/// Shared implementation of the import-scanning pass, specialized by
/// `ParserType` for each syntax variant. Parses (without binding symbols)
/// and records import usage into `scan_pass`.
fn _scanImports(self: *Parser, comptime ParserType: type, scan_pass: *ScanPassResult) anyerror!void {
    var p: ParserType = undefined;

    try ParserType.init(self.allocator, self.log, self.source, self.define, self.lexer, self.options, &p);
    // Route the parser's output directly into the reusable scan-pass buffers.
    p.import_records = &scan_pass.import_records;
    p.named_imports = &scan_pass.named_imports;

    // The problem with our scan pass approach is type-only imports.
    // We don't have accurate symbol counts.
    // So we don't have a good way to distinguish between a type-only import and not.
    if (comptime ParserType.parser_features.typescript) {
        p.parse_pass_symbol_uses = &scan_pass.used_symbols;
    }

    // Parse the file in the first pass, but do not bind symbols
    var opts = ParseStatementOptions{ .is_module_scope = true };

    // Parsing seems to take around 2x as much time as visiting.
    // Which makes sense.
    // June 4: "Parsing took: 18028000"
    // June 4: "Rest of this took: 8003000"
    _ = try p.parseStmtsUpTo(js_lexer.T.t_end_of_file, &opts);

    // For TypeScript, retroactively decide which records were actually used.
    if (comptime ParserType.parser_features.typescript) {
        for (scan_pass.import_records.items) |*import_record| {
            // Mark everything as unused
            // Except:
            // - export * as ns from 'foo';
            // - export * from 'foo';
            // - import 'foo';
            // - import("foo")
            // - require("foo")
            import_record.is_unused = import_record.is_unused or
                (import_record.kind == .stmt and
                !import_record.was_originally_bare_import and
                !import_record.calls_runtime_re_export_fn);
        }

        // Re-mark any record whose imported symbol was actually referenced.
        var iter = scan_pass.used_symbols.iterator();
        while (iter.next()) |entry| {
            const val = entry.value_ptr;
            if (val.used) {
                scan_pass.import_records.items[val.import_record_index].is_unused = false;
            }
        }
    }

    // Symbol use counts are unavailable
    // So we say "did we parse any JSX?"
    // if yes, just automatically add the import so that .bun knows to include the file.
    if (self.options.jsx.parse and p.needs_jsx_import) {
        _ = p.addImportRecord(
            .require,
            logger.Loc{ .start = 0 },
            p.options.jsx.importSource(),
        );
        // Ensure we have both classic and automatic
        // This is to handle cases where they use fragments in the automatic runtime
        _ = p.addImportRecord(
            .require,
            logger.Loc{ .start = 0 },
            p.options.jsx.classic_import_source,
        );
    }

    scan_pass.approximate_newline_count = p.lexer.approximate_newline_count;
}
|
||
|
||
/// Wrap an already-constructed expression in a minimal two-part AST whose
/// single statement is an `s_lazy_export`, so the rest of the pipeline can
/// treat the value as this module's export. When `runtime_api_call` is
/// non-empty, the expression is additionally passed through that runtime
/// helper via `callRuntime`.
///
/// Exports kind inference: when the expression is `e_undefined`, a ".cjs"
/// extension forces CommonJS and ".mjs" forces ESM; otherwise `.none`.
pub fn toLazyExportAST(this: *Parser, expr: Expr, comptime runtime_api_call: []const u8) !js_ast.Result {
    var p: JavaScriptParser = undefined;
    try JavaScriptParser.init(this.allocator, this.log, this.source, this.define, this.lexer, this.options, &p);
    p.lexer.track_comments = this.options.features.minify_identifiers;

    // Instead of doing "should_fold_typescript_constant_expressions or features.minify_syntax"
    // Let's enable this flag file-wide
    if (p.options.features.minify_syntax or
        p.options.features.inlining)
    {
        p.should_fold_typescript_constant_expressions = true;
    }

    defer p.lexer.deinit();
    try p.prepareForVisitPass();

    var final_expr = expr;

    // Optionally call a runtime API function to transform the expression
    if (runtime_api_call.len > 0) {
        var args = try p.allocator.alloc(Expr, 1);
        args[0] = expr;
        final_expr = try p.callRuntime(expr.loc, runtime_api_call, args);
    }
    // NOTE(review): `final_expr` is never read below — the lazy-export
    // statement is built from `expr.data`, not `final_expr.data`. The
    // `callRuntime` call still registers the runtime import as a side
    // effect; confirm whether the wrapped expression was meant to be used
    // in the statement instead.

    // Placeholder part for the namespace export; it may be dropped if unused.
    const ns_export_part = js_ast.Part{
        .can_be_removed_if_unused = true,
    };

    var stmts = try p.allocator.alloc(js_ast.Stmt, 1);
    stmts[0] = Stmt{
        .data = .{
            .s_lazy_export = expr.data,
        },
        .loc = expr.loc,
    };
    const part = js_ast.Part{
        .stmts = stmts,
        .symbol_uses = p.symbol_uses,
    };
    // `part` took ownership of the symbol-use map; reset ours so it is not
    // shared or double-used.
    p.symbol_uses = .{};
    var parts = try p.allocator.alloc(js_ast.Part, 2);
    parts[0..2].* = .{ ns_export_part, part };

    const exports_kind: js_ast.ExportsKind = brk: {
        if (expr.data == .e_undefined) {
            if (strings.eqlComptime(this.source.path.name.ext, ".cjs")) break :brk .cjs;
            if (strings.eqlComptime(this.source.path.name.ext, ".mjs")) break :brk .esm;
        }
        break :brk .none;
    };
    return .{ .ast = try p.toAST(parts, exports_kind, .none, "") };
}
|
||
|
||
/// Entry point for a full parse. Dispatches to the parser instantiation
/// matching the configured dialect (TypeScript and/or JSX).
pub fn parse(self: *Parser) !js_ast.Result {
    // WASM builds force-enable both TypeScript and JSX parsing and route
    // every file through the TSX parser.
    if (comptime Environment.isWasm) {
        self.options.ts = true;
        self.options.jsx.parse = true;
        return try self._parse(TSXParser);
    }

    const is_typescript = self.options.ts;
    const parses_jsx = self.options.jsx.parse;

    if (is_typescript) {
        return if (parses_jsx)
            try self._parse(TSXParser)
        else
            try self._parse(TypeScriptParser);
    }
    return if (parses_jsx)
        try self._parse(JSXParser)
    else
        try self._parse(JavaScriptParser);
}
|
||
|
||
/// Parse + visit pass used for analysis tooling: parses the source with
/// `TSXParser`, visits the statements into parts, then hands the parts and
/// the parser to `callback`. No AST is returned; the callback is the only
/// consumer of the result. Returns `error.SyntaxError` if the log collected
/// any errors during parsing.
pub fn analyze(self: *Parser, context: *anyopaque, callback: *const fn (*anyopaque, *TSXParser, []js_ast.Part) anyerror!void) anyerror!void {
    var p: TSXParser = undefined;
    try TSXParser.init(self.allocator, self.log, self.source, self.define, self.lexer, self.options, &p);

    defer p.lexer.deinit();

    // Consume a leading hashbang comment
    // (the captured text is not otherwise used in this pass)
    var hashbang: string = "";
    if (p.lexer.token == .t_hashbang) {
        hashbang = p.lexer.identifier;
        try p.lexer.next();
    }

    // Parse the file in the first pass, but do not bind symbols
    var opts = ParseStatementOptions{ .is_module_scope = true };
    const parse_tracer = bun.tracy.traceNamed(@src(), "JSParser.parse");

    const stmts = p.parseStmtsUpTo(js_lexer.T.t_end_of_file, &opts) catch |err| {
        // On WASM, surface the error location and the log before rethrowing,
        // since there may be no other reporting channel.
        if (comptime Environment.isWasm) {
            Output.print("JSParser.parse: caught error {s} at location: {d}\n", .{ @errorName(err), p.lexer.loc().start });
            p.log.print(Output.writer()) catch {};
        }
        return err;
    };

    parse_tracer.end();

    if (self.log.errors > 0) {
        if (comptime Environment.isWasm) {
            // If the logger is backed by console.log, every print appends a newline.
            // so buffering is kind of mandatory here
            const fakeWriter = struct {
                fn writeAll(_: @This(), data: []const u8) anyerror!usize {
                    if (data.len == 0) return 0;

                    Output.print("{s}", .{data});
                    return data.len;
                }
            };
            const writer = std.io.Writer(fakeWriter, anyerror, fakeWriter.writeAll){
                .context = fakeWriter{},
            };
            var buffered_writer = std.io.bufferedWriter(writer);
            const actual = buffered_writer.writer();
            for (self.log.msgs.items) |msg| {
                // writeFormat needs a mutable message; copy before formatting.
                var m: logger.Msg = msg;
                m.writeFormat(actual, true) catch {};
            }
            buffered_writer.flush() catch {};
        }
        return error.SyntaxError;
    }

    const visit_tracer = bun.tracy.traceNamed(@src(), "JSParser.visit");
    try p.prepareForVisitPass();

    // Parts only live for the duration of the callback; they are freed here.
    var parts = ListManaged(js_ast.Part).init(p.allocator);
    defer parts.deinit();

    try p.appendPart(&parts, stmts);
    visit_tracer.end();

    const analyze_tracer = bun.tracy.traceNamed(@src(), "JSParser.analyze");
    try callback(context, &p, parts.items);
    analyze_tracer.end();
}
|
||
|
||
fn _parse(self: *Parser, comptime ParserType: type) !js_ast.Result {
|
||
const prev_action = bun.crash_handler.current_action;
|
||
defer bun.crash_handler.current_action = prev_action;
|
||
bun.crash_handler.current_action = .{ .parse = self.source.path.text };
|
||
|
||
var p: ParserType = undefined;
|
||
const orig_error_count = self.log.errors;
|
||
try ParserType.init(self.allocator, self.log, self.source, self.define, self.lexer, self.options, &p);
|
||
|
||
if (p.options.features.hot_module_reloading) {
|
||
bun.assert(!p.options.tree_shaking);
|
||
}
|
||
|
||
// Instead of doing "should_fold_typescript_constant_expressions or features.minify_syntax"
|
||
// Let's enable this flag file-wide
|
||
if (p.options.features.minify_syntax or
|
||
p.options.features.inlining)
|
||
{
|
||
p.should_fold_typescript_constant_expressions = true;
|
||
}
|
||
|
||
defer p.lexer.deinit();
|
||
|
||
var binary_expression_stack_heap = std.heap.stackFallback(42 * @sizeOf(ParserType.BinaryExpressionVisitor), bun.default_allocator);
|
||
p.binary_expression_stack = std.ArrayList(ParserType.BinaryExpressionVisitor).initCapacity(
|
||
binary_expression_stack_heap.get(),
|
||
41, // one less in case of unlikely alignment between the stack buffer and reality
|
||
) catch unreachable; // stack allocation cannot fail
|
||
defer p.binary_expression_stack.clearAndFree();
|
||
|
||
var binary_expression_simplify_stack_heap = std.heap.stackFallback(48 * @sizeOf(SideEffects.BinaryExpressionSimplifyVisitor), bun.default_allocator);
|
||
p.binary_expression_simplify_stack = std.ArrayList(SideEffects.BinaryExpressionSimplifyVisitor).initCapacity(
|
||
binary_expression_simplify_stack_heap.get(),
|
||
47,
|
||
) catch unreachable; // stack allocation cannot fail
|
||
defer p.binary_expression_simplify_stack.clearAndFree();
|
||
|
||
if (Environment.allow_assert) {
|
||
bun.assert(binary_expression_stack_heap.fixed_buffer_allocator.ownsPtr(@ptrCast(p.binary_expression_stack.items)));
|
||
bun.assert(binary_expression_simplify_stack_heap.fixed_buffer_allocator.ownsPtr(@ptrCast(p.binary_expression_simplify_stack.items)));
|
||
}
|
||
|
||
// defer {
|
||
// if (p.allocated_names_pool) |pool| {
|
||
// pool.data = p.allocated_names;
|
||
// pool.release();
|
||
// p.allocated_names_pool = null;
|
||
// }
|
||
// }
|
||
|
||
// Consume a leading hashbang comment
|
||
var hashbang: string = "";
|
||
if (p.lexer.token == .t_hashbang) {
|
||
hashbang = p.lexer.identifier;
|
||
try p.lexer.next();
|
||
}
|
||
|
||
// Detect a leading "// @bun" pragma
|
||
if (p.lexer.bun_pragma != .none and p.options.features.dont_bundle_twice) {
|
||
return js_ast.Result{
|
||
.already_bundled = switch (p.lexer.bun_pragma) {
|
||
.bun => .bun,
|
||
.bytecode => .bytecode,
|
||
.bytecode_cjs => .bytecode_cjs,
|
||
.bun_cjs => .bun_cjs,
|
||
else => unreachable,
|
||
},
|
||
};
|
||
}
|
||
|
||
// We must check the cache only after we've consumed the hashbang and leading // @bun pragma
|
||
// We don't want to ever put files with `// @bun` into this cache, as that would be wasteful.
|
||
if (comptime Environment.isNative and bun.FeatureFlags.runtime_transpiler_cache) {
|
||
const runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache = p.options.features.runtime_transpiler_cache;
|
||
if (runtime_transpiler_cache) |cache| {
|
||
if (cache.get(p.source, &p.options, p.options.jsx.parse and (!p.source.path.isNodeModule() or p.source.path.isJSXFile()))) {
|
||
return js_ast.Result{
|
||
.cached = {},
|
||
};
|
||
}
|
||
}
|
||
}
|
||
|
||
// Parse the file in the first pass, but do not bind symbols
|
||
var opts = ParseStatementOptions{ .is_module_scope = true };
|
||
const parse_tracer = bun.tracy.traceNamed(@src(), "JSParser.parse");
|
||
|
||
// Parsing seems to take around 2x as much time as visiting.
|
||
// Which makes sense.
|
||
// June 4: "Parsing took: 18028000"
|
||
// June 4: "Rest of this took: 8003000"
|
||
const stmts = try p.parseStmtsUpTo(js_lexer.T.t_end_of_file, &opts);
|
||
|
||
parse_tracer.end();
|
||
|
||
// Halt parsing right here if there were any errors
|
||
// This fixes various conditions that would cause crashes due to the AST being in an invalid state while visiting
|
||
// In a number of situations, we continue to parsing despite errors so that we can report more errors to the user
|
||
// Example where NOT halting causes a crash: A TS enum with a number literal as a member name
|
||
// https://discord.com/channels/876711213126520882/876711213126520885/1039325382488371280
|
||
if (self.log.errors > orig_error_count) {
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
bun.crash_handler.current_action = .{ .visit = self.source.path.text };
|
||
|
||
const visit_tracer = bun.tracy.traceNamed(@src(), "JSParser.visit");
|
||
try p.prepareForVisitPass();
|
||
|
||
// ESM is always strict mode. I don't think we need this.
|
||
// // Strip off a leading "use strict" directive when not bundling
|
||
// var directive = "";
|
||
|
||
// Insert a variable for "import.meta" at the top of the file if it was used.
|
||
// We don't need to worry about "use strict" directives because this only
|
||
// happens when bundling, in which case we are flatting the module scopes of
|
||
// all modules together anyway so such directives are meaningless.
|
||
// if (!p.import_meta_ref.isSourceIndexNull()) {
|
||
// // heap so it lives beyond this function call
|
||
// var decls = try p.allocator.alloc(G.Decl, 1);
|
||
// decls[0] = Decl{ .binding = p.b(B.Identifier{
|
||
// .ref = p.import_meta_ref,
|
||
// }, logger.Loc.Empty), .value = p.newExpr(E.Object{}, logger.Loc.Empty) };
|
||
// var importMetaStatement = p.s(S.Local{
|
||
// .kind = .k_const,
|
||
// .decls = decls,
|
||
// }, logger.Loc.Empty);
|
||
// }
|
||
|
||
var before = ListManaged(js_ast.Part).init(p.allocator);
|
||
var after = ListManaged(js_ast.Part).init(p.allocator);
|
||
var parts = ListManaged(js_ast.Part).init(p.allocator);
|
||
defer {
|
||
after.deinit();
|
||
before.deinit();
|
||
}
|
||
|
||
if (p.options.bundle) {
|
||
// The bundler requires a part for generated module wrappers. This
|
||
// part must be at the start as it is referred to by index.
|
||
before.append(js_ast.Part{}) catch bun.outOfMemory();
|
||
}
|
||
|
||
// --inspect-brk
|
||
if (p.options.features.set_breakpoint_on_first_line) {
|
||
var debugger_stmts = try p.allocator.alloc(Stmt, 1);
|
||
debugger_stmts[0] = Stmt{
|
||
.data = .{ .s_debugger = .{} },
|
||
.loc = logger.Loc.Empty,
|
||
};
|
||
before.append(
|
||
js_ast.Part{
|
||
.stmts = debugger_stmts,
|
||
},
|
||
) catch bun.outOfMemory();
|
||
}
|
||
|
||
// When "using" declarations appear at the top level, we change all TDZ
|
||
// variables in the top-level scope into "var" so that they aren't harmed
|
||
// when they are moved into the try/catch statement that lowering will
|
||
// generate.
|
||
//
|
||
// This is necessary because exported function declarations must be hoisted
|
||
// outside of the try/catch statement because they can be evaluated before
|
||
// this module is evaluated due to ESM cross-file function hoisting. And
|
||
// these function bodies might reference anything else in this scope, which
|
||
// must still work when those things are moved inside a try/catch statement.
|
||
//
|
||
// Before:
|
||
//
|
||
// using foo = get()
|
||
// export function fn() {
|
||
// return [foo, new Bar]
|
||
// }
|
||
// class Bar {}
|
||
//
|
||
// After ("fn" is hoisted, "Bar" is converted to "var"):
|
||
//
|
||
// export function fn() {
|
||
// return [foo, new Bar]
|
||
// }
|
||
// try {
|
||
// var foo = get();
|
||
// var Bar = class {};
|
||
// } catch (_) {
|
||
// ...
|
||
// } finally {
|
||
// ...
|
||
// }
|
||
//
|
||
// This is also necessary because other code might be appended to the code
|
||
// that we're processing and expect to be able to access top-level variables.
|
||
p.will_wrap_module_in_try_catch_for_using = p.shouldLowerUsingDeclarations(stmts);
|
||
|
||
// Bind symbols in a second pass over the AST. I started off doing this in a
|
||
// single pass, but it turns out it's pretty much impossible to do this
|
||
// correctly while handling arrow functions because of the grammar
|
||
// ambiguities.
|
||
//
|
||
// Note that top-level lowered "using" declarations disable tree-shaking
|
||
// because we only do tree-shaking on top-level statements and lowering
|
||
// a top-level "using" declaration moves all top-level statements into a
|
||
// nested scope.
|
||
if (!p.options.tree_shaking or p.will_wrap_module_in_try_catch_for_using) {
|
||
// When tree shaking is disabled, everything comes in a single part
|
||
try p.appendPart(&parts, stmts);
|
||
} else {
|
||
// Preprocess TypeScript enums to improve code generation. Otherwise
|
||
// uses of an enum before that enum has been declared won't be inlined:
|
||
//
|
||
// console.log(Foo.FOO) // We want "FOO" to be inlined here
|
||
// const enum Foo { FOO = 0 }
|
||
//
|
||
// The TypeScript compiler itself contains code with this pattern, so
|
||
// it's important to implement this optimization.
|
||
|
||
var preprocessed_enums: std.ArrayListUnmanaged([]js_ast.Part) = .{};
|
||
var preprocessed_enum_i: usize = 0;
|
||
if (p.scopes_in_order_for_enum.count() > 0) {
|
||
for (stmts) |*stmt| {
|
||
if (stmt.data == .s_enum) {
|
||
const old_scopes_in_order = p.scope_order_to_visit;
|
||
defer p.scope_order_to_visit = old_scopes_in_order;
|
||
|
||
p.scope_order_to_visit = p.scopes_in_order_for_enum.get(stmt.loc).?;
|
||
|
||
var enum_parts = ListManaged(js_ast.Part).init(p.allocator);
|
||
var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1);
|
||
sliced.appendAssumeCapacity(stmt.*);
|
||
try p.appendPart(&enum_parts, sliced.items);
|
||
try preprocessed_enums.append(p.allocator, enum_parts.items);
|
||
}
|
||
}
|
||
}
|
||
|
||
// When tree shaking is enabled, each top-level statement is potentially a separate part.
|
||
for (stmts) |stmt| {
|
||
switch (stmt.data) {
|
||
.s_local => |local| {
|
||
if (local.decls.len > 1) {
|
||
for (local.decls.slice()) |decl| {
|
||
var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1);
|
||
sliced.items.len = 1;
|
||
var _local = local.*;
|
||
var list = try ListManaged(G.Decl).initCapacity(p.allocator, 1);
|
||
list.items.len = 1;
|
||
list.items[0] = decl;
|
||
_local.decls.update(list);
|
||
sliced.items[0] = p.s(_local, stmt.loc);
|
||
try p.appendPart(&parts, sliced.items);
|
||
}
|
||
} else {
|
||
var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1);
|
||
sliced.items.len = 1;
|
||
sliced.items[0] = stmt;
|
||
try p.appendPart(&parts, sliced.items);
|
||
}
|
||
},
|
||
.s_import, .s_export_from, .s_export_star => {
|
||
const parts_list = if (p.options.bundle)
|
||
// Move imports (and import-like exports) to the top of the file to
|
||
// ensure that if they are converted to a require() call, the effects
|
||
// will take place before any other statements are evaluated.
|
||
&before
|
||
else
|
||
// If we aren't doing any format conversion, just keep these statements
|
||
// inline where they were. Exports are sorted so order doesn't matter:
|
||
// https://262.ecma-international.org/6.0/#sec-module-namespace-exotic-objects.
|
||
// However, this is likely an aesthetic issue that some people will
|
||
// complain about. In addition, there are code transformation tools
|
||
// such as TypeScript and Babel with bugs where the order of exports
|
||
// in the file is incorrectly preserved instead of sorted, so preserving
|
||
// the order of exports ourselves here may be preferable.
|
||
&parts;
|
||
|
||
var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1);
|
||
sliced.items.len = 1;
|
||
sliced.items[0] = stmt;
|
||
try p.appendPart(parts_list, sliced.items);
|
||
},
|
||
|
||
.s_class => |class| {
|
||
// Move class export statements to the top of the file if we can
|
||
// This automatically resolves some cyclical import issues
|
||
// https://github.com/kysely-org/kysely/issues/412
|
||
const should_move = !p.options.bundle and class.class.canBeMoved();
|
||
|
||
var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1);
|
||
sliced.items.len = 1;
|
||
sliced.items[0] = stmt;
|
||
try p.appendPart(&parts, sliced.items);
|
||
|
||
if (should_move) {
|
||
before.append(parts.getLast()) catch unreachable;
|
||
parts.items.len -= 1;
|
||
}
|
||
},
|
||
.s_export_default => |value| {
|
||
// We move export default statements when we can
|
||
// This automatically resolves some cyclical import issues in packages like luxon
|
||
// https://github.com/oven-sh/bun/issues/1961
|
||
const should_move = !p.options.bundle and value.canBeMoved();
|
||
var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1);
|
||
sliced.items.len = 1;
|
||
sliced.items[0] = stmt;
|
||
try p.appendPart(&parts, sliced.items);
|
||
|
||
if (should_move) {
|
||
before.append(parts.getLast()) catch unreachable;
|
||
parts.items.len -= 1;
|
||
}
|
||
},
|
||
.s_enum => {
|
||
try parts.appendSlice(preprocessed_enums.items[preprocessed_enum_i]);
|
||
preprocessed_enum_i += 1;
|
||
p.scope_order_to_visit = p.scope_order_to_visit[1..];
|
||
},
|
||
else => {
|
||
var sliced = try ListManaged(Stmt).initCapacity(p.allocator, 1);
|
||
sliced.appendAssumeCapacity(stmt);
|
||
try p.appendPart(&parts, sliced.items);
|
||
},
|
||
}
|
||
}
|
||
}
|
||
|
||
visit_tracer.end();
|
||
|
||
// If there were errors while visiting, also halt here
|
||
if (self.log.errors > orig_error_count) {
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
const postvisit_tracer = bun.tracy.traceNamed(@src(), "JSParser.postvisit");
|
||
defer postvisit_tracer.end();
|
||
|
||
var uses_dirname = p.symbols.items[p.dirname_ref.innerIndex()].use_count_estimate > 0;
|
||
var uses_filename = p.symbols.items[p.filename_ref.innerIndex()].use_count_estimate > 0;
|
||
|
||
// Handle dirname and filename at bundle-time
|
||
// We always inject it at the top of the module
|
||
//
|
||
// This inlines
|
||
//
|
||
// var __dirname = "foo/bar"
|
||
// var __filename = "foo/bar/baz.js"
|
||
//
|
||
if (p.options.bundle or !p.options.features.commonjs_at_runtime) {
|
||
if (uses_dirname or uses_filename) {
|
||
const count = @as(usize, @intFromBool(uses_dirname)) + @as(usize, @intFromBool(uses_filename));
|
||
var declared_symbols = DeclaredSymbol.List.initCapacity(p.allocator, count) catch unreachable;
|
||
var decls = p.allocator.alloc(G.Decl, count) catch unreachable;
|
||
if (uses_dirname) {
|
||
decls[0] = .{
|
||
.binding = p.b(B.Identifier{ .ref = p.dirname_ref }, logger.Loc.Empty),
|
||
.value = p.newExpr(
|
||
E.String{
|
||
.data = p.source.path.name.dir,
|
||
},
|
||
logger.Loc.Empty,
|
||
),
|
||
};
|
||
declared_symbols.appendAssumeCapacity(.{ .ref = p.dirname_ref, .is_top_level = true });
|
||
}
|
||
if (uses_filename) {
|
||
decls[@as(usize, @intFromBool(uses_dirname))] = .{
|
||
.binding = p.b(B.Identifier{ .ref = p.filename_ref }, logger.Loc.Empty),
|
||
.value = p.newExpr(
|
||
E.String{
|
||
.data = p.source.path.text,
|
||
},
|
||
logger.Loc.Empty,
|
||
),
|
||
};
|
||
declared_symbols.appendAssumeCapacity(.{ .ref = p.filename_ref, .is_top_level = true });
|
||
}
|
||
|
||
var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
|
||
part_stmts[0] = p.s(S.Local{
|
||
.kind = .k_var,
|
||
.decls = Decl.List.init(decls),
|
||
}, logger.Loc.Empty);
|
||
before.append(js_ast.Part{
|
||
.stmts = part_stmts,
|
||
.declared_symbols = declared_symbols,
|
||
.tag = .dirname_filename,
|
||
}) catch unreachable;
|
||
uses_dirname = false;
|
||
uses_filename = false;
|
||
}
|
||
}
|
||
|
||
const did_import_fast_refresh = false;
|
||
_ = did_import_fast_refresh;
|
||
|
||
// This is a workaround for broken module environment checks in packages like lodash-es
|
||
// https://github.com/lodash/lodash/issues/5660
|
||
var force_esm = false;
|
||
|
||
if (p.shouldUnwrapCommonJSToESM()) {
|
||
if (p.imports_to_convert_from_require.items.len > 0) {
|
||
const all_stmts = p.allocator.alloc(Stmt, p.imports_to_convert_from_require.items.len) catch unreachable;
|
||
before.ensureUnusedCapacity(p.imports_to_convert_from_require.items.len) catch unreachable;
|
||
|
||
var remaining_stmts = all_stmts;
|
||
|
||
for (p.imports_to_convert_from_require.items) |deferred_import| {
|
||
var import_part_stmts = remaining_stmts[0..1];
|
||
remaining_stmts = remaining_stmts[1..];
|
||
|
||
p.module_scope.generated.push(p.allocator, deferred_import.namespace.ref.?) catch bun.outOfMemory();
|
||
|
||
import_part_stmts[0] = Stmt.alloc(
|
||
S.Import,
|
||
S.Import{
|
||
.star_name_loc = deferred_import.namespace.loc,
|
||
.import_record_index = deferred_import.import_record_id,
|
||
.namespace_ref = deferred_import.namespace.ref.?,
|
||
},
|
||
deferred_import.namespace.loc,
|
||
);
|
||
var declared_symbols = DeclaredSymbol.List.initCapacity(p.allocator, 1) catch unreachable;
|
||
declared_symbols.appendAssumeCapacity(.{ .ref = deferred_import.namespace.ref.?, .is_top_level = true });
|
||
before.appendAssumeCapacity(.{
|
||
.stmts = import_part_stmts,
|
||
.declared_symbols = declared_symbols,
|
||
.tag = .import_to_convert_from_require,
|
||
// This part has a single symbol, so it may be removed if unused.
|
||
.can_be_removed_if_unused = true,
|
||
});
|
||
}
|
||
bun.assert(remaining_stmts.len == 0);
|
||
}
|
||
|
||
if (p.commonjs_named_exports.count() > 0) {
|
||
const export_refs = p.commonjs_named_exports.values();
|
||
const export_names = p.commonjs_named_exports.keys();
|
||
|
||
break_optimize: {
|
||
if (!p.commonjs_named_exports_deoptimized) {
|
||
var needs_decl_count: usize = 0;
|
||
for (export_refs) |*export_ref| {
|
||
needs_decl_count += @as(usize, @intFromBool(export_ref.needs_decl));
|
||
}
|
||
// This is a workaround for packages which have broken ESM checks
|
||
// If they never actually assign to exports.foo, only check for it
|
||
// and the package specifies type "module"
|
||
// and the package uses ESM syntax
|
||
// We should just say
|
||
// You're ESM and lying about it.
|
||
if (p.options.module_type == .esm or p.has_es_module_syntax) {
|
||
if (needs_decl_count == export_names.len) {
|
||
force_esm = true;
|
||
break :break_optimize;
|
||
}
|
||
}
|
||
|
||
if (needs_decl_count > 0) {
|
||
p.symbols.items[p.exports_ref.innerIndex()].use_count_estimate += @as(u32, @truncate(export_refs.len));
|
||
p.deoptimizeCommonJSNamedExports();
|
||
}
|
||
}
|
||
}
|
||
|
||
if (!p.commonjs_named_exports_deoptimized and p.esm_export_keyword.len == 0) {
|
||
p.esm_export_keyword.loc = export_refs[0].loc_ref.loc;
|
||
p.esm_export_keyword.len = 5;
|
||
}
|
||
}
|
||
}
|
||
|
||
if (parts.items.len < 4 and parts.items.len > 0 and p.options.features.unwrap_commonjs_to_esm) {
|
||
// Specially handle modules shaped like this:
|
||
//
|
||
// CommonJS:
|
||
//
|
||
// if (process.env.NODE_ENV === 'production')
|
||
// module.exports = require('./foo.prod.js')
|
||
// else
|
||
// module.exports = require('./foo.dev.js')
|
||
//
|
||
if (parts.items.len == 1 and parts.items[0].stmts.len == 1) {
|
||
var part = &parts.items[0];
|
||
const stmt: Stmt = part.stmts[0];
|
||
if (p.symbols.items[p.module_ref.innerIndex()].use_count_estimate == 1) {
|
||
if (stmt.data == .s_expr) {
|
||
const value: Expr = stmt.data.s_expr.value;
|
||
|
||
if (value.data == .e_binary) {
|
||
const bin = value.data.e_binary;
|
||
const left = bin.left;
|
||
const right = bin.right;
|
||
if (bin.op == .bin_assign and
|
||
left.data == .e_dot and
|
||
strings.eqlComptime(left.data.e_dot.name, "exports") and
|
||
left.data.e_dot.target.data == .e_identifier and
|
||
left.data.e_dot.target.data.e_identifier.ref.eql(p.module_ref))
|
||
{
|
||
const redirect_import_record_index: ?u32 = brk: {
|
||
// general case:
|
||
//
|
||
// module.exports = require("foo");
|
||
//
|
||
if (right.data == .e_require_string) {
|
||
break :brk right.data.e_require_string.import_record_index;
|
||
}
|
||
|
||
// special case: a module for us to unwrap
|
||
//
|
||
// module.exports = require("react/jsx-runtime")
|
||
// ^ was converted into:
|
||
//
|
||
// import * as Foo from 'bar';
|
||
// module.exports = Foo;
|
||
//
|
||
// This is what fixes #3537
|
||
if (right.data == .e_identifier and
|
||
p.import_records.items.len == 1 and
|
||
p.imports_to_convert_from_require.items.len == 1 and
|
||
p.imports_to_convert_from_require.items[0].namespace.ref.?.eql(right.data.e_identifier.ref))
|
||
{
|
||
// We know it's 0 because there is only one import in the whole file
|
||
// so that one import must be the one we're looking for
|
||
break :brk 0;
|
||
}
|
||
|
||
break :brk null;
|
||
};
|
||
if (redirect_import_record_index) |id| {
|
||
part.symbol_uses = .{};
|
||
return js_ast.Result{
|
||
.ast = js_ast.Ast{
|
||
.import_records = ImportRecord.List.init(p.import_records.items),
|
||
.redirect_import_record_index = id,
|
||
.named_imports = p.named_imports,
|
||
.named_exports = p.named_exports,
|
||
},
|
||
};
|
||
}
|
||
}
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
if (p.commonjs_named_exports_deoptimized and
|
||
p.options.features.unwrap_commonjs_to_esm and
|
||
p.unwrap_all_requires and
|
||
p.imports_to_convert_from_require.items.len == 1 and
|
||
p.import_records.items.len == 1 and
|
||
p.symbols.items[p.module_ref.innerIndex()].use_count_estimate == 1)
|
||
{
|
||
for (parts.items) |*part| {
|
||
// Specially handle modules shaped like this:
|
||
//
|
||
// doSomeStuff();
|
||
// module.exports = require('./foo.js');
|
||
//
|
||
// An example is react-dom/index.js, which does a DCE check.
|
||
if (part.stmts.len > 1) break;
|
||
|
||
for (part.stmts, 0..) |*stmt, j| {
|
||
if (stmt.data == .s_expr) {
|
||
const value: Expr = stmt.data.s_expr.value;
|
||
|
||
if (value.data == .e_binary) {
|
||
var bin = value.data.e_binary;
|
||
while (true) {
|
||
const left = bin.left;
|
||
const right = bin.right;
|
||
|
||
if (bin.op == .bin_assign and
|
||
right.data == .e_require_string and
|
||
left.data == .e_dot and
|
||
strings.eqlComptime(left.data.e_dot.name, "exports") and
|
||
left.data.e_dot.target.data == .e_identifier and
|
||
left.data.e_dot.target.data.e_identifier.ref.eql(p.module_ref))
|
||
{
|
||
p.export_star_import_records.append(
|
||
p.allocator,
|
||
right.data.e_require_string.import_record_index,
|
||
) catch unreachable;
|
||
const namespace_ref = p.imports_to_convert_from_require.items[
|
||
right.data.e_require_string.unwrapped_id
|
||
].namespace.ref.?;
|
||
|
||
part.stmts = brk: {
|
||
var new_stmts = try StmtList.initCapacity(p.allocator, part.stmts.len + 1);
|
||
new_stmts.appendSliceAssumeCapacity(part.stmts[0..j]);
|
||
|
||
new_stmts.appendAssumeCapacity(Stmt.alloc(
|
||
S.ExportStar,
|
||
S.ExportStar{
|
||
.import_record_index = right.data.e_require_string.import_record_index,
|
||
.namespace_ref = namespace_ref,
|
||
},
|
||
stmt.loc,
|
||
));
|
||
new_stmts.appendSliceAssumeCapacity(part.stmts[j + 1 ..]);
|
||
break :brk new_stmts.items;
|
||
};
|
||
|
||
part.import_record_indices.push(p.allocator, right.data.e_require_string.import_record_index) catch unreachable;
|
||
p.symbols.items[p.module_ref.innerIndex()].use_count_estimate = 0;
|
||
p.symbols.items[namespace_ref.innerIndex()].use_count_estimate -|= 1;
|
||
_ = part.symbol_uses.swapRemove(namespace_ref);
|
||
|
||
for (before.items, 0..) |before_part, i| {
|
||
if (before_part.tag == .import_to_convert_from_require) {
|
||
_ = before.swapRemove(i);
|
||
break;
|
||
}
|
||
}
|
||
|
||
if (p.esm_export_keyword.len == 0) {
|
||
p.esm_export_keyword.loc = stmt.loc;
|
||
p.esm_export_keyword.len = 5;
|
||
}
|
||
p.commonjs_named_exports_deoptimized = false;
|
||
break;
|
||
}
|
||
|
||
if (right.data == .e_binary) {
|
||
bin = right.data.e_binary;
|
||
continue;
|
||
}
|
||
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
}
|
||
}
|
||
} else if (p.options.bundle and parts.items.len == 0) {
|
||
// This flag is disabled because it breaks circular export * as from
|
||
//
|
||
// entry.js:
|
||
//
|
||
// export * from './foo';
|
||
//
|
||
// foo.js:
|
||
//
|
||
// export const foo = 123
|
||
// export * as ns from './foo'
|
||
//
|
||
if (comptime FeatureFlags.export_star_redirect) {
|
||
// If the file only contains "export * from './blah'
|
||
// we pretend the file never existed in the first place.
|
||
// the semantic difference here is in export default statements
|
||
// note: export_star_import_records are not filled in yet
|
||
|
||
if (before.items.len > 0 and p.import_records.items.len == 1) {
|
||
const export_star_redirect: ?*S.ExportStar = brk: {
|
||
var export_star: ?*S.ExportStar = null;
|
||
for (before.items) |part| {
|
||
for (part.stmts) |stmt| {
|
||
switch (stmt.data) {
|
||
.s_export_star => |star| {
|
||
if (star.alias != null) {
|
||
break :brk null;
|
||
}
|
||
|
||
if (export_star != null) {
|
||
break :brk null;
|
||
}
|
||
|
||
export_star = star;
|
||
},
|
||
.s_empty, .s_comment => {},
|
||
else => {
|
||
break :brk null;
|
||
},
|
||
}
|
||
}
|
||
}
|
||
break :brk export_star;
|
||
};
|
||
|
||
if (export_star_redirect) |star| {
|
||
return js_ast.Result{
|
||
.ast = .{
|
||
.allocator = p.allocator,
|
||
.import_records = ImportRecord.List.init(p.import_records.items),
|
||
.redirect_import_record_index = star.import_record_index,
|
||
.named_imports = p.named_imports,
|
||
.named_exports = p.named_exports,
|
||
},
|
||
};
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
// Analyze cross-part dependencies for tree shaking and code splitting
|
||
var exports_kind = js_ast.ExportsKind.none;
|
||
const exports_ref_usage_count = p.symbols.items[p.exports_ref.innerIndex()].use_count_estimate;
|
||
const uses_exports_ref = exports_ref_usage_count > 0;
|
||
|
||
if (uses_exports_ref and p.commonjs_named_exports.count() > 0 and !force_esm) {
|
||
p.deoptimizeCommonJSNamedExports();
|
||
}
|
||
|
||
const uses_module_ref = p.symbols.items[p.module_ref.innerIndex()].use_count_estimate > 0;
|
||
|
||
var wrap_mode: WrapMode = .none;
|
||
|
||
if (p.isDeoptimizedCommonJS()) {
|
||
exports_kind = .cjs;
|
||
} else if (p.esm_export_keyword.len > 0 or p.top_level_await_keyword.len > 0) {
|
||
exports_kind = .esm;
|
||
} else if (uses_exports_ref or uses_module_ref or p.has_top_level_return or p.has_with_scope) {
|
||
exports_kind = .cjs;
|
||
if (p.options.features.commonjs_at_runtime) {
|
||
wrap_mode = .bun_commonjs;
|
||
|
||
const import_record: ?*const ImportRecord = brk: {
|
||
for (p.import_records.items) |*import_record| {
|
||
if (import_record.is_internal or import_record.is_unused) continue;
|
||
if (import_record.kind == .stmt) break :brk import_record;
|
||
}
|
||
|
||
break :brk null;
|
||
};
|
||
|
||
// make it an error to use an import statement with a commonjs exports usage
|
||
if (import_record) |record| {
|
||
// find the usage of the export symbol
|
||
|
||
var notes = ListManaged(logger.Data).init(p.allocator);
|
||
|
||
try notes.append(logger.Data{
|
||
.text = try std.fmt.allocPrint(p.allocator, "Try require({}) instead", .{bun.fmt.QuotedFormatter{ .text = record.path.text }}),
|
||
});
|
||
|
||
if (uses_module_ref) {
|
||
try notes.append(logger.Data{
|
||
.text = "This file is CommonJS because 'module' was used",
|
||
});
|
||
}
|
||
|
||
if (uses_exports_ref) {
|
||
try notes.append(logger.Data{
|
||
.text = "This file is CommonJS because 'exports' was used",
|
||
});
|
||
}
|
||
|
||
if (p.has_top_level_return) {
|
||
try notes.append(logger.Data{
|
||
.text = "This file is CommonJS because top-level return was used",
|
||
});
|
||
}
|
||
|
||
if (p.has_with_scope) {
|
||
try notes.append(logger.Data{
|
||
.text = "This file is CommonJS because a \"with\" statement is used",
|
||
});
|
||
}
|
||
|
||
try p.log.addRangeErrorWithNotes(p.source, record.range, "Cannot use import statement with CommonJS-only features", notes.items);
|
||
}
|
||
}
|
||
} else {
|
||
switch (p.options.module_type) {
|
||
// ".cjs" or ".cts" or ("type: commonjs" and (".js" or ".jsx" or ".ts" or ".tsx"))
|
||
.cjs => {
|
||
exports_kind = .cjs;
|
||
},
|
||
.esm => {
|
||
exports_kind = .esm;
|
||
},
|
||
.unknown => {
|
||
// Divergence from esbuild and Node.js: we default to ESM
|
||
// when there are no exports.
|
||
//
|
||
// However, this breaks certain packages.
|
||
// For example, the checkpoint-client used by
|
||
// Prisma does an eval("__dirname") but does not export
|
||
// anything.
|
||
//
|
||
// If they use an import statement, we say it's ESM because that's not allowed in CommonJS files.
|
||
const uses_any_import_statements = brk: {
|
||
for (p.import_records.items) |*import_record| {
|
||
if (import_record.is_internal or import_record.is_unused) continue;
|
||
if (import_record.kind == .stmt) break :brk true;
|
||
}
|
||
|
||
break :brk false;
|
||
};
|
||
|
||
if (uses_any_import_statements) {
|
||
exports_kind = .esm;
|
||
|
||
// Otherwise, if they use CommonJS features its CommonJS
|
||
} else if (p.symbols.items[p.require_ref.innerIndex()].use_count_estimate > 0 or uses_dirname or uses_filename) {
|
||
exports_kind = .cjs;
|
||
} else {
|
||
// If unknown, we default to ESM
|
||
exports_kind = .esm;
|
||
}
|
||
},
|
||
}
|
||
|
||
if (exports_kind == .cjs and p.options.features.commonjs_at_runtime) {
|
||
wrap_mode = .bun_commonjs;
|
||
}
|
||
}
|
||
|
||
// Handle dirname and filename at runtime.
|
||
//
|
||
// If we reach this point, it means:
|
||
//
|
||
// 1) we are building an ESM file that uses __dirname or __filename
|
||
// 2) we are targeting bun's runtime.
|
||
// 3) we are not bundling.
|
||
//
|
||
if (exports_kind == .esm and (uses_dirname or uses_filename)) {
|
||
bun.assert(!p.options.bundle);
|
||
const count = @as(usize, @intFromBool(uses_dirname)) + @as(usize, @intFromBool(uses_filename));
|
||
var declared_symbols = DeclaredSymbol.List.initCapacity(p.allocator, count) catch unreachable;
|
||
var decls = p.allocator.alloc(G.Decl, count) catch unreachable;
|
||
if (uses_dirname) {
|
||
// var __dirname = import.meta
|
||
decls[0] = .{
|
||
.binding = p.b(B.Identifier{ .ref = p.dirname_ref }, logger.Loc.Empty),
|
||
.value = p.newExpr(
|
||
E.Dot{
|
||
.name = "dir",
|
||
.name_loc = logger.Loc.Empty,
|
||
.target = p.newExpr(E.ImportMeta{}, logger.Loc.Empty),
|
||
},
|
||
logger.Loc.Empty,
|
||
),
|
||
};
|
||
declared_symbols.appendAssumeCapacity(.{ .ref = p.dirname_ref, .is_top_level = true });
|
||
}
|
||
if (uses_filename) {
|
||
// var __filename = import.meta.path
|
||
decls[@as(usize, @intFromBool(uses_dirname))] = .{
|
||
.binding = p.b(B.Identifier{ .ref = p.filename_ref }, logger.Loc.Empty),
|
||
.value = p.newExpr(
|
||
E.Dot{
|
||
.name = "path",
|
||
.name_loc = logger.Loc.Empty,
|
||
.target = p.newExpr(E.ImportMeta{}, logger.Loc.Empty),
|
||
},
|
||
logger.Loc.Empty,
|
||
),
|
||
};
|
||
declared_symbols.appendAssumeCapacity(.{ .ref = p.filename_ref, .is_top_level = true });
|
||
}
|
||
|
||
var part_stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
|
||
part_stmts[0] = p.s(S.Local{
|
||
.kind = .k_var,
|
||
.decls = Decl.List.init(decls),
|
||
}, logger.Loc.Empty);
|
||
before.append(js_ast.Part{
|
||
.stmts = part_stmts,
|
||
.declared_symbols = declared_symbols,
|
||
.tag = .dirname_filename,
|
||
}) catch unreachable;
|
||
}
|
||
|
||
if (exports_kind == .esm and p.commonjs_named_exports.count() > 0 and !p.unwrap_all_requires and !force_esm) {
|
||
exports_kind = .esm_with_dynamic_fallback_from_cjs;
|
||
}
|
||
|
||
// Auto inject jest globals into the test file
|
||
if (p.options.features.inject_jest_globals) outer: {
|
||
var jest: *Jest = &p.jest;
|
||
|
||
for (p.import_records.items) |*item| {
|
||
// skip if they did import it
|
||
if (strings.eqlComptime(item.path.text, "bun:test") or strings.eqlComptime(item.path.text, "@jest/globals") or strings.eqlComptime(item.path.text, "vitest")) {
|
||
if (p.options.features.runtime_transpiler_cache) |cache| {
|
||
// If we rewrote import paths, we need to disable the runtime transpiler cache
|
||
if (!strings.eqlComptime(item.path.text, "bun:test")) {
|
||
cache.input_hash = null;
|
||
}
|
||
}
|
||
|
||
break :outer;
|
||
}
|
||
}
|
||
|
||
// if they didn't use any of the jest globals, don't inject it, I guess.
|
||
const items_count = brk: {
|
||
var count: usize = 0;
|
||
inline for (comptime std.meta.fieldNames(Jest)) |symbol_name| {
|
||
count += @intFromBool(p.symbols.items[@field(jest, symbol_name).innerIndex()].use_count_estimate > 0);
|
||
}
|
||
|
||
break :brk count;
|
||
};
|
||
if (items_count == 0)
|
||
break :outer;
|
||
|
||
const import_record_id = p.addImportRecord(.stmt, logger.Loc.Empty, "bun:test");
|
||
var import_record: *ImportRecord = &p.import_records.items[import_record_id];
|
||
import_record.tag = .bun_test;
|
||
|
||
var declared_symbols = js_ast.DeclaredSymbol.List{};
|
||
try declared_symbols.ensureTotalCapacity(p.allocator, items_count);
|
||
var clauses: []js_ast.ClauseItem = p.allocator.alloc(js_ast.ClauseItem, items_count) catch unreachable;
|
||
var clause_i: usize = 0;
|
||
inline for (comptime std.meta.fieldNames(Jest)) |symbol_name| {
|
||
if (p.symbols.items[@field(jest, symbol_name).innerIndex()].use_count_estimate > 0) {
|
||
clauses[clause_i] = js_ast.ClauseItem{
|
||
.name = .{ .ref = @field(jest, symbol_name), .loc = logger.Loc.Empty },
|
||
.alias = symbol_name,
|
||
.alias_loc = logger.Loc.Empty,
|
||
.original_name = "",
|
||
};
|
||
declared_symbols.appendAssumeCapacity(.{ .ref = @field(jest, symbol_name), .is_top_level = true });
|
||
clause_i += 1;
|
||
}
|
||
}
|
||
|
||
const import_stmt = p.s(
|
||
S.Import{
|
||
.namespace_ref = p.declareSymbol(.unbound, logger.Loc.Empty, "bun_test_import_namespace_for_internal_use_only") catch unreachable,
|
||
.items = clauses,
|
||
.import_record_index = import_record_id,
|
||
},
|
||
logger.Loc.Empty,
|
||
);
|
||
|
||
var part_stmts = try p.allocator.alloc(Stmt, 1);
|
||
part_stmts[0] = import_stmt;
|
||
var import_record_indices = try p.allocator.alloc(u32, 1);
|
||
import_record_indices[0] = import_record_id;
|
||
before.append(js_ast.Part{
|
||
.stmts = part_stmts,
|
||
.declared_symbols = declared_symbols,
|
||
.import_record_indices = bun.BabyList(u32).init(import_record_indices),
|
||
.tag = .bun_test,
|
||
}) catch unreachable;
|
||
|
||
// If we injected jest globals, we need to disable the runtime transpiler cache
|
||
if (p.options.features.runtime_transpiler_cache) |cache| {
|
||
cache.input_hash = null;
|
||
}
|
||
}
|
||
|
||
if (p.has_called_runtime) {
|
||
var runtime_imports: [RuntimeImports.all.len]u8 = undefined;
|
||
var iter = p.runtime_imports.iter();
|
||
var i: usize = 0;
|
||
while (iter.next()) |entry| {
|
||
runtime_imports[i] = @as(u8, @intCast(entry.key));
|
||
i += 1;
|
||
}
|
||
|
||
std.sort.pdq(
|
||
u8,
|
||
runtime_imports[0..i],
|
||
{},
|
||
struct {
|
||
pub fn isLessThan(_: void, a: u8, b: u8) bool {
|
||
return std.math.order(
|
||
RuntimeImports.all_sorted_index[a],
|
||
RuntimeImports.all_sorted_index[b],
|
||
) == .lt;
|
||
}
|
||
}.isLessThan,
|
||
);
|
||
|
||
if (i > 0) {
|
||
p.generateImportStmt(
|
||
RuntimeImports.Name,
|
||
runtime_imports[0..i],
|
||
&before,
|
||
p.runtime_imports,
|
||
null,
|
||
"import_",
|
||
true,
|
||
) catch unreachable;
|
||
}
|
||
}
|
||
|
||
// handle new way to do automatic JSX imports which fixes symbol collision issues
|
||
if (p.options.jsx.parse and p.options.features.auto_import_jsx and p.options.jsx.runtime == .automatic) {
|
||
var buf = [3]string{ "", "", "" };
|
||
const runtime_import_names = p.jsx_imports.runtimeImportNames(&buf);
|
||
|
||
if (runtime_import_names.len > 0) {
|
||
p.generateImportStmt(
|
||
p.options.jsx.importSource(),
|
||
runtime_import_names,
|
||
&before,
|
||
&p.jsx_imports,
|
||
null,
|
||
"",
|
||
false,
|
||
) catch unreachable;
|
||
}
|
||
|
||
const source_import_names = p.jsx_imports.sourceImportNames();
|
||
if (source_import_names.len > 0) {
|
||
p.generateImportStmt(
|
||
p.options.jsx.package_name,
|
||
source_import_names,
|
||
&before,
|
||
&p.jsx_imports,
|
||
null,
|
||
"",
|
||
false,
|
||
) catch unreachable;
|
||
}
|
||
}
|
||
|
||
if (p.server_components_wrap_ref.isValid()) {
|
||
const fw = p.options.framework orelse @panic("server components requires a framework configured, but none was set");
|
||
const sc = fw.server_components.?;
|
||
try p.generateReactRefreshImport(
|
||
&before,
|
||
sc.server_runtime_import,
|
||
&.{
|
||
.{
|
||
.name = sc.server_register_client_reference,
|
||
.ref = p.server_components_wrap_ref,
|
||
.enabled = true,
|
||
},
|
||
},
|
||
);
|
||
}
|
||
|
||
if (p.react_refresh.register_used or p.react_refresh.signature_used) {
|
||
try p.generateReactRefreshImport(
|
||
&before,
|
||
if (p.options.framework) |fw| fw.react_fast_refresh.?.import_source else "react-refresh/runtime",
|
||
&.{
|
||
.{
|
||
.name = "register",
|
||
.enabled = p.react_refresh.register_used,
|
||
.ref = p.react_refresh.register_ref,
|
||
},
|
||
.{
|
||
.name = "createSignatureFunctionForTransform",
|
||
.enabled = p.react_refresh.signature_used,
|
||
.ref = p.react_refresh.create_signature_ref,
|
||
},
|
||
},
|
||
);
|
||
}
|
||
|
||
var parts_slice: []js_ast.Part = &([_]js_ast.Part{});
|
||
|
||
if (before.items.len > 0 or after.items.len > 0) {
|
||
const before_len = before.items.len;
|
||
const after_len = after.items.len;
|
||
const parts_len = parts.items.len;
|
||
|
||
const _parts = try p.allocator.alloc(
|
||
js_ast.Part,
|
||
before_len +
|
||
after_len +
|
||
parts_len,
|
||
);
|
||
|
||
var remaining_parts = _parts;
|
||
if (before_len > 0) {
|
||
const parts_to_copy = before.items;
|
||
bun.copy(js_ast.Part, remaining_parts, parts_to_copy);
|
||
remaining_parts = remaining_parts[parts_to_copy.len..];
|
||
}
|
||
|
||
if (parts_len > 0) {
|
||
const parts_to_copy = parts.items;
|
||
bun.copy(js_ast.Part, remaining_parts, parts_to_copy);
|
||
remaining_parts = remaining_parts[parts_to_copy.len..];
|
||
}
|
||
|
||
if (after_len > 0) {
|
||
const parts_to_copy = after.items;
|
||
bun.copy(js_ast.Part, remaining_parts, parts_to_copy);
|
||
}
|
||
|
||
parts_slice = _parts;
|
||
} else {
|
||
parts_slice = parts.items;
|
||
}
|
||
|
||
// Pop the module scope to apply the "ContainsDirectEval" rules
|
||
// p.popScope();
|
||
|
||
if (comptime Environment.isNative and bun.FeatureFlags.runtime_transpiler_cache) {
|
||
const runtime_transpiler_cache: ?*bun.JSC.RuntimeTranspilerCache = p.options.features.runtime_transpiler_cache;
|
||
if (runtime_transpiler_cache) |cache| {
|
||
if (p.macro_call_count != 0) {
|
||
// disable this for:
|
||
// - macros
|
||
cache.input_hash = null;
|
||
} else {
|
||
cache.exports_kind = exports_kind;
|
||
}
|
||
}
|
||
}
|
||
|
||
return js_ast.Result{ .ast = try p.toAST(parts_slice, exports_kind, wrap_mode, hashbang) };
|
||
}
|
||
|
||
/// Creates a `Parser` for `source`, eagerly initializing its lexer.
/// Returns an error if lexer initialization fails on the source text.
/// All pointer members are borrowed: the caller keeps `log`, `source`,
/// and `define` alive for the parser's lifetime.
pub fn init(_options: Options, log: *logger.Log, source: *const logger.Source, define: *Define, allocator: Allocator) !Parser {
    return Parser{
        .options = _options,
        .allocator = allocator,
        // Lexer takes the source by value and reports errors into `log`.
        .lexer = try js_lexer.Lexer.init(log, source.*, allocator),
        .define = define,
        .source = source,
        .log = log,
    };
}
|
||
};
|
||
|
||
/// Result of resolving a label reference (e.g. `break foo;`): the label's
/// symbol ref, whether the labeled statement is a loop, and whether it was found.
const FindLabelSymbolResult = struct { ref: Ref, is_loop: bool, found: bool = false };
|
||
|
||
/// Result of resolving an identifier to a symbol.
const FindSymbolResult = struct {
    ref: Ref,
    /// Location where the symbol was declared, if known.
    declare_loc: ?logger.Loc = null,
    /// True when the lookup crossed a `with` scope, which makes the
    /// resolution unreliable at compile time.
    is_inside_with_scope: bool = false,
};
|
||
/// Result of parsing an `export { ... }` clause.
const ExportClauseResult = struct {
    clauses: []js_ast.ClauseItem = &([_]js_ast.ClauseItem{}),
    /// True when the whole clause fit on a single source line.
    is_single_line: bool = false,
    /// True when TypeScript type-only exports were present in the clause.
    had_type_only_exports: bool = false,
};
|
||
|
||
/// Decorator expressions parsed before we know what statement they decorate.
const DeferredTsDecorators = struct {
    /// The parsed decorator expressions, in source order.
    values: []js_ast.Expr,

    // If this turns out to be a "declare class" statement, we need to undo the
    // scopes that were potentially pushed while parsing the decorator arguments.
    scope_index: usize,
};
|
||
|
||
/// Where lexical declarations (e.g. function declarations in nested positions)
/// are permitted at the current parse site.
const LexicalDecl = enum(u8) { forbid, allow_all, allow_fn_inside_if, allow_fn_inside_label };
|
||
|
||
/// Options threaded through class parsing.
const ParseClassOptions = struct {
    /// Decorators that appeared before the `class` keyword.
    ts_decorators: []Expr = &[_]Expr{},
    allow_ts_decorators: bool = false,
    /// True when parsing inside a TypeScript `declare` context.
    is_type_script_declare: bool = false,
};
|
||
|
||
/// Options threaded through statement parsing.
const ParseStatementOptions = struct {
    /// Decorators collected before this statement, if any.
    ts_decorators: ?DeferredTsDecorators = null,
    lexical_decl: LexicalDecl = .forbid,
    is_module_scope: bool = false,
    is_namespace_scope: bool = false,
    is_export: bool = false,
    is_using_statement: bool = false,
    is_name_optional: bool = false, // For "export default" pseudo-statements.
    is_typescript_declare: bool = false,
    is_for_loop_init: bool = false,

    /// Returns true when at least one decorator expression was collected.
    pub fn hasDecorators(self: *ParseStatementOptions) bool {
        const decs = self.ts_decorators orelse return false;
        return decs.values.len > 0;
    }
};
|
||
|
||
// Shared singleton payloads for "missing"/empty AST nodes so each use can
// reference the same data instead of allocating per node.
var e_missing_data = E.Missing{};
var s_missing = S.Empty{};
var nullExprData = Expr.Data{ .e_missing = e_missing_data };
var nullStmtData = Stmt.Data{ .s_empty = s_missing };
|
||
/// Pre-allocated AST payloads for literals and identifiers the parser emits
/// frequently (JSX props, React element fields, HMR helpers, ...), so hot
/// paths can reference them by pointer instead of re-allocating per use.
pub const Prefill = struct {
    pub const HotModuleReloading = struct {
        pub var DebugEnabledArgs = [_]Expr{
            Expr{ .data = .{ .e_boolean = E.Boolean{ .value = true } }, .loc = logger.Loc.Empty },
        };
        pub var DebugDisabled = [_]Expr{
            Expr{ .data = .{ .e_boolean = E.Boolean{ .value = false } }, .loc = logger.Loc.Empty },
        };
        pub var ActivateString = E.String{
            .data = "activate",
        };
        // Index expression `<target>["activate"]`; `.target` is filled in by the user.
        pub var ActivateIndex = E.Index{
            .index = .{
                .data = .{
                    .e_string = &ActivateString,
                },
                .loc = logger.Loc.Empty,
            },
            .target = undefined,
        };
    };
    /// Raw byte arrays backing the shared `E.String` nodes below.
    pub const StringLiteral = struct {
        pub const Key = [3]u8{ 'k', 'e', 'y' };
        pub const Children = [_]u8{ 'c', 'h', 'i', 'l', 'd', 'r', 'e', 'n' };
        pub const Filename = [_]u8{ 'f', 'i', 'l', 'e', 'N', 'a', 'm', 'e' };
        pub const LineNumber = [_]u8{ 'l', 'i', 'n', 'e', 'N', 'u', 'm', 'b', 'e', 'r' };
        pub const ColumnNumber = [_]u8{ 'c', 'o', 'l', 'u', 'm', 'n', 'N', 'u', 'm', 'b', 'e', 'r' };
    };
    pub const Value = struct {
        pub const EThis = E.This{};
        pub const Zero = E.Number{ .value = 0.0 };
    };
    /// Shared `E.String` nodes; taken by address in the `Data` payloads below.
    pub const String = struct {
        pub var Key = E.String{ .data = &Prefill.StringLiteral.Key };
        pub var Children = E.String{ .data = &Prefill.StringLiteral.Children };
        pub var Filename = E.String{ .data = &Prefill.StringLiteral.Filename };
        pub var LineNumber = E.String{ .data = &Prefill.StringLiteral.LineNumber };
        pub var ColumnNumber = E.String{ .data = &Prefill.StringLiteral.ColumnNumber };

        pub var @"$$typeof" = E.String{ .data = "$$typeof" };
        pub var @"type" = E.String{ .data = "type" };
        pub var ref = E.String{ .data = "ref" };
        pub var props = E.String{ .data = "props" };
        pub var _owner = E.String{ .data = "_owner" };
        pub var REACT_ELEMENT_TYPE = E.String{ .data = "react.element" };
    };
    /// Ready-made `Expr.Data` / `B` payloads wrapping the shared nodes above.
    pub const Data = struct {
        pub var BMissing = B{ .b_missing = BMissing_ };
        pub var BMissing_ = B.Missing{};

        pub var EMissing = Expr.Data{ .e_missing = EMissing_ };
        pub var EMissing_ = E.Missing{};

        pub var SEmpty = Stmt.Data{ .s_empty = SEmpty_ };
        pub var SEmpty_ = S.Empty{};

        pub var Filename = Expr.Data{ .e_string = &Prefill.String.Filename };
        pub var LineNumber = Expr.Data{ .e_string = &Prefill.String.LineNumber };
        pub var ColumnNumber = Expr.Data{ .e_string = &Prefill.String.ColumnNumber };
        pub var @"$$typeof" = Expr.Data{ .e_string = &Prefill.String.@"$$typeof" };
        pub var key = Expr.Data{ .e_string = &Prefill.String.Key };
        // `type` is a Zig keyword, so the field access must use the quoted
        // identifier form to reference `Prefill.String.@"type"`.
        pub var @"type" = Expr.Data{ .e_string = &Prefill.String.@"type" };
        pub var ref = Expr.Data{ .e_string = &Prefill.String.ref };
        pub var props = Expr.Data{ .e_string = &Prefill.String.props };
        pub var _owner = Expr.Data{ .e_string = &Prefill.String._owner };
        pub var REACT_ELEMENT_TYPE = Expr.Data{ .e_string = &Prefill.String.REACT_ELEMENT_TYPE };
        pub const This = Expr.Data{ .e_this = E.This{} };
        pub const Zero = Expr.Data{ .e_number = Value.Zero };
    };
    pub const Runtime = struct {
        // pub var JSXFilename = "__jsxFilename";
        // pub var MarkAsModule = "__markAsModule";
        // pub var CommonJS = "__commonJS";
        // pub var ToModule = "__toModule";
        // const JSXShortname = "jsx";
    };
};
|
||
|
||
/// Per-file state for the React JSX transform.
const ReactJSX = struct {
    /// JSX elements hoisted to module scope, keyed by their generated ref.
    hoisted_elements: std.ArrayHashMapUnmanaged(Ref, G.Decl, bun.ArrayIdentityContext, false) = .{},
};
|
||
|
||
// More shared singleton payloads for common JSX/literal AST nodes.
var keyExprData = Expr.Data{ .e_string = &Prefill.String.Key };
var jsxChildrenKeyData = Expr.Data{ .e_string = &Prefill.String.Children };
var nullExprValueData = E.Null{};
var falseExprValueData = E.Boolean{ .value = false };
var nullValueExpr = Expr.Data{ .e_null = nullExprValueData };
var falseValueExpr = Expr.Data{ .e_boolean = E.Boolean{ .value = false } };
|
||
|
||
/// Output of the scan-only pass that collects import/require records
/// without visiting the full AST.
pub const ImportOrRequireScanResults = struct {
    import_records: List(ImportRecord),
};
|
||
|
||
/// Which JSX transform a parser instantiation is compiled to support.
const JSXTransformType = enum {
    none,
    react,
};
|
||
|
||
/// Comptime feature switches used to instantiate parser variants
/// (see `NewParser`).
const ParserFeatures = struct {
    typescript: bool = false,
    jsx: JSXTransformType = .none,
    /// When true, the parser only scans imports and does not visit the AST.
    scan_only: bool = false,
};
|
||
|
||
/// Maps an imported item's name to its location + ref within a namespace import.
pub const ImportItemForNamespaceMap = bun.StringArrayHashMap(LocRef);
|
||
|
||
/// Global constructors whose `new` expressions can sometimes be proven
/// side-effect free ("pure"), allowing the bundler to drop them when unused.
pub const KnownGlobal = enum {
    WeakSet,
    WeakMap,
    Date,
    Set,
    Map,
    Headers,
    Response,
    TextEncoder,
    TextDecoder,

    /// Comptime lookup from the global's source name to this enum.
    pub const map = bun.ComptimeEnumMap(KnownGlobal);

    /// Sets `e.can_be_unwrapped_if_unused` when `e` is a `new` of one of the
    /// globals above with arguments that provably cannot have side effects.
    /// Only applies when the constructor identifier is unbound — i.e. it
    /// actually refers to the global and is not shadowed by a local symbol.
    pub noinline fn maybeMarkConstructorAsPure(e: *E.New, symbols: []const Symbol) void {
        // Only plain identifier targets (e.g. `new Set(...)`) are considered.
        const id = if (e.target.data == .e_identifier) e.target.data.e_identifier.ref else return;
        const symbol = &symbols[id.innerIndex()];
        if (symbol.kind != .unbound)
            return;

        const constructor = map.get(symbol.original_name) orelse return;

        switch (constructor) {
            .WeakSet, .WeakMap => {
                const n = e.args.len;

                if (n == 0) {
                    // "new WeakSet()" is pure
                    e.can_be_unwrapped_if_unused = true;

                    return;
                }

                if (n == 1) {
                    switch (e.args.ptr[0].data) {
                        .e_null, .e_undefined => {
                            // "new WeakSet(null)" is pure
                            // "new WeakSet(void 0)" is pure
                            e.can_be_unwrapped_if_unused = true;
                        },
                        .e_array => |array| {
                            if (array.items.len == 0) {
                                // "new WeakSet([])" is pure
                                e.can_be_unwrapped_if_unused = true;
                            } else {
                                // "new WeakSet([x])" is impure because an exception is thrown if "x" is not an object
                            }
                        },
                        else => {
                            // "new WeakSet(x)" is impure because the iterator for "x" could have side effects
                        },
                    }
                }
            },
            .Date => {
                const n = e.args.len;

                if (n == 0) {
                    // "new Date()" is pure
                    e.can_be_unwrapped_if_unused = true;

                    return;
                }

                if (n == 1) {
                    switch (e.args.ptr[0].knownPrimitive()) {
                        .null, .undefined, .boolean, .number, .string => {
                            // "new Date('')" is pure
                            // "new Date(0)" is pure
                            // "new Date(null)" is pure
                            // "new Date(true)" is pure
                            // "new Date(false)" is pure
                            // "new Date(undefined)" is pure
                            e.can_be_unwrapped_if_unused = true;
                        },
                        else => {
                            // "new Date(x)" is impure because the argument could be a string with side effects

                        },
                    }
                }
            },

            .Set => {
                const n = e.args.len;

                if (n == 0) {
                    // "new Set()" is pure
                    e.can_be_unwrapped_if_unused = true;
                    return;
                }

                if (n == 1) {
                    switch (e.args.ptr[0].data) {
                        .e_array, .e_null, .e_undefined => {
                            // "new Set([a, b, c])" is pure
                            // "new Set(null)" is pure
                            // "new Set(void 0)" is pure
                            e.can_be_unwrapped_if_unused = true;
                        },
                        else => {
                            // "new Set(x)" is impure because the iterator for "x" could have side effects
                        },
                    }
                }
            },

            .Headers => {
                const n = e.args.len;

                if (n == 0) {
                    // "new Headers()" is pure
                    e.can_be_unwrapped_if_unused = true;

                    return;
                }
            },

            .Response => {
                const n = e.args.len;

                if (n == 0) {
                    // "new Response()" is pure
                    e.can_be_unwrapped_if_unused = true;

                    return;
                }

                if (n == 1) {
                    switch (e.args.ptr[0].knownPrimitive()) {
                        .null, .undefined, .boolean, .number, .string => {
                            // "new Response('')" is pure
                            // "new Response(0)" is pure
                            // "new Response(null)" is pure
                            // "new Response(true)" is pure
                            // "new Response(false)" is pure
                            // "new Response(undefined)" is pure

                            e.can_be_unwrapped_if_unused = true;
                        },
                        else => {
                            // "new Response(x)" is impure
                        },
                    }
                }
            },
            .TextDecoder, .TextEncoder => {
                const n = e.args.len;

                if (n == 0) {
                    // "new TextEncoder()" is pure
                    // "new TextDecoder()" is pure
                    e.can_be_unwrapped_if_unused = true;

                    return;
                }

                // We _could_ validate the encoding argument
                // But let's not bother
            },

            .Map => {
                const n = e.args.len;

                if (n == 0) {
                    // "new Map()" is pure
                    e.can_be_unwrapped_if_unused = true;
                    return;
                }

                if (n == 1) {
                    switch (e.args.ptr[0].data) {
                        .e_null, .e_undefined => {
                            // "new Map(null)" is pure
                            // "new Map(void 0)" is pure
                            e.can_be_unwrapped_if_unused = true;
                        },
                        .e_array => |array| {
                            // Only pure when every entry is itself an array
                            // literal, i.e. a [key, value] pair.
                            var all_items_are_arrays = true;
                            for (array.items.slice()) |item| {
                                if (item.data != .e_array) {
                                    all_items_are_arrays = false;
                                    break;
                                }
                            }

                            if (all_items_are_arrays) {
                                // "new Map([[a, b], [c, d]])" is pure
                                e.can_be_unwrapped_if_unused = true;
                            }
                        },
                        else => {
                            // "new Map(x)" is impure because the iterator for "x" could have side effects
                        },
                    }
                }
            },
        }
    }
};
|
||
|
||
/// Mutable bookkeeping for Bun macro expansion during the visit pass.
pub const MacroState = struct {
    refs: MacroRefs,
    /// Statements to prepend to the current part; assigned by the visitor
    /// before first use.
    prepend_stmts: *ListManaged(Stmt) = undefined,
    imports: std.AutoArrayHashMap(i32, Ref),

    /// Builds an empty macro state backed by `allocator`. `prepend_stmts`
    /// remains undefined until the visit pass wires it up.
    pub fn init(allocator: Allocator) MacroState {
        return .{
            .imports = std.AutoArrayHashMap(i32, Ref).init(allocator),
            .refs = MacroRefs.init(allocator),
            .prepend_stmts = undefined,
        };
    }
};
|
||
|
||
/// Symbol refs for the jest-style test globals that may be auto-injected
/// into test files (see the `inject_jest_globals` handling above). Each ref
/// stays `Ref.None` until the corresponding symbol is declared.
const Jest = struct {
    expect: Ref = Ref.None,
    describe: Ref = Ref.None,
    @"test": Ref = Ref.None,
    it: Ref = Ref.None,
    beforeEach: Ref = Ref.None,
    afterEach: Ref = Ref.None,
    beforeAll: Ref = Ref.None,
    afterAll: Ref = Ref.None,
    jest: Ref = Ref.None,
};
|
||
|
||
// workaround for https://github.com/ziglang/zig/issues/10903
/// Instantiates a parser type from a `ParserFeatures` bundle by forwarding
/// the individual comptime flags to `NewParser_`.
fn NewParser(
    comptime parser_features: ParserFeatures,
) type {
    return NewParser_(parser_features.typescript, parser_features.jsx, parser_features.scan_only);
}
|
||
fn NewParser_(
|
||
comptime parser_feature__typescript: bool,
|
||
comptime parser_feature__jsx: JSXTransformType,
|
||
comptime parser_feature__scan_only: bool,
|
||
) type {
|
||
const js_parser_features: ParserFeatures = .{
|
||
.typescript = parser_feature__typescript,
|
||
.jsx = parser_feature__jsx,
|
||
.scan_only = parser_feature__scan_only,
|
||
};
|
||
|
||
// P is for Parser!
|
||
return struct {
|
||
const js_parser_jsx = js_parser_features.jsx;
|
||
const is_typescript_enabled = js_parser_features.typescript;
|
||
const is_jsx_enabled = js_parser_jsx != .none;
|
||
const only_scan_imports_and_do_not_visit = js_parser_features.scan_only;
|
||
const ImportRecordList = if (only_scan_imports_and_do_not_visit) *std.ArrayList(ImportRecord) else std.ArrayList(ImportRecord);
|
||
const NamedImportsType = if (only_scan_imports_and_do_not_visit) *js_ast.Ast.NamedImports else js_ast.Ast.NamedImports;
|
||
const NeedsJSXType = if (only_scan_imports_and_do_not_visit) bool else void;
|
||
const track_symbol_usage_during_parse_pass = only_scan_imports_and_do_not_visit and is_typescript_enabled;
|
||
const ParsePassSymbolUsageType = if (track_symbol_usage_during_parse_pass) *ScanPassResult.ParsePassSymbolUsageMap else void;
|
||
|
||
pub const parser_features: ParserFeatures = js_parser_features;
|
||
const P = @This();
|
||
pub const jsx_transform_type: JSXTransformType = js_parser_jsx;
|
||
const allow_macros = FeatureFlags.is_macro_enabled;
|
||
const MacroCallCountType = if (allow_macros) u32 else u0;
|
||
macro: MacroState = undefined,
|
||
allocator: Allocator,
|
||
options: Parser.Options,
|
||
log: *logger.Log,
|
||
define: *Define,
|
||
source: *const logger.Source,
|
||
lexer: js_lexer.Lexer,
|
||
allow_in: bool = false,
|
||
allow_private_identifiers: bool = false,
|
||
|
||
has_top_level_return: bool = false,
|
||
latest_return_had_semicolon: bool = false,
|
||
has_import_meta: bool = false,
|
||
has_es_module_syntax: bool = false,
|
||
top_level_await_keyword: logger.Range = logger.Range.None,
|
||
fn_or_arrow_data_parse: FnOrArrowDataParse = FnOrArrowDataParse{},
|
||
fn_or_arrow_data_visit: FnOrArrowDataVisit = FnOrArrowDataVisit{},
|
||
fn_only_data_visit: FnOnlyDataVisit = FnOnlyDataVisit{},
|
||
allocated_names: List(string) = .{},
|
||
// allocated_names: ListManaged(string) = ListManaged(string).init(bun.default_allocator),
|
||
// allocated_names_pool: ?*AllocatedNamesPool.Node = null,
|
||
latest_arrow_arg_loc: logger.Loc = logger.Loc.Empty,
|
||
forbid_suffix_after_as_loc: logger.Loc = logger.Loc.Empty,
|
||
current_scope: *js_ast.Scope = undefined,
|
||
scopes_for_current_part: List(*js_ast.Scope) = .{},
|
||
symbols: ListManaged(js_ast.Symbol) = undefined,
|
||
ts_use_counts: List(u32) = .{},
|
||
exports_ref: Ref = Ref.None,
|
||
require_ref: Ref = Ref.None,
|
||
module_ref: Ref = Ref.None,
|
||
filename_ref: Ref = Ref.None,
|
||
dirname_ref: Ref = Ref.None,
|
||
import_meta_ref: Ref = Ref.None,
|
||
scopes_in_order_visitor_index: usize = 0,
|
||
has_classic_runtime_warned: bool = false,
|
||
macro_call_count: MacroCallCountType = 0,
|
||
|
||
hoisted_ref_for_sloppy_mode_block_fn: RefRefMap = .{},
|
||
|
||
/// Used for transforming export default -> module.exports
|
||
has_export_default: bool = false,
|
||
has_export_keyword: bool = false,
|
||
|
||
// Used for forcing CommonJS
|
||
has_with_scope: bool = false,
|
||
|
||
is_file_considered_to_have_esm_exports: bool = false,
|
||
|
||
has_called_runtime: bool = false,
|
||
|
||
legacy_cjs_import_stmts: std.ArrayList(Stmt),
|
||
|
||
injected_define_symbols: List(Ref) = .{},
|
||
symbol_uses: SymbolUseMap = .{},
|
||
declared_symbols: DeclaredSymbol.List = .{},
|
||
declared_symbols_for_reuse: DeclaredSymbol.List = .{},
|
||
runtime_imports: RuntimeImports = RuntimeImports{},
|
||
|
||
/// Used with unwrap_commonjs_packages
|
||
imports_to_convert_from_require: List(DeferredImportNamespace) = .{},
|
||
unwrap_all_requires: bool = false,
|
||
|
||
commonjs_named_exports: js_ast.Ast.CommonJSNamedExports = .{},
|
||
commonjs_named_exports_deoptimized: bool = false,
|
||
commonjs_module_exports_assigned_deoptimized: bool = false,
|
||
commonjs_named_exports_needs_conversion: u32 = std.math.maxInt(u32),
|
||
had_commonjs_named_exports_this_visit: bool = false,
|
||
commonjs_replacement_stmts: StmtNodeList = &.{},
|
||
|
||
parse_pass_symbol_uses: ParsePassSymbolUsageType = undefined,

/// Used by commonjs_at_runtime
has_commonjs_export_names: bool = false,

/// When this flag is enabled, we attempt to fold all expressions that
/// TypeScript would consider to be "constant expressions". This flag is
/// enabled inside each enum body block since TypeScript requires numeric
/// constant folding in enum definitions.
///
/// We also enable this flag in certain cases in JavaScript files such as when
/// parsing "const" declarations at the top of a non-ESM file, but we still
/// reuse TypeScript's notion of "constant expressions" for our own convenience.
///
/// As of TypeScript 5.0, a "constant expression" is defined as follows:
///
/// An expression is considered a constant expression if it is
///
/// * a number or string literal,
/// * a unary +, -, or ~ applied to a numeric constant expression,
/// * a binary +, -, *, /, %, **, <<, >>, >>>, |, &, ^ applied to two numeric constant expressions,
/// * a binary + applied to two constant expressions whereof at least one is a string,
/// * a template expression where each substitution expression is a constant expression,
/// * a parenthesized constant expression,
/// * a dotted name (e.g. x.y.z) that references a const variable with a constant expression initializer and no type annotation,
/// * a dotted name that references an enum member with an enum literal type, or
/// * a dotted name indexed by a string literal (e.g. x.y["z"]) that references an enum member with an enum literal type.
///
/// More detail: https://github.com/microsoft/TypeScript/pull/50528. Note that
/// we don't implement certain items in this list. For example, we don't do all
/// number-to-string conversions since ours might differ from how JavaScript
/// would do it, which would be a correctness issue.
///
/// This flag is also set globally when minify_syntax is enabled, in which case
/// we always fold constant expressions.
should_fold_typescript_constant_expressions: bool = false,

emitted_namespace_vars: RefMap = RefMap{},
is_exported_inside_namespace: RefRefMap = .{},
local_type_names: StringBoolMap = StringBoolMap{},

// This is the reference to the generated function argument for the namespace,
// which is different than the reference to the namespace itself:
//
//   namespace ns {
//   }
//
// The code above is transformed into something like this:
//
//   var ns1;
//   (function(ns2) {
//   })(ns1 || (ns1 = {}));
//
// This variable is "ns2" not "ns1". It is only used during the second
// "visit" pass.
enclosing_namespace_arg_ref: ?Ref = null,

jsx_imports: JSXImport.Symbols = .{},

/// only applicable when `.options.features.react_fast_refresh` is set.
/// populated before visit pass starts.
react_refresh: ReactRefresh = .{},

/// only applicable when `.options.features.server_components` is
/// configured to wrap exports. populated before visit pass starts.
server_components_wrap_ref: Ref = Ref.None,

jest: Jest = .{},

// Imports (both ES6 and CommonJS) are tracked at the top level
import_records: ImportRecordList,
import_records_for_current_part: List(u32) = .{},
export_star_import_records: List(u32) = .{},
import_symbol_property_uses: SymbolPropertyUseMap = .{},

// These are for handling ES6 imports and exports
esm_import_keyword: logger.Range = logger.Range.None,
esm_export_keyword: logger.Range = logger.Range.None,
enclosing_class_keyword: logger.Range = logger.Range.None,
import_items_for_namespace: std.AutoHashMapUnmanaged(Ref, ImportItemForNamespaceMap) = .{},
is_import_item: RefMap = .{},
named_imports: NamedImportsType,
named_exports: js_ast.Ast.NamedExports,
import_namespace_cc_map: Map(ImportNamespaceCallOrConstruct, bool) = .{},

// When we're only scanning the imports,
// if they're using the automatic JSX runtime,
// we won't know that we need to import JSX robustly because we don't track
// symbol counts. Instead, we ask:
// "Did we parse anything that looked like JSX"?
// If yes, then automatically add the JSX import.
needs_jsx_import: NeedsJSXType,

// The parser does two passes and we need to pass the scope tree information
// from the first pass to the second pass. That's done by tracking the calls
// to pushScopeForParsePass() and popScope() during the first pass in
// scopesInOrder.
//
// Then, when the second pass calls pushScopeForVisitPass() and popScope(),
// we consume entries from scopesInOrder and make sure they are in the same
// order. This way the second pass can efficiently use the same scope tree
// as the first pass without having to attach the scope tree to the AST.
//
// We need to split this into two passes because the pass that declares the
// symbols must be separate from the pass that binds identifiers to declared
// symbols to handle declaring a hoisted "var" symbol in a nested scope and
// binding a name to it in a parent or sibling scope.
scopes_in_order: ScopeOrderList = .{},
scope_order_to_visit: []ScopeOrder = &.{},

// These properties are for the visit pass, which runs after the parse pass.
// The visit pass binds identifiers to declared symbols, does constant
// folding, substitutes compile-time variable definitions, and lowers certain
// syntactic constructs as appropriate.
stmt_expr_value: Expr.Data,
call_target: Expr.Data,
delete_target: Expr.Data,
loop_body: Stmt.Data,
module_scope: *js_ast.Scope = undefined,
module_scope_directive_loc: logger.Loc = .{},
is_control_flow_dead: bool = false,

/// We must be careful to avoid revisiting nodes that have scopes.
is_revisit_for_substitution: bool = false,

method_call_must_be_replaced_with_undefined: bool = false,

// Inside a TypeScript namespace, an "export declare" statement can be used
// to cause a namespace to be emitted even though it has no other observable
// effect. This flag is used to implement this feature.
//
// Specifically, namespaces should be generated for all of the following
// namespaces below except for "f", which should not be generated:
//
//   namespace a { export declare const a }
//   namespace b { export declare let [b] }
//   namespace c { export declare function c() }
//   namespace d { export declare class d {} }
//   namespace e { export declare enum e {} }
//   namespace f { export declare namespace f {} }
//
// The TypeScript compiler compiles this into the following code (notice "f"
// is missing):
//
//   var a; (function (a_1) {})(a || (a = {}));
//   var b; (function (b_1) {})(b || (b = {}));
//   var c; (function (c_1) {})(c || (c = {}));
//   var d; (function (d_1) {})(d || (d = {}));
//   var e; (function (e_1) {})(e || (e = {}));
//
// Note that this should not be implemented by declaring symbols for "export
// declare" statements because the TypeScript compiler doesn't generate any
// code for these statements, so these statements are actually references to
// global variables. There is one exception, which is that local variables
// *should* be declared as symbols because they are replaced with. This seems
// like very arbitrary behavior but it's what the TypeScript compiler does,
// so we try to match it.
//
// Specifically, in the following code below "a" and "b" should be declared
// and should be substituted with "ns.a" and "ns.b" but the other symbols
// shouldn't. References to the other symbols actually refer to global
// variables instead of to symbols that are exported from the namespace.
// This is the case as of TypeScript 4.3. I assume this is a TypeScript bug:
//
//   namespace ns {
//     export declare const a
//     export declare let [b]
//     export declare function c()
//     export declare class d { }
//     export declare enum e { }
//     console.log(a, b, c, d, e)
//   }
//
// The TypeScript compiler compiles this into the following code:
//
//   var ns;
//   (function (ns) {
//     console.log(ns.a, ns.b, c, d, e);
//   })(ns || (ns = {}));
//
// Relevant issue: https://github.com/evanw/esbuild/issues/1158
has_non_local_export_declare_inside_namespace: bool = false,

// This helps recognize the "await import()" pattern. When this is present,
// warnings about non-string import paths will be omitted inside try blocks.
await_target: ?js_ast.Expr.Data = null,

to_expr_wrapper_namespace: Binding2ExprWrapper.Namespace,
to_expr_wrapper_hoisted: Binding2ExprWrapper.Hoisted,

// This helps recognize the "import().catch()" pattern. We also try to avoid
// warning about this just like the "try { await import() }" pattern.
then_catch_chain: ThenCatchChain,

// Temporary variables used for lowering
temp_refs_to_declare: List(TempRef) = .{},
temp_ref_count: i32 = 0,

// When bundling, hoisted top-level local variables declared with "var" in
// nested scopes are moved up to be declared in the top-level scope instead.
// The old "var" statements are turned into regular assignments instead. This
// makes it easier to quickly scan the top-level statements for "var" locals
// with the guarantee that all will be found.
relocated_top_level_vars: List(js_ast.LocRef) = .{},

// ArrowFunction is a special case in the grammar. Although it appears to be
// a PrimaryExpression, it's actually an AssignmentExpression. This means if
// a AssignmentExpression ends up producing an ArrowFunction then nothing can
// come after it other than the comma operator, since the comma operator is
// the only thing above AssignmentExpression under the Expression rule:
//
//   AssignmentExpression:
//     ArrowFunction
//     ConditionalExpression
//     LeftHandSideExpression = AssignmentExpression
//     LeftHandSideExpression AssignmentOperator AssignmentExpression
//
//   Expression:
//     AssignmentExpression
//     Expression , AssignmentExpression
//
after_arrow_body_loc: logger.Loc = logger.Loc.Empty,
import_transposer: ImportTransposer,
require_transposer: RequireTransposer,
require_resolve_transposer: RequireResolveTransposer,

const_values: js_ast.Ast.ConstValuesMap = .{},

// These are backed by stack fallback allocators in _parse, and are uninitialized until then.
binary_expression_stack: ListManaged(BinaryExpressionVisitor) = undefined,
binary_expression_simplify_stack: ListManaged(SideEffects.BinaryExpressionSimplifyVisitor) = undefined,

/// We build up enough information about the TypeScript namespace hierarchy to
/// be able to resolve scope lookups and property accesses for TypeScript enum
/// and namespace features. Each JavaScript scope object inside a namespace
/// has a reference to a map of exported namespace members from sibling scopes.
///
/// In addition, there is a map from each relevant symbol reference to the data
/// associated with that namespace or namespace member: "ref_to_ts_namespace_member".
/// This gives enough info to be able to resolve queries into the namespace.
ref_to_ts_namespace_member: std.AutoHashMapUnmanaged(Ref, js_ast.TSNamespaceMember.Data) = .{},
/// When visiting expressions, namespace metadata is associated with the most
/// recently visited node. If namespace metadata is present, "tsNamespaceTarget"
/// will be set to the most recently visited node (as a way to mark that this
/// node has metadata) and "tsNamespaceMemberData" will be set to the metadata.
ts_namespace: RecentlyVisitedTSNamespace = .{},
top_level_enums: std.ArrayListUnmanaged(Ref) = .{},

scopes_in_order_for_enum: std.AutoArrayHashMapUnmanaged(logger.Loc, []ScopeOrder) = .{},

// If this is true, then all top-level statements are wrapped in a try/catch
will_wrap_module_in_try_catch_for_using: bool = false,

/// Used for react refresh, it must be able to insert `const _s = $RefreshSig$();`
nearest_stmt_list: ?*ListManaged(Stmt) = null,
|
||
|
||
/// Tracks namespace metadata for the most recently visited expression node.
/// See the doc comment on `ts_namespace` above for how this is consumed.
const RecentlyVisitedTSNamespace = struct {
    /// The expression node that the metadata below belongs to. Defaults to
    /// an empty expression, meaning "no namespace metadata is attached".
    expr: Expr.Data = Expr.empty.data,
    /// Exported members of the namespace associated with `expr`, or null
    /// when no metadata is present.
    map: ?*js_ast.TSNamespaceMemberMap = null,

    const ExpressionData = union(enum) {
        ref: Ref,
        ptr: *E.Dot,
    };
};
|
||
|
||
/// "Fast Refresh" is React's solution for hot-module-reloading in the context of the UI framework
/// user guide: https://reactnative.dev/docs/fast-refresh (applies to react-dom and native)
///
/// This depends on performing a couple extra transformations at bundle time, as well as
/// including the `react-refresh` NPM package, which is able to do the heavy lifting,
/// integrating with `react` and `react-dom`.
///
/// Prior implementations:
/// [1]: https://github.com/facebook/react/blob/main/packages/react-refresh/src/ReactFreshBabelPlugin.js
/// [2]: https://github.com/swc-project/swc/blob/main/crates/swc_ecma_transforms_react/src/refresh/mod.rs
///
/// Additional reading:
/// [3] https://github.com/facebook/react/issues/16604#issuecomment-528663101
/// [4] https://github.com/facebook/react/blob/master/packages/react-refresh/src/__tests__/ReactFreshIntegration-test.js
///
/// Instead of a plugin which visits the tree separately, Bun's implementation of fast refresh
/// happens in tandem with the visit pass. The responsibilities of the transform are as follows:
///
/// 1. For all Components (which is defined as any top-level function/function variable, that is
///    named with a capital letter; see `isComponentishName`), register them to the runtime using
///    `$RefreshReg$(ComponentFunction, "Component");`. Implemented in `p.handleReactRefreshRegister`
///    HOC components are also registered, but only through a special case for `export default`
///
/// 2. For all functions which call a Hook (a hook is an identifier matching /^use[A-Z]/):
///    a. Outside of the function, create a signature function `const _s = $RefreshSig$();`
///    b. At the start of the function, call `_s()`
///    c. Record all of the hooks called, the variables they are assigned to, and
///       arguments depending on which hook has been used. `useState` and `useReducer`,
///       for example, are special-cased.
///    d. Directly after the function, call `_s(hook, "<hash>", forceReset)`
///       - If a user-defined hook is called, the alterate form is used:
///         `_s(hook, "<hash>", forceReset, () => [useCustom1, useCustom2])`
///
/// The upstream transforms do not declare `$RefreshReg$` or `$RefreshSig$`. A typical
/// implementation might look like this, prepending this data to the module start:
///
///     import * as Refresh from 'react-refresh/runtime';
///     const $RefreshReg$ = (type, id) => Refresh.register(type, "<file id here>" + id);
///     const $RefreshSig$ = Refresh.createSignatureFunctionForTransform;
///
/// Since Bun is a transpiler *and* bundler, we take a slightly different approach. Aside
/// from including the link to the refresh runtime, our notation of $RefreshReg$ is just
/// pointing at `Refresh.register`, which means when we call it, the second argument has
/// to be a string containing the filepath, not just the component name.
const ReactRefresh = struct {
    // Set if this JSX/TSX file uses the refresh runtime. If so,
    // we must insert an import statement to it.
    register_used: bool = false,
    signature_used: bool = false,

    /// $RefreshReg$ is called on all top-level variables that are
    /// components, as well as HOCs found in the `export default` clause.
    register_ref: Ref = Ref.None,

    /// $RefreshSig$ is called to create a signature function, which is
    /// used by the refresh runtime to perform smart hook tracking.
    create_signature_ref: Ref = Ref.None,

    /// If a comment with '@refresh reset' is seen, we will forward a
    /// force refresh to the refresh runtime. This lets you reset the
    /// state of hooks on an update on a per-component basis.
    // TODO: this is never set
    force_reset: bool = false,

    /// The last hook that was scanned. This is used when visiting
    /// `.s_local`, as we must hash the variable destructure if the
    /// hook's result is assigned directly to a local.
    last_hook_seen: ?*E.Call = null,

    /// Every function sets up stack memory to hold data related to it's
    /// hook tracking. This is a pointer to that ?HookContext, where an
    /// inner null means there are no hook calls.
    ///
    /// The inner value is initialized when the first hook .e_call is
    /// visited, where the '_s' symbol is reserved. Additional hook calls
    /// append to the `hasher` and `user_hooks` as needed.
    ///
    /// When a function is done visiting, the stack location is checked,
    /// and then it will insert `var _s = ...`, add the `_s()` call at
    /// the start of the function, and then add the call to `_s(func, ...)`.
    hook_ctx_storage: ?*?HookContext = null,

    /// Per-function hook-tracking state; see `hook_ctx_storage`.
    pub const HookContext = struct {
        hasher: std.hash.Wyhash,
        signature_cb: Ref,
        user_hooks: std.AutoArrayHashMapUnmanaged(Ref, Expr),
    };

    /// True if `id` starts with an ASCII uppercase letter, which is how React
    /// decides that a function is a component.
    // https://github.com/facebook/react/blob/d1afcb43fd506297109c32ff462f6f659f9110ae/packages/react-refresh/src/ReactFreshBabelPlugin.js#L42
    pub fn isComponentishName(id: []const u8) bool {
        if (id.len == 0) return false;
        return switch (id[0]) {
            'A'...'Z' => true,
            else => false,
        };
    }

    /// True if `id` matches /^use[A-Z]/, React's convention for hook names.
    // https://github.com/facebook/react/blob/d1afcb43fd506297109c32ff462f6f659f9110ae/packages/react-refresh/src/ReactFreshBabelPlugin.js#L408
    pub fn isHookName(id: []const u8) bool {
        return id.len >= 4 and
            strings.hasPrefixComptime(id, "use") and
            switch (id[3]) {
                'A'...'Z' => true,
                else => false,
            };
    }

    /// The set of hooks shipped by React itself; anything else matching
    /// `isHookName` is treated as a user-defined hook.
    pub const built_in_hooks = bun.ComptimeEnumMap(enum {
        useState,
        useReducer,
        useEffect,
        useLayoutEffect,
        useMemo,
        useCallback,
        useRef,
        useContext,
        useImperativeHandle,
        useDebugValue,
        useId,
        useDeferredValue,
        useTransition,
        useInsertionEffect,
        useSyncExternalStore,
        useFormStatus,
        useFormState,
        useActionState,
        useOptimistic,
    });
};
|
||
|
||
/// Whether the file being parsed is Bun's internal runtime module.
/// Use this instead of checking `p.source.index` directly, because when
/// not bundling, `p.source.index` is always `0`.
inline fn isSourceRuntime(p: *const P) bool {
    if (!p.options.bundle) return false;
    return p.source.index.isRuntime();
}
|
||
|
||
/// Rewrites a dynamic `import(<arg>)` expression. A string-literal argument
/// becomes a tracked import record; any other argument is left as a dynamic
/// import with no record (optionally logging a debug diagnostic).
pub fn transposeImport(p: *P, arg: Expr, state: *const TransposeState) Expr {
    // The argument must be a string
    if (arg.data.as(.e_string)) |str| {
        // Ignore calls to import() if the control flow is provably dead here.
        // We don't want to spend time scanning the required files if they will
        // never be used.
        if (p.is_control_flow_dead) {
            return p.newExpr(E.Null{}, arg.loc);
        }

        const import_record_index = p.addImportRecord(.dynamic, arg.loc, str.slice(p.allocator));

        if (state.import_record_tag) |tag| {
            p.import_records.items[import_record_index].tag = tag;
        }

        // "await import()" inside a try block and "import().catch()" both
        // signal that the caller handles resolution failures itself.
        p.import_records.items[import_record_index].handles_import_errors = (state.is_await_target and p.fn_or_arrow_data_visit.try_body_count != 0) or state.is_then_catch_target;
        p.import_records_for_current_part.append(p.allocator, import_record_index) catch unreachable;

        return p.newExpr(E.Import{
            .expr = arg,
            .import_record_index = @intCast(import_record_index),
            .options = state.import_options,
        }, state.loc);
    }

    if (p.options.warn_about_unbundled_modules) {
        // Use a debug log so people can see this if they want to
        const r = js_lexer.rangeOfIdentifier(p.source, state.loc);
        p.log.addRangeDebug(p.source, r, "This \"import\" expression cannot be bundled because the argument is not a string literal") catch unreachable;
    }

    // Non-literal argument: keep the dynamic import, marked with an invalid
    // import record index so later passes know it was not bundled.
    return p.newExpr(E.Import{
        .expr = arg,
        .options = state.import_options,
        .import_record_index = std.math.maxInt(u32),
    }, state.loc);
}
|
||
|
||
/// Rewrites `require.resolve(<arg>)`. A string-literal argument is turned
/// into an import record via `transposeRequireResolveKnownString`; anything
/// else is left as a plain call to the resolved `require.resolve` target,
/// optionally emitting a debug diagnostic about the unbundlable argument.
pub fn transposeRequireResolve(p: *P, arg: Expr, require_resolve_ref: Expr) Expr {
    // Only a string literal can become an import record.
    if (arg.data == .e_string) return p.transposeRequireResolveKnownString(arg);

    if (p.options.warn_about_unbundled_modules) {
        // Use a debug log so people can see this if they want to
        const range = js_lexer.rangeOfIdentifier(p.source, arg.loc);
        p.log.addRangeDebug(p.source, range, "This \"require.resolve\" expression cannot be bundled because the argument is not a string literal") catch unreachable;
    }

    // Fall back to emitting the call verbatim.
    const call_args = p.allocator.alloc(Expr, 1) catch unreachable;
    call_args[0] = arg;
    return p.newExpr(E.Call{
        .target = require_resolve_ref,
        .args = ExprNodeList.init(call_args),
    }, arg.loc);
}
|
||
|
||
/// Rewrites `require.resolve("literal")` into an `E.RequireResolveString`
/// backed by a new import record. Caller must have verified `arg` is a
/// string literal (asserted below).
pub inline fn transposeRequireResolveKnownString(p: *P, arg: Expr) Expr {
    bun.assert(arg.data == .e_string);

    // Ignore calls to import() if the control flow is provably dead here.
    // We don't want to spend time scanning the required files if they will
    // never be used.
    if (p.is_control_flow_dead) {
        return p.newExpr(E.Null{}, arg.loc);
    }

    const import_record_index = p.addImportRecord(.require_resolve, arg.loc, arg.data.e_string.string(p.allocator) catch unreachable);
    // Inside a try block, a failed resolution is assumed to be handled by
    // the surrounding catch.
    p.import_records.items[import_record_index].handles_import_errors = p.fn_or_arrow_data_visit.try_body_count != 0;
    p.import_records_for_current_part.append(p.allocator, import_record_index) catch unreachable;

    return p.newExpr(
        E.RequireResolveString{
            .import_record_index = import_record_index,
            // .leading_interior_comments = arg.getString().
        },
        arg.loc,
    );
}
|
||
|
||
/// Rewrites a `require(<arg>)` call. Depending on configuration and the
/// argument, this either (a) emits a plain call to the runtime `require`
/// shim, (b) records an import and emits `E.RequireString`, or (c) when
/// unwrapping CommonJS to ESM, converts the require into an import backed
/// by a fresh namespace symbol.
pub fn transposeRequire(p: *P, arg: Expr, state: *const TransposeState) Expr {
    // Without the runtime, we can't do anything clever: emit `require(arg)`.
    if (!p.options.features.allow_runtime) {
        const args = p.allocator.alloc(Expr, 1) catch bun.outOfMemory();
        args[0] = arg;
        return p.newExpr(
            E.Call{
                .target = p.valueForRequire(arg.loc),
                .args = ExprNodeList.init(args),
            },
            arg.loc,
        );
    }

    switch (arg.data) {
        .e_string => |str| {
            // Ignore calls to require() if the control flow is provably dead here.
            // We don't want to spend time scanning the required files if they will
            // never be used.
            if (p.is_control_flow_dead) {
                return Expr{ .data = nullExprData, .loc = arg.loc };
            }

            str.resolveRopeIfNeeded(p.allocator);
            const pathname = str.string(p.allocator) catch unreachable;
            const path = fs.Path.init(pathname);

            const handles_import_errors = p.fn_or_arrow_data_visit.try_body_count != 0;

            // For unwrapping CommonJS into ESM to fully work
            // we must also unwrap requires into imports.
            const should_unwrap_require = p.options.features.unwrap_commonjs_to_esm and
                (p.unwrap_all_requires or
                if (path.packageName()) |pkg| p.options.features.shouldUnwrapRequire(pkg) else false) and
                // We cannot unwrap a require wrapped in a try/catch because
                // import statements cannot be wrapped in a try/catch and
                // require cannot return a promise.
                !handles_import_errors;

            if (should_unwrap_require) {
                const import_record_index = p.addImportRecordByRangeAndPath(.stmt, p.source.rangeOfString(arg.loc), path);
                p.import_records.items[import_record_index].handles_import_errors = handles_import_errors;

                // Note that this symbol may be completely removed later.
                var path_name = fs.PathName.init(path.text);
                const name = path_name.nonUniqueNameString(p.allocator) catch bun.outOfMemory();
                const namespace_ref = p.newSymbol(.other, name) catch bun.outOfMemory();

                p.imports_to_convert_from_require.append(p.allocator, .{
                    .namespace = .{
                        .ref = namespace_ref,
                        .loc = arg.loc,
                    },
                    .import_record_id = import_record_index,
                }) catch bun.outOfMemory();
                p.import_items_for_namespace.put(p.allocator, namespace_ref, ImportItemForNamespaceMap.init(p.allocator)) catch bun.outOfMemory();
                p.recordUsage(namespace_ref);

                // When the require's result is not directly assigned to a
                // declaration, refer to the namespace symbol itself.
                if (!state.is_require_immediately_assigned_to_decl) {
                    return p.newExpr(E.Identifier{
                        .ref = namespace_ref,
                    }, arg.loc);
                }

                return p.newExpr(
                    E.RequireString{
                        .import_record_index = import_record_index,
                        .unwrapped_id = @as(u32, @intCast(p.imports_to_convert_from_require.items.len - 1)),
                    },
                    arg.loc,
                );
            }

            // Plain bundled require: record it and emit `E.RequireString`.
            const import_record_index = p.addImportRecordByRangeAndPath(.require, p.source.rangeOfString(arg.loc), path);
            p.import_records.items[import_record_index].handles_import_errors = handles_import_errors;
            p.import_records_for_current_part.append(p.allocator, import_record_index) catch unreachable;

            return p.newExpr(E.RequireString{ .import_record_index = import_record_index }, arg.loc);
        },
        else => {
            // Non-literal argument: emit a call to the runtime require shim.
            p.recordUsageOfRuntimeRequire();
            const args = p.allocator.alloc(Expr, 1) catch unreachable;
            args[0] = arg;
            return p.newExpr(
                E.Call{
                    .target = p.valueForRequire(arg.loc),
                    .args = ExprNodeList.init(args),
                },
                arg.loc,
            );
        },
    }
}
|
||
|
||
/// True when the bundler is configured to convert CommonJS modules into
/// ESM (the `unwrap_commonjs_to_esm` feature flag).
pub inline fn shouldUnwrapCommonJSToESM(p: *const P) bool {
    const features = p.options.features;
    return features.unwrap_commonjs_to_esm;
}
|
||
|
||
/// Reports whether a binding (or any binding nested inside a destructuring
/// pattern) is still referenced: it is the default export, appears in named
/// imports/exports, or has a nonzero use count. Used by tree shaking to
/// decide whether a declaration is dead.
fn isBindingUsed(p: *P, binding: Binding, default_export_ref: Ref) bool {
    switch (binding.data) {
        .b_identifier => |ident| {
            if (default_export_ref.eql(ident.ref)) return true;
            if (p.named_imports.contains(ident.ref)) return true;

            for (p.named_exports.values()) |named_export| {
                if (named_export.ref.eql(ident.ref)) return true;
            }

            const sym: *const Symbol = &p.symbols.items[ident.ref.innerIndex()];
            return sym.use_count_estimate > 0;
        },
        .b_array => |array_pattern| {
            // Used if any element of the array pattern is used.
            for (array_pattern.items) |element| {
                if (p.isBindingUsed(element.binding, default_export_ref)) return true;
            }
            return false;
        },
        .b_object => |object_pattern| {
            // Used if any property of the object pattern is used.
            for (object_pattern.properties) |property| {
                if (p.isBindingUsed(property.value, default_export_ref)) return true;
            }
            return false;
        },
        .b_missing => return false,
    }
}
|
||
|
||
/// Removes dead parts from `parts` in place, repeating until a fixed point.
/// A part is dead when it is removable and every statement in it is provably
/// unused (unexported, unreferenced declarations, or loops/ifs with a
/// constant-false condition). If `merge` is true, all surviving untagged
/// parts are afterwards concatenated into the first untagged part.
pub fn treeShake(p: *P, parts: *[]js_ast.Part, merge: bool) void {
    var parts_: []js_ast.Part = parts.*;
    // Runs after shaking: optionally merge `.none`-tagged parts, then write
    // the (possibly shortened) slice back through the out-parameter.
    defer {
        if (merge and parts_.len > 1) {
            var first_none_part: usize = parts_.len;
            var stmts_count: usize = 0;
            for (parts_, 0..) |part, i| {
                if (part.tag == .none) {
                    stmts_count += part.stmts.len;
                    first_none_part = @min(i, first_none_part);
                }
            }

            if (first_none_part < parts_.len) {
                const stmts_list = p.allocator.alloc(Stmt, stmts_count) catch unreachable;
                var stmts_remain = stmts_list;

                for (parts_) |part| {
                    if (part.tag == .none) {
                        bun.copy(Stmt, stmts_remain, part.stmts);
                        stmts_remain = stmts_remain[part.stmts.len..];
                    }
                }

                parts_[first_none_part].stmts = stmts_list;

                parts_ = parts_[0 .. first_none_part + 1];
            }
        }

        parts.* = parts_;
    }
    const default_export_ref =
        if (p.named_exports.get("default")) |default_| default_.ref else Ref.None;

    // Removing one part decrements use counts, which may make another part
    // dead — iterate until no part is removed in a full pass.
    while (parts_.len > 1) {
        var parts_end: usize = 0;
        const last_end = parts_.len;

        for (parts_) |part| {
            const is_dead = part.can_be_removed_if_unused and can_remove_part: {
                for (part.stmts) |stmt| {
                    switch (stmt.data) {
                        .s_local => |local| {
                            if (local.is_export) break :can_remove_part false;
                            for (local.decls.slice()) |decl| {
                                if (isBindingUsed(p, decl.binding, default_export_ref))
                                    break :can_remove_part false;
                            }
                        },
                        .s_if => |if_statement| {
                            // Only removable when the condition is a
                            // side-effect-free constant false.
                            const result = SideEffects.toBoolean(p, if_statement.test_.data);
                            if (!(result.ok and result.side_effects == .no_side_effects and !result.value)) {
                                break :can_remove_part false;
                            }
                        },
                        .s_while => |while_statement| {
                            const result = SideEffects.toBoolean(p, while_statement.test_.data);
                            if (!(result.ok and result.side_effects == .no_side_effects and !result.value)) {
                                break :can_remove_part false;
                            }
                        },
                        .s_for => |for_statement| {
                            if (for_statement.test_) |expr| {
                                const result = SideEffects.toBoolean(p, expr.data);
                                if (!(result.ok and result.side_effects == .no_side_effects and !result.value)) {
                                    break :can_remove_part false;
                                }
                            }
                        },
                        .s_function => |func| {
                            if (func.func.flags.contains(.is_export)) break :can_remove_part false;
                            if (func.func.name) |name| {
                                const symbol: *const Symbol = &p.symbols.items[name.ref.?.innerIndex()];

                                if (name.ref.?.eql(default_export_ref) or
                                    symbol.use_count_estimate > 0 or
                                    p.named_exports.contains(symbol.original_name) or
                                    p.named_imports.contains(name.ref.?) or
                                    p.is_import_item.get(name.ref.?) != null)
                                {
                                    break :can_remove_part false;
                                }
                            }
                        },
                        // Import/export statements always have observable
                        // module-graph effects and can never be shaken here.
                        .s_import,
                        .s_export_clause,
                        .s_export_from,
                        .s_export_default,
                        => break :can_remove_part false,

                        .s_class => |class| {
                            if (class.is_export) break :can_remove_part false;
                            if (class.class.class_name) |name| {
                                const symbol: *const Symbol = &p.symbols.items[name.ref.?.innerIndex()];

                                if (name.ref.?.eql(default_export_ref) or
                                    symbol.use_count_estimate > 0 or
                                    p.named_exports.contains(symbol.original_name) or
                                    p.named_imports.contains(name.ref.?) or
                                    p.is_import_item.get(name.ref.?) != null)
                                {
                                    break :can_remove_part false;
                                }
                            }
                        },

                        else => break :can_remove_part false,
                    }
                }
                break :can_remove_part true;
            };

            if (is_dead) {
                // Subtract this part's symbol uses so parts that were only
                // referenced from here can be removed on the next pass.
                p.clearSymbolUsagesFromDeadPart(part);

                continue;
            }

            // Compact surviving parts toward the front of the slice.
            parts_[parts_end] = part;
            parts_end += 1;
        }

        parts_.len = parts_end;
        if (last_end == parts_.len) {
            break;
        }
    }
}
|
||
|
||
// Transposers push `import()` / `require()` / `require.resolve()` rewriting
// through ternaries and comma expressions so e.g. `require(a ? "x" : "y")`
// becomes `a ? require("x") : require("y")`.
const ImportTransposer = ExpressionTransposer(P, *const TransposeState, P.transposeImport);
const RequireTransposer = ExpressionTransposer(P, *const TransposeState, P.transposeRequire);
const RequireResolveTransposer = ExpressionTransposer(P, Expr, P.transposeRequireResolve);
|
||
|
||
/// Helpers for converting bindings back into expressions, parameterized by
/// how identifiers are wrapped (namespace member access vs. hoisted var).
const Binding2ExprWrapper = struct {
    pub const Namespace = Binding.ToExpr(P, P.wrapIdentifierNamespace);
    pub const Hoisted = Binding.ToExpr(P, P.wrapIdentifierHoisting);
};
|
||
|
||
/// Removes a tree-shaken (dead) part's contribution to symbol usage counts.
/// Usage estimates recorded by the part are subtracted (saturating at zero),
/// and symbols the part itself declared are zeroed out entirely.
fn clearSymbolUsagesFromDeadPart(p: *P, part: js_ast.Part) void {
    const symbols = p.symbols.items;

    // Subtract the part's recorded uses from each referenced symbol.
    for (part.symbol_uses.keys(), part.symbol_uses.values()) |use_ref, use| {
        symbols[use_ref.innerIndex()].use_count_estimate -|= use.count_estimate;
    }

    // Symbols declared by the dead part can no longer be used at all.
    for (part.declared_symbols.refs()) |declared_ref| {
        symbols[declared_ref.innerIndex()].use_count_estimate = 0;
    }
}
|
||
|
||
/// Builds a `Stmt` from a statement payload `t` at location `loc`.
/// Accepts either a value (allocated into the AST) or a pointer to an
/// already-allocated payload. Rejects TypeScript-only statement types at
/// compile time when TypeScript is disabled.
pub fn s(_: *P, t: anytype, loc: logger.Loc) Stmt {
    const Type = @TypeOf(t);
    if (!is_typescript_enabled and (Type == S.TypeScript or Type == *S.TypeScript)) {
        @compileError("Attempted to use TypeScript syntax in a non-TypeScript environment");
    }

    // Output.print("\nStmt: {s} - {d}\n", .{ @typeName(@TypeOf(t)), loc.start });
    if (@typeInfo(Type) == .Pointer) {
        // ExportFrom normally becomes import records during the visiting pass
        // However, we skip the visiting pass in this mode
        // So we must generate a minimum version of it here.
        if (comptime only_scan_imports_and_do_not_visit) {
            // if (@TypeOf(t) == *S.ExportFrom) {
            //     switch (call.target.data) {
            //         .e_identifier => |ident| {
            //             // is this a require("something")
            //             if (strings.eqlComptime(p.loadNameFromRef(ident.ref), "require") and call.args.len == 1 and std.meta.activeTag(call.args[0].data) == .e_string) {
            //                 _ = p.addImportRecord(.require, loc, call.args[0].data.e_string.string(p.allocator) catch unreachable);
            //             }
            //         },
            //         else => {},
            //     }
            // }
        }

        // Pointer payload: wrap it without copying.
        return Stmt.init(std.meta.Child(Type), t, loc);
    } else {
        // Value payload: allocate it into the AST.
        return Stmt.alloc(Type, t, loc);
    }
}
|
||
|
||
/// Computes a character-frequency histogram over the parts of the source
/// that will survive minification, used to pick short minified identifier
/// names that compress well. Returns null when identifier minification is
/// off or when parsing Bun's own runtime.
fn computeCharacterFrequency(p: *P) ?js_ast.CharFreq {
    if (!p.options.features.minify_identifiers or p.isSourceRuntime()) {
        return null;
    }

    // Add everything in the file to the histogram
    var freq: js_ast.CharFreq = .{
        .freqs = [_]i32{0} ** 64,
    };

    freq.scan(p.source.contents, 1);

    // Subtract out all comments
    for (p.lexer.all_comments.items) |comment_range| {
        freq.scan(p.source.textForRange(comment_range), -1);
    }

    // Subtract out all import paths
    for (p.import_records.items) |record| {
        freq.scan(record.path.text, -1);
    }

    // Subtract out all symbols that will be renamed, weighted by estimated
    // use count, walking the scope tree recursively.
    const ScopeVisitor = struct {
        pub fn visit(symbols: []const js_ast.Symbol, char_freq: *js_ast.CharFreq, scope: *js_ast.Scope) void {
            var iter = scope.members.iterator();

            while (iter.next()) |entry| {
                const symbol: *const Symbol = &symbols[entry.value_ptr.ref.innerIndex()];

                if (symbol.slotNamespace() != .must_not_be_renamed) {
                    char_freq.scan(symbol.original_name, -@as(i32, @intCast(symbol.use_count_estimate)));
                }
            }

            if (scope.label_ref) |ref| {
                const symbol = &symbols[ref.innerIndex()];

                if (symbol.slotNamespace() != .must_not_be_renamed) {
                    // Labels appear once at the declaration plus once per use.
                    char_freq.scan(symbol.original_name, -@as(i32, @intCast(symbol.use_count_estimate)) - 1);
                }
            }

            for (scope.children.slice()) |child| {
                visit(symbols, char_freq, child);
            }
        }
    };
    ScopeVisitor.visit(p.symbols.items, &freq, p.module_scope);

    // TODO: mangledProps

    return freq;
}
|
||
|
||
/// Construct an `Expr` of expression type `t` at `loc`.
///
/// `t` may be a value or a pointer to an expression payload. In scan-only
/// mode (`only_scan_imports_and_do_not_visit`), `require("...")` calls with a
/// single string argument are additionally recorded as import records, since
/// the visiting pass that normally does this is skipped.
/// Emits a compile error for JSX payloads when no JSX transform is active.
pub fn newExpr(p: *P, t: anytype, loc: logger.Loc) Expr {
    const Type = @TypeOf(t);

    comptime {
        if (jsx_transform_type == .none) {
            if (Type == E.JSXElement or Type == *E.JSXElement) {
                @compileError("JSXElement is not supported in this environment");
            }
        }
    }

    // Output.print("\nExpr: {s} - {d}\n", .{ @typeName(@TypeOf(t)), loc.start });
    if (@typeInfo(Type) == .Pointer) {
        if (comptime only_scan_imports_and_do_not_visit) {
            if (Type == *E.Call) {
                const call: *E.Call = t;
                switch (call.target.data) {
                    .e_identifier => |ident| {
                        // is this a require("something")
                        if (strings.eqlComptime(p.loadNameFromRef(ident.ref), "require") and call.args.len == 1 and std.meta.activeTag(call.args.ptr[0].data) == .e_string) {
                            _ = p.addImportRecord(.require, loc, call.args.first_().data.e_string.string(p.allocator) catch unreachable);
                        }
                    },
                    else => {},
                }
            }
        }
        // Dereference: the payload is copied into the new expression node.
        return Expr.init(std.meta.Child(Type), t.*, loc);
    } else {
        if (comptime only_scan_imports_and_do_not_visit) {
            // Same require("...") detection as the pointer branch above,
            // but for a by-value E.Call payload.
            if (Type == E.Call) {
                const call: E.Call = t;
                switch (call.target.data) {
                    .e_identifier => |ident| {
                        // is this a require("something")
                        if (strings.eqlComptime(p.loadNameFromRef(ident.ref), "require") and call.args.len == 1 and std.meta.activeTag(call.args.ptr[0].data) == .e_string) {
                            _ = p.addImportRecord(.require, loc, call.args.first_().data.e_string.string(p.allocator) catch unreachable);
                        }
                    },
                    else => {},
                }
            }
        }
        return Expr.init(Type, t, loc);
    }
}
|
||
|
||
/// Construct a `Binding` of type `t` at `loc`. A pointer payload is wrapped
/// in place; a value payload is heap-allocated with the parser's allocator.
pub fn b(p: *P, t: anytype, loc: logger.Loc) Binding {
    return if (@typeInfo(@TypeOf(t)) == .Pointer)
        Binding.init(t, loc)
    else
        Binding.alloc(p.allocator, t, loc);
}
|
||
|
||
/// Look up `name` in the scope chain, recording the usage against the
/// resolved (or newly created unbound) symbol.
pub fn findSymbol(p: *P, loc: logger.Loc, name: string) !FindSymbolResult {
    return p.findSymbolWithRecordUsage(loc, name, true);
}
|
||
|
||
/// Resolve `name` against the current scope chain.
///
/// Walks from the innermost scope outward, checking each scope's members and
/// (for TypeScript) its namespace's exported members. If no declaration is
/// found, an "unbound" symbol is created in the module scope so later
/// references resolve to the same symbol. When `record_usage` is set, the
/// resolved symbol's use count is incremented; otherwise an unresolved name
/// returns `Ref.None` without allocating.
pub fn findSymbolWithRecordUsage(p: *P, loc: logger.Loc, name: string, comptime record_usage: bool) !FindSymbolResult {
    var declare_loc: logger.Loc = logger.Loc.Empty;
    var is_inside_with_scope = false;
    // This function can show up in profiling.
    // That's part of why we do this.
    // Instead of rehashing `name` for every scope, we do it just once.
    const hash = Scope.getMemberHash(name);
    const allocator = p.allocator;

    const ref: Ref = brk: {
        var current: ?*Scope = p.current_scope;

        var did_forbid_arguments = false;

        while (current) |scope| : (current = current.?.parent) {
            // Track if we're inside a "with" statement body
            if (scope.kind == .with) {
                is_inside_with_scope = true;
            }

            // Forbid referencing "arguments" inside class bodies
            if (scope.forbid_arguments and !did_forbid_arguments and strings.eqlComptime(name, "arguments")) {
                const r = js_lexer.rangeOfIdentifier(p.source, loc);
                p.log.addRangeErrorFmt(p.source, r, allocator, "Cannot access \"{s}\" here", .{name}) catch unreachable;
                // Only report this once per lookup, not once per scope.
                did_forbid_arguments = true;
            }

            // Is the symbol a member of this scope?
            if (scope.getMemberWithHash(name, hash)) |member| {
                declare_loc = member.loc;
                break :brk member.ref;
            }

            // Is the symbol a member of this scope's TypeScript namespace?
            if (scope.ts_namespace) |ts_namespace| {
                if (ts_namespace.exported_members.get(name)) |member| {
                    if (member.data.isEnum() == ts_namespace.is_enum_scope) {
                        declare_loc = member.loc;
                        // If this is an identifier from a sibling TypeScript namespace, then we're
                        // going to have to generate a property access instead of a simple reference.
                        // Lazily-generate an identifier that represents this property access.
                        const gop = try ts_namespace.property_accesses.getOrPut(p.allocator, name);
                        if (!gop.found_existing) {
                            const ref = try p.newSymbol(.other, name);
                            gop.value_ptr.* = ref;
                            // The alias makes this symbol print as
                            // `<namespace arg>.<name>` instead of `<name>`.
                            p.symbols.items[ref.inner_index].namespace_alias = .{
                                .namespace_ref = ts_namespace.arg_ref,
                                .alias = name,
                            };
                            break :brk ref;
                        }
                        break :brk gop.value_ptr.*;
                    }
                }
            }
        }

        // Allocate an "unbound" symbol
        p.checkForNonBMPCodePoint(loc, name);
        if (comptime !record_usage) {
            // Caller only wants lookup, not symbol creation: report "not found".
            return FindSymbolResult{
                .ref = Ref.None,
                .declare_loc = loc,
                .is_inside_with_scope = is_inside_with_scope,
            };
        }

        const gpe = p.module_scope.getOrPutMemberWithHash(allocator, name, hash) catch unreachable;

        // I don't think this happens?
        if (gpe.found_existing) {
            const existing = gpe.value_ptr.*;
            declare_loc = existing.loc;
            break :brk existing.ref;
        }

        const _ref = p.newSymbol(.unbound, name) catch unreachable;

        gpe.key_ptr.* = name;
        gpe.value_ptr.* = js_ast.Scope.Member{ .ref = _ref, .loc = loc };

        declare_loc = loc;

        break :brk _ref;
    };

    // If we had to pass through a "with" statement body to get to the symbol
    // declaration, then this reference could potentially also refer to a
    // property on the target object of the "with" statement. We must not rename
    // it or we risk changing the behavior of the code.
    if (is_inside_with_scope) {
        p.symbols.items[ref.innerIndex()].must_not_be_renamed = true;
    }

    // Track how many times we've referenced this symbol
    if (comptime record_usage) p.recordUsage(ref);

    return FindSymbolResult{
        .ref = ref,
        .declare_loc = declare_loc,
        .is_inside_with_scope = is_inside_with_scope,
    };
}
|
||
|
||
/// Record every identifier bound by `binding` (including identifiers nested
/// inside array/object destructuring patterns) as a named export.
pub fn recordExportedBinding(p: *P, binding: Binding) void {
    switch (binding.data) {
        .b_identifier => |ident| {
            const original_name = p.symbols.items[ident.ref.innerIndex()].original_name;
            p.recordExport(binding.loc, original_name, ident.ref) catch unreachable;
        },
        // Destructuring patterns: recurse into each nested binding.
        .b_array => |array| for (array.items) |item| {
            p.recordExportedBinding(item.binding);
        },
        .b_object => |obj| for (obj.properties) |prop| {
            p.recordExportedBinding(prop.value);
        },
        .b_missing => {},
    }
}
|
||
|
||
/// Register `alias` as a named export bound to `ref`.
/// Reports a duplicate-export error (with a note pointing at the original
/// export) if the alias was already exported.
pub fn recordExport(p: *P, loc: logger.Loc, alias: string, ref: Ref) !void {
    const previous = p.named_exports.get(alias) orelse {
        // First occurrence of this alias: record it, unless we've deoptimized
        // into plain CommonJS named exports.
        if (!p.isDeoptimizedCommonJS()) {
            try p.named_exports.put(p.allocator, alias, js_ast.NamedExport{ .alias_loc = loc, .ref = ref });
        }
        return;
    };

    // Duplicate exports are an error
    const notes = try p.allocator.alloc(logger.Data, 1);
    notes[0] = logger.Data{
        .text = try std.fmt.allocPrint(p.allocator, "\"{s}\" was originally exported here", .{alias}),
        .location = logger.Location.initOrNull(p.source, js_lexer.rangeOfIdentifier(p.source, previous.alias_loc)),
    };
    try p.log.addRangeErrorFmtWithNotes(
        p.source,
        js_lexer.rangeOfIdentifier(p.source, loc),
        p.allocator,
        notes,
        "Multiple exports with the same name \"{s}\"",
        .{std.mem.trim(u8, alias, "\"'")},
    );
}
|
||
|
||
/// True when the CommonJS named-export optimization has been abandoned and
/// at least one CommonJS named export was recorded.
fn isDeoptimizedCommonJS(p: *P) bool {
    if (!p.commonjs_named_exports_deoptimized) return false;
    return p.commonjs_named_exports.count() > 0;
}
|
||
|
||
/// Record one reference to the symbol `ref`.
///
/// Updates both the symbol's global use-count estimate (used by the
/// minifier's renamer) and the current part's `symbol_uses` map, except
/// inside dead code regions. TypeScript use counts are tracked for the
/// whole file, dead code included, because TS-to-JS conversion depends on
/// them being exact. No-op while revisiting for substitution so counts
/// are not double-incremented.
pub fn recordUsage(p: *P, ref: Ref) void {
    if (p.is_revisit_for_substitution) return;
    // The use count stored in the symbol is used for generating symbol names
    // during minification. These counts shouldn't include references inside dead
    // code regions since those will be culled.
    if (!p.is_control_flow_dead) {
        if (comptime Environment.allow_assert) assert(p.symbols.items.len > ref.innerIndex());
        p.symbols.items[ref.innerIndex()].use_count_estimate += 1;
        // `const`: only the pointed-at map entry is mutated, never the
        // GetOrPut result itself.
        const result = p.symbol_uses.getOrPut(p.allocator, ref) catch unreachable;
        if (!result.found_existing) {
            result.value_ptr.* = Symbol.Use{ .count_estimate = 1 };
        } else {
            result.value_ptr.count_estimate += 1;
        }
    }

    // The correctness of TypeScript-to-JavaScript conversion relies on accurate
    // symbol use counts for the whole file, including dead code regions. This is
    // tracked separately in a parser-only data structure.
    if (is_typescript_enabled) {
        p.ts_use_counts.items[ref.innerIndex()] += 1;
    }
}
|
||
|
||
/// Flush deferred "await"/"yield" errors that were provisionally recorded
/// while parsing what might have been arrow-function arguments.
fn logArrowArgErrors(p: *P, errors: *DeferredArrowArgErrors) void {
    const await_range = errors.invalid_expr_await;
    if (await_range.len > 0) {
        p.log.addRangeError(p.source, await_range, "Cannot use an \"await\" expression here") catch unreachable;
    }

    const yield_range = errors.invalid_expr_yield;
    if (yield_range.len > 0) {
        p.log.addRangeError(p.source, yield_range, "Cannot use a \"yield\" expression here") catch unreachable;
    }
}
|
||
|
||
/// Best-effort human-readable name for a property key, for error messages.
/// Falls back to the literal word "property" for computed/non-literal keys.
fn keyNameForError(p: *P, key: js_ast.Expr) string {
    return switch (key.data) {
        .e_string => |str| str.string(p.allocator) catch unreachable,
        .e_private_identifier => |private| p.loadNameFromRef(private.ref),
        else => "property",
    };
}
|
||
|
||
/// This function is very very hot.
///
/// Post-processes a visited identifier: inlines constants, validates
/// assignments to imports, substitutes `E.ImportIdentifier` for import items
/// and namespace-aliased symbols, inlines TypeScript enum members, rewrites
/// namespace-exported symbols into property accesses, and re-resolves
/// identifiers whose original name was supplied.
pub fn handleIdentifier(p: *P, loc: logger.Loc, ident: E.Identifier, original_name: ?string, opts: IdentifierOpts) Expr {
    const ref = ident.ref;

    // Constant inlining: replace the identifier with its known value.
    if (p.options.features.inlining) {
        if (p.const_values.get(ref)) |replacement| {
            p.ignoreUsage(ref);
            return replacement;
        }
    }

    // Create an error for assigning to an import namespace
    if ((opts.assign_target != .none or opts.is_delete_target) and p.symbols.items[ref.innerIndex()].kind == .import) {
        const r = js_lexer.rangeOfIdentifier(p.source, loc);
        p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot assign to import \"{s}\"", .{
            p.symbols.items[ref.innerIndex()].original_name,
        }) catch unreachable;
    }

    // Substitute an EImportIdentifier now if this has a namespace alias
    if (opts.assign_target == .none and !opts.is_delete_target) {
        const symbol = &p.symbols.items[ref.inner_index];
        if (symbol.namespace_alias) |ns_alias| {
            // If the alias points into a known TypeScript namespace, try to
            // inline the referenced member directly.
            if (p.ref_to_ts_namespace_member.get(ns_alias.namespace_ref)) |ts_member_data| {
                if (ts_member_data == .namespace) {
                    if (ts_member_data.namespace.get(ns_alias.alias)) |member| {
                        switch (member.data) {
                            .enum_number => |num| return p.wrapInlinedEnum(
                                .{ .loc = loc, .data = .{ .e_number = .{ .value = num } } },
                                p.symbols.items[ref.inner_index].original_name,
                            ),

                            .enum_string => |str| return p.wrapInlinedEnum(
                                .{ .loc = loc, .data = .{ .e_string = str } },
                                p.symbols.items[ref.inner_index].original_name,
                            ),

                            .namespace => |map| {
                                // Nested namespace: emit `<ns>.<alias>` and
                                // remember it so chained accesses can keep
                                // resolving through the namespace map.
                                const expr = p.newExpr(E.Dot{
                                    .target = p.newExpr(E.Identifier.init(ns_alias.namespace_ref), loc),
                                    .name = ns_alias.alias,
                                    .name_loc = loc,
                                }, loc);
                                p.ts_namespace = .{
                                    .expr = expr.data,
                                    .map = map,
                                };
                                return expr;
                            },

                            else => {},
                        }
                    }
                }
            }

            return p.newExpr(E.ImportIdentifier{
                .ref = ident.ref,
                .was_originally_identifier = true,
            }, loc);
        }
    }

    // Substitute an EImportIdentifier now if this is an import item
    if (p.is_import_item.contains(ref)) {
        return p.newExpr(
            E.ImportIdentifier{ .ref = ref, .was_originally_identifier = opts.was_originally_identifier },
            loc,
        );
    }

    if (is_typescript_enabled) {
        // Inline TypeScript enum members / track namespace references.
        if (p.ref_to_ts_namespace_member.get(ref)) |member_data| {
            switch (member_data) {
                .enum_number => |num| return p.wrapInlinedEnum(
                    .{ .loc = loc, .data = .{ .e_number = .{ .value = num } } },
                    p.symbols.items[ref.inner_index].original_name,
                ),

                .enum_string => |str| return p.wrapInlinedEnum(
                    .{ .loc = loc, .data = .{ .e_string = str } },
                    p.symbols.items[ref.inner_index].original_name,
                ),

                .namespace => |map| {
                    const expr: Expr = .{
                        .data = .{ .e_identifier = ident },
                        .loc = loc,
                    };

                    p.ts_namespace = .{
                        .expr = expr.data,
                        .map = map,
                    };

                    return expr;
                },

                else => {},
            }
        }

        // Substitute a namespace export reference now if appropriate
        if (p.is_exported_inside_namespace.get(ref)) |ns_ref| {
            const name = p.symbols.items[ref.innerIndex()].original_name;

            // Rewrite `foo` into `<ns>.foo` so the reference goes through the
            // namespace object.
            p.recordUsage(ns_ref);
            const prop = p.newExpr(E.Dot{
                .target = p.newExpr(E.Identifier.init(ns_ref), loc),
                .name = name,
                .name_loc = loc,
            }, loc);

            // Keep the tracked namespace expression in sync with the rewrite.
            if (p.ts_namespace.expr == .e_identifier and
                p.ts_namespace.expr.e_identifier.ref.eql(ident.ref))
            {
                p.ts_namespace.expr = prop.data;
            }

            return prop;
        }
    }

    // When the caller supplies the original name, re-resolve it against the
    // current scope chain and rebind the identifier to that result.
    if (original_name) |name| {
        const result = p.findSymbol(loc, name) catch unreachable;
        var id_clone = ident;
        id_clone.ref = result.ref;
        return p.newExpr(id_clone, loc);
    }

    return .{
        .data = .{ .e_identifier = ident },
        .loc = loc,
    };
}
|
||
|
||
/// Synthesize an `import { ... } from "<import_path>"` statement in its own
/// part and append it to `parts`.
///
/// `imports` lists the aliases to import; `symbols` maps each alias to the
/// pre-created `Ref` to bind (either `RuntimeImports` or a map-like value).
/// `suffix` is prepended to the generated namespace identifier, and
/// `additional_stmt`, when present, is emitted after the import statement.
/// Fix: unmutated locals are declared `const` (const-by-default; modern Zig
/// rejects never-mutated `var`s) — only element/field contents are mutated
/// through them, never the bindings themselves.
pub fn generateImportStmt(
    p: *P,
    import_path: string,
    imports: anytype,
    parts: *ListManaged(js_ast.Part),
    symbols: anytype,
    additional_stmt: ?Stmt,
    comptime suffix: string,
    comptime is_internal: bool,
) anyerror!void {
    const allocator = p.allocator;
    const import_record_i = p.addImportRecordByRange(.stmt, logger.Range.None, import_path);
    const import_record: *ImportRecord = &p.import_records.items[import_record_i];
    if (comptime is_internal)
        import_record.path.namespace = "runtime";
    import_record.is_internal = is_internal;
    const import_path_identifier = try import_record.path.name.nonUniqueNameString(allocator);
    // Namespace identifier is "<suffix><identifier derived from the path>".
    const namespace_identifier = try allocator.alloc(u8, import_path_identifier.len + suffix.len);
    const clause_items = try allocator.alloc(js_ast.ClauseItem, imports.len);
    const stmts = try allocator.alloc(Stmt, 1 + if (additional_stmt != null) @as(usize, 1) else @as(usize, 0));
    var declared_symbols = DeclaredSymbol.List{};
    try declared_symbols.ensureTotalCapacity(allocator, imports.len + 1);
    bun.copy(u8, namespace_identifier, suffix);
    bun.copy(u8, namespace_identifier[suffix.len..], import_path_identifier);

    const namespace_ref = try p.newSymbol(.other, namespace_identifier);
    declared_symbols.appendAssumeCapacity(.{
        .ref = namespace_ref,
        .is_top_level = true,
    });
    try p.module_scope.generated.push(allocator, namespace_ref);
    for (imports, clause_items) |alias, *clause_item| {
        const ref = symbols.get(alias) orelse unreachable;
        // RuntimeImports keys are indices into RuntimeImports.all; other
        // symbol maps use the alias string directly.
        const alias_name = if (@TypeOf(symbols) == RuntimeImports) RuntimeImports.all[alias] else alias;
        clause_item.* = js_ast.ClauseItem{
            .alias = alias_name,
            .original_name = alias_name,
            .alias_loc = logger.Loc{},
            .name = LocRef{ .ref = ref, .loc = logger.Loc{} },
        };
        declared_symbols.appendAssumeCapacity(.{ .ref = ref, .is_top_level = true });

        // ensure every e_import_identifier holds the namespace
        if (p.options.features.hot_module_reloading) {
            const symbol = &p.symbols.items[ref.inner_index];
            if (symbol.namespace_alias == null) {
                symbol.namespace_alias = .{
                    .namespace_ref = namespace_ref,
                    .alias = alias_name,
                    .import_record_index = import_record_i,
                };
            }
        }

        try p.is_import_item.put(allocator, ref, {});
        try p.named_imports.put(allocator, ref, js_ast.NamedImport{
            .alias = alias_name,
            .alias_loc = logger.Loc{},
            .namespace_ref = namespace_ref,
            .import_record_index = import_record_i,
        });
    }

    stmts[0] = p.s(
        S.Import{
            .namespace_ref = namespace_ref,
            .items = clause_items,
            .import_record_index = import_record_i,
            .is_single_line = true,
        },
        logger.Loc{},
    );
    if (additional_stmt) |add| {
        stmts[1] = add;
    }

    const import_records = try allocator.alloc(@TypeOf(import_record_i), 1);
    import_records[0] = import_record_i;

    // This import is placed in a part before the main code, however
    // the bundler ends up re-ordering this to be after... The order
    // does not matter as ESM imports are always hoisted.
    parts.append(js_ast.Part{
        .stmts = stmts,
        .declared_symbols = declared_symbols,
        .import_record_indices = bun.BabyList(u32).init(import_records),
        .tag = .runtime,
    }) catch unreachable;
}
|
||
|
||
/// Generate the React Refresh runtime import for this module, choosing the
/// emission strategy based on whether hot module reloading is enabled.
pub fn generateReactRefreshImport(
    p: *P,
    parts: *ListManaged(js_ast.Part),
    import_path: []const u8,
    clauses: []const ReactRefreshImportClause,
) !void {
    // Branch on the runtime flag with comptime-known literals so the callee
    // is monomorphized per mode, same as an `inline else` switch would do.
    if (p.options.features.hot_module_reloading) {
        try p.generateReactRefreshImportHmr(parts, import_path, clauses, true);
    } else {
        try p.generateReactRefreshImportHmr(parts, import_path, clauses, false);
    }
}
|
||
|
||
/// One named binding requested from the React Refresh runtime module.
const ReactRefreshImportClause = struct {
    // Exported name in the react-refresh runtime (e.g. "$RefreshReg$" —
    // presumably; confirm against callers).
    name: []const u8,
    // Whether this clause should actually be emitted for this module.
    enabled: bool,
    // The symbol the generated import binds the name to.
    ref: Ref,
};
|
||
|
||
/// Emit the React Refresh runtime binding for this module.
///
/// With `hot_module_reloading` the bindings are produced as a destructuring
/// `const { ... } = require(import_path)`; without it, as a regular ESM
/// import statement. Enabled clauses are registered as import items / named
/// imports, and everything is appended to `parts` as one runtime part.
fn generateReactRefreshImportHmr(
    p: *P,
    parts: *ListManaged(js_ast.Part),
    import_path: []const u8,
    clauses: []const ReactRefreshImportClause,
    comptime hot_module_reloading: bool,
) !void {
    // If `hot_module_reloading`, we are going to generate a require call:
    //
    // const { $RefreshSig$, $RefreshReg$ } = require("react-refresh/runtime")`
    //
    // Otherwise we are going to settle on an import statement. Using
    // require is fine in HMR bundling because `react-refresh` itself is
    // already a CommonJS module, and it will actually be more efficient
    // at runtime this way.
    const allocator = p.allocator;
    const import_record_index = p.addImportRecordByRange(.stmt, logger.Range.None, import_path);

    // The item representation differs per mode: destructuring properties for
    // the require() form, import clause items for the ESM form.
    const Item = if (hot_module_reloading) B.Object.Property else js_ast.ClauseItem;

    // Capacity upper bound: namespace symbol + one per possibly-used helper.
    const len = 1 + @as(usize, @intFromBool(p.react_refresh.register_used)) +
        @as(usize, @intFromBool(p.react_refresh.signature_used));
    var items = try List(Item).initCapacity(allocator, len);

    const stmts = try allocator.alloc(Stmt, 1);
    var declared_symbols = DeclaredSymbol.List{};
    try declared_symbols.ensureTotalCapacity(allocator, len);

    const namespace_ref = try p.newSymbol(.other, "RefreshRuntime");
    declared_symbols.appendAssumeCapacity(.{
        .ref = namespace_ref,
        .is_top_level = true,
    });
    try p.module_scope.generated.push(allocator, namespace_ref);

    for (clauses) |entry| {
        if (entry.enabled) {
            // Build the mode-specific item for this clause.
            items.appendAssumeCapacity(if (hot_module_reloading) .{
                .key = p.newExpr(E.String{ .data = entry.name }, logger.Loc.Empty),
                .value = p.b(B.Identifier{ .ref = entry.ref }, logger.Loc.Empty),
            } else .{
                .alias = entry.name,
                .original_name = entry.name,
                .alias_loc = logger.Loc{},
                .name = LocRef{ .ref = entry.ref, .loc = logger.Loc{} },
            });
            declared_symbols.appendAssumeCapacity(.{ .ref = entry.ref, .is_top_level = true });
            try p.module_scope.generated.push(allocator, entry.ref);
            try p.is_import_item.put(allocator, entry.ref, {});
            try p.named_imports.put(allocator, entry.ref, .{
                .alias = entry.name,
                .alias_loc = logger.Loc.Empty,
                .namespace_ref = namespace_ref,
                .import_record_index = import_record_index,
            });
        }
    }

    // const { ... } = require(...)   — or —   import { ... } from "..."
    stmts[0] = p.s(if (hot_module_reloading)
        S.Local{
            .kind = .k_const,
            .decls = try Decl.List.fromSlice(p.allocator, &.{.{
                .binding = p.b(B.Object{
                    .properties = items.items,
                }, logger.Loc.Empty),
                .value = p.newExpr(E.RequireString{
                    .import_record_index = import_record_index,
                }, logger.Loc.Empty),
            }}),
        }
    else
        S.Import{
            .namespace_ref = namespace_ref,
            .items = items.items,
            .import_record_index = import_record_index,
            .is_single_line = false,
        }, logger.Loc.Empty);

    try parts.append(.{
        .stmts = stmts,
        .declared_symbols = declared_symbols,
        .import_record_indices = try bun.BabyList(u32).fromSlice(allocator, &.{import_record_index}),
        .tag = .runtime,
    });
}
|
||
|
||
/// Try to substitute `replacement` for the single-use symbol `ref` inside the
/// first evaluated expression of `stmt`. Returns true when the substitution
/// was performed (the statement is updated in place), false otherwise.
fn substituteSingleUseSymbolInStmt(p: *P, stmt: Stmt, ref: Ref, replacement: Expr) bool {
    // Select the one expression slot in this statement where substitution is
    // legal: expression statements, throw/return values, if/switch test
    // expressions, and the initializer of a single-identifier declaration.
    const expr: *Expr = brk: {
        switch (stmt.data) {
            .s_expr => |exp| {
                break :brk &exp.value;
            },
            .s_throw => |throw| {
                break :brk &throw.value;
            },
            .s_return => |ret| {
                if (ret.value) |*value| {
                    break :brk value;
                }
            },
            .s_if => |if_stmt| {
                break :brk &if_stmt.test_;
            },
            .s_switch => |switch_stmt| {
                break :brk &switch_stmt.test_;
            },
            .s_local => |local| {
                if (local.decls.len > 0) {
                    var first: *Decl = &local.decls.ptr[0];
                    if (first.value) |*value| {
                        if (first.binding.data == .b_identifier) {
                            break :brk value;
                        }
                    }
                }
            },
            else => {},
        }

        // No substitutable expression in this statement kind.
        return false;
    };

    // Only continue trying to insert this replacement into sub-expressions
    // after the first one if the replacement has no side effects:
    //
    // // Substitution is ok
    // let replacement = 123;
    // return x + replacement;
    //
    // // Substitution is not ok because "fn()" may change "x"
    // let replacement = fn();
    // return x + replacement;
    //
    // // Substitution is not ok because "x == x" may change "x" due to "valueOf()" evaluation
    // let replacement = [x];
    // return (x == x) + replacement;
    //
    const replacement_can_be_removed = p.exprCanBeRemovedIfUnused(&replacement);
    switch (p.substituteSingleUseSymbolInExpr(expr.*, ref, replacement, replacement_can_be_removed)) {
        .success => |result| {
            if (result.data == .e_binary or result.data == .e_unary or result.data == .e_if) {
                // Re-visit the substituted expression so constant folding can
                // run on the combined form; guard against recursive use-count
                // updates while doing so.
                const prev_substituting = p.is_revisit_for_substitution;
                p.is_revisit_for_substitution = true;
                defer p.is_revisit_for_substitution = prev_substituting;
                // O(n^2) and we will need to think more carefully about
                // this once we implement syntax compression
                expr.* = p.visitExpr(result);
            } else {
                expr.* = result;
            }

            return true;
        },
        else => {},
    }

    return false;
}
|
||
|
||
fn substituteSingleUseSymbolInExpr(
|
||
p: *P,
|
||
expr: Expr,
|
||
ref: Ref,
|
||
replacement: Expr,
|
||
replacement_can_be_removed: bool,
|
||
) Substitution {
|
||
outer: {
|
||
switch (expr.data) {
|
||
.e_identifier => |ident| {
|
||
if (ident.ref.eql(ref) or p.symbols.items[ident.ref.innerIndex()].link.eql(ref)) {
|
||
p.ignoreUsage(ref);
|
||
return .{ .success = replacement };
|
||
}
|
||
},
|
||
.e_new => |new| {
|
||
switch (p.substituteSingleUseSymbolInExpr(new.target, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
new.target = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
new.target = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
|
||
if (replacement_can_be_removed) {
|
||
for (new.args.slice()) |*arg| {
|
||
switch (p.substituteSingleUseSymbolInExpr(arg.*, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
arg.* = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
arg.* = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
}
|
||
}
|
||
},
|
||
.e_spread => |spread| {
|
||
switch (p.substituteSingleUseSymbolInExpr(spread.value, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
spread.value = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
spread.value = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
},
|
||
.e_await => |await_expr| {
|
||
switch (p.substituteSingleUseSymbolInExpr(await_expr.value, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
await_expr.value = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
await_expr.value = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
},
|
||
.e_yield => |yield| {
|
||
switch (p.substituteSingleUseSymbolInExpr(yield.value orelse Expr{ .data = .{ .e_missing = .{} }, .loc = expr.loc }, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
yield.value = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
yield.value = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
},
|
||
.e_import => |import| {
|
||
switch (p.substituteSingleUseSymbolInExpr(import.expr, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
import.expr = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
import.expr = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
|
||
// The "import()" expression has side effects but the side effects are
|
||
// always asynchronous so there is no way for the side effects to modify
|
||
// the replacement value. So it's ok to reorder the replacement value
|
||
// past the "import()" expression assuming everything else checks out.
|
||
|
||
if (replacement_can_be_removed and p.exprCanBeRemovedIfUnused(&import.expr)) {
|
||
return .{ .continue_ = expr };
|
||
}
|
||
},
|
||
.e_unary => |e| {
|
||
switch (e.op) {
|
||
.un_pre_inc, .un_post_inc, .un_pre_dec, .un_post_dec, .un_delete => {
|
||
// Do not substitute into an assignment position
|
||
},
|
||
else => {
|
||
switch (p.substituteSingleUseSymbolInExpr(e.value, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
e.value = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
e.value = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
},
|
||
}
|
||
},
|
||
.e_dot => |e| {
|
||
switch (p.substituteSingleUseSymbolInExpr(e.target, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
e.target = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
e.target = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
},
|
||
.e_binary => |e| {
|
||
// Do not substitute into an assignment position
|
||
if (e.op.binaryAssignTarget() == .none) {
|
||
switch (p.substituteSingleUseSymbolInExpr(e.left, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
e.left = result;
|
||
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
e.left = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
} else if (!p.exprCanBeRemovedIfUnused(&e.left)) {
|
||
// Do not reorder past a side effect in an assignment target, as that may
|
||
// change the replacement value. For example, "fn()" may change "a" here:
|
||
//
|
||
// let a = 1;
|
||
// foo[fn()] = a;
|
||
//
|
||
return .{ .failure = expr };
|
||
} else if (e.op.binaryAssignTarget() == .update and !replacement_can_be_removed) {
|
||
// If this is a read-modify-write assignment and the replacement has side
|
||
// effects, don't reorder it past the assignment target. The assignment
|
||
// target is being read so it may be changed by the side effect. For
|
||
// example, "fn()" may change "foo" here:
|
||
//
|
||
// let a = fn();
|
||
// foo += a;
|
||
//
|
||
return .{ .failure = expr };
|
||
}
|
||
|
||
// If we get here then it should be safe to attempt to substitute the
|
||
// replacement past the left operand into the right operand.
|
||
switch (p.substituteSingleUseSymbolInExpr(e.right, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
e.right = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
e.right = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
},
|
||
.e_if => |e| {
|
||
switch (p.substituteSingleUseSymbolInExpr(expr.data.e_if.test_, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
e.test_ = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
e.test_ = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
|
||
// Do not substitute our unconditionally-executed value into a branch
|
||
// unless the value itself has no side effects
|
||
if (replacement_can_be_removed) {
|
||
// Unlike other branches in this function such as "a && b" or "a?.[b]",
|
||
// the "a ? b : c" form has potential code evaluation along both control
|
||
// flow paths. Handle this by allowing substitution into either branch.
|
||
// Side effects in one branch should not prevent the substitution into
|
||
// the other branch.
|
||
|
||
const yes = p.substituteSingleUseSymbolInExpr(e.yes, ref, replacement, replacement_can_be_removed);
|
||
if (yes == .success) {
|
||
e.yes = yes.success;
|
||
return .{ .success = expr };
|
||
}
|
||
|
||
const no = p.substituteSingleUseSymbolInExpr(e.no, ref, replacement, replacement_can_be_removed);
|
||
if (no == .success) {
|
||
e.no = no.success;
|
||
return .{ .success = expr };
|
||
}
|
||
|
||
// Side effects in either branch should stop us from continuing to try to
|
||
// substitute the replacement after the control flow branches merge again.
|
||
if (yes != .continue_ or no != .continue_) {
|
||
return .{ .failure = expr };
|
||
}
|
||
}
|
||
},
|
||
.e_index => |index| {
|
||
switch (p.substituteSingleUseSymbolInExpr(index.target, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
index.target = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
index.target = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
|
||
// Do not substitute our unconditionally-executed value into a branch
|
||
// unless the value itself has no side effects
|
||
if (replacement_can_be_removed or index.optional_chain == null) {
|
||
switch (p.substituteSingleUseSymbolInExpr(index.index, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
index.index = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
index.index = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
}
|
||
},
|
||
|
||
.e_call => |e| {
|
||
// Don't substitute something into a call target that could change "this"
|
||
switch (replacement.data) {
|
||
.e_dot, .e_index => {
|
||
if (e.target.data == .e_identifier and e.target.data.e_identifier.ref.eql(ref)) {
|
||
break :outer;
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
switch (p.substituteSingleUseSymbolInExpr(e.target, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
e.target = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
e.target = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
|
||
// Do not substitute our unconditionally-executed value into a branch
|
||
// unless the value itself has no side effects
|
||
if (replacement_can_be_removed or e.optional_chain == null) {
|
||
for (e.args.slice()) |*arg| {
|
||
switch (p.substituteSingleUseSymbolInExpr(arg.*, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
arg.* = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
arg.* = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
}
|
||
}
|
||
},
|
||
|
||
.e_array => |e| {
|
||
for (e.items.slice()) |*item| {
|
||
switch (p.substituteSingleUseSymbolInExpr(item.*, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
item.* = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
item.* = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
}
|
||
},
|
||
|
||
.e_object => |e| {
|
||
for (e.properties.slice()) |*property| {
|
||
// Check the key
|
||
|
||
if (property.flags.contains(.is_computed)) {
|
||
switch (p.substituteSingleUseSymbolInExpr(property.key.?, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
property.key = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
property.key = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
|
||
// Stop now because both computed keys and property spread have side effects
|
||
return .{ .failure = expr };
|
||
}
|
||
|
||
// Check the value
|
||
if (property.value) |value| {
|
||
switch (p.substituteSingleUseSymbolInExpr(value, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
if (result.data == .e_missing) {
|
||
property.value = null;
|
||
} else {
|
||
property.value = result;
|
||
}
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
if (result.data == .e_missing) {
|
||
property.value = null;
|
||
} else {
|
||
property.value = result;
|
||
}
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
}
|
||
}
|
||
},
|
||
|
||
.e_template => |e| {
|
||
if (e.tag) |*tag| {
|
||
switch (p.substituteSingleUseSymbolInExpr(tag.*, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
tag.* = result;
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
tag.* = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
}
|
||
|
||
for (e.parts) |*part| {
|
||
switch (p.substituteSingleUseSymbolInExpr(part.value, ref, replacement, replacement_can_be_removed)) {
|
||
.continue_ => {},
|
||
.success => |result| {
|
||
part.value = result;
|
||
|
||
// todo: mangle template parts
|
||
|
||
return .{ .success = expr };
|
||
},
|
||
.failure => |result| {
|
||
part.value = result;
|
||
return .{ .failure = expr };
|
||
},
|
||
}
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
|
||
// If both the replacement and this expression have no observable side
|
||
// effects, then we can reorder the replacement past this expression
|
||
if (replacement_can_be_removed and p.exprCanBeRemovedIfUnused(&expr)) {
|
||
return .{ .continue_ = expr };
|
||
}
|
||
|
||
const tag: Expr.Tag = @as(Expr.Tag, expr.data);
|
||
|
||
// We can always reorder past primitive values
|
||
if (tag.isPrimitiveLiteral()) {
|
||
return .{ .continue_ = expr };
|
||
}
|
||
|
||
// Otherwise we should stop trying to substitute past this point
|
||
return .{ .failure = expr };
|
||
}
|
||
|
||
/// Set up parser state for the second ("visit") pass: flatten the recorded
/// scope order into a dense slice, enter the module scope, apply JSX pragma
/// comments, propagate strict mode for ESM files, hoist symbols, and declare
/// the CommonJS / jest / fast-refresh / server-component symbols the visit
/// pass may reference.
pub fn prepareForVisitPass(p: *P) anyerror!void {
    {
        // Flatten the non-null entries of "scopes_in_order" into a dense
        // slice the visit pass consumes front-to-back.
        //
        // The buffer is sized to the full list, so when some entries are
        // null the tail of the allocation is simply never visited.
        // (The original code also counted the non-null entries here, but
        // that count was never read — the dead loop has been removed.)
        var i: usize = 0;
        p.scope_order_to_visit = try p.allocator.alloc(ScopeOrder, p.scopes_in_order.items.len);
        for (p.scopes_in_order.items) |item| {
            if (item) |_item| {
                p.scope_order_to_visit[i] = _item;
                i += 1;
            }
        }
    }

    // A file is considered to have ESM exports if it uses top-level await,
    // an "export" keyword, or was explicitly configured as an ES module.
    p.is_file_considered_to_have_esm_exports =
        !p.top_level_await_keyword.isEmpty() or !p.esm_export_keyword.isEmpty() or
        p.options.module_type == .esm;

    try p.pushScopeForVisitPass(js_ast.Scope.Kind.entry, locModuleScope);
    p.fn_or_arrow_data_visit.is_outside_fn_or_arrow = true;
    p.module_scope = p.current_scope;
    p.has_es_module_syntax = p.has_es_module_syntax or p.esm_import_keyword.len > 0 or p.esm_export_keyword.len > 0 or p.top_level_await_keyword.len > 0;

    // JSX pragma comments (e.g. "@jsx h") override the build options.
    if (p.lexer.jsx_pragma.jsx()) |factory| {
        p.options.jsx.factory = options.JSX.Pragma.memberListToComponentsIfDifferent(p.allocator, p.options.jsx.factory, factory.text) catch unreachable;
    }

    if (p.lexer.jsx_pragma.jsxFrag()) |fragment| {
        p.options.jsx.fragment = options.JSX.Pragma.memberListToComponentsIfDifferent(p.allocator, p.options.jsx.fragment, fragment.text) catch unreachable;
    }

    if (p.lexer.jsx_pragma.jsxImportSource()) |import_source| {
        p.options.jsx.classic_import_source = import_source.text;
        p.options.jsx.package_name = p.options.jsx.classic_import_source;
        p.options.jsx.setImportSource(p.allocator);
    }

    if (p.lexer.jsx_pragma.jsxRuntime()) |runtime| {
        if (options.JSX.RuntimeMap.get(runtime.text)) |jsx_runtime| {
            p.options.jsx.runtime = jsx_runtime;
        } else {
            // make this a warning instead of an error because we don't support "preserve" right now
            try p.log.addRangeWarningFmt(p.source, runtime.range, p.allocator, "Unsupported JSX runtime: \"{s}\"", .{runtime.text});
        }
    }

    // ECMAScript modules are always interpreted as strict mode. This has to be
    // done before "hoistSymbols" because strict mode can alter hoisting (!).
    if (p.esm_import_keyword.len > 0) {
        p.module_scope.recursiveSetStrictMode(js_ast.StrictModeKind.implicit_strict_mode_import);
    } else if (p.esm_export_keyword.len > 0) {
        p.module_scope.recursiveSetStrictMode(js_ast.StrictModeKind.implicit_strict_mode_export);
    } else if (p.top_level_await_keyword.len > 0) {
        p.module_scope.recursiveSetStrictMode(js_ast.StrictModeKind.implicit_strict_mode_top_level_await);
    }

    p.hoistSymbols(p.module_scope);

    // Pre-size the module scope's symbol tables for the symbols declared below.
    var generated_symbols_count: u32 = 3;

    if (p.options.features.react_fast_refresh) {
        generated_symbols_count += 3;
    }

    if (is_jsx_enabled) {
        generated_symbols_count += 7;

        if (p.options.jsx.development) generated_symbols_count += 1;
    }

    try p.module_scope.generated.ensureUnusedCapacity(p.allocator, generated_symbols_count * 3);
    try p.module_scope.members.ensureUnusedCapacity(p.allocator, generated_symbols_count * 3 + p.module_scope.members.count());

    // CommonJS module-shape symbols: "exports"/"module" are hoisted, the rest
    // are unbound (so user declarations of the same name shadow them).
    p.exports_ref = try p.declareCommonJSSymbol(.hoisted, "exports");
    p.module_ref = try p.declareCommonJSSymbol(.hoisted, "module");

    p.require_ref = try p.declareCommonJSSymbol(.unbound, "require");
    p.dirname_ref = try p.declareCommonJSSymbol(.unbound, "__dirname");
    p.filename_ref = try p.declareCommonJSSymbol(.unbound, "__filename");

    if (p.options.features.inject_jest_globals) {
        // Test-runner globals injected by the test runner.
        p.jest.describe = try p.declareCommonJSSymbol(.unbound, "describe");
        p.jest.@"test" = try p.declareCommonJSSymbol(.unbound, "test");
        p.jest.jest = try p.declareCommonJSSymbol(.unbound, "jest");
        p.jest.it = try p.declareCommonJSSymbol(.unbound, "it");
        p.jest.expect = try p.declareCommonJSSymbol(.unbound, "expect");
        p.jest.beforeEach = try p.declareCommonJSSymbol(.unbound, "beforeEach");
        p.jest.afterEach = try p.declareCommonJSSymbol(.unbound, "afterEach");
        p.jest.beforeAll = try p.declareCommonJSSymbol(.unbound, "beforeAll");
        p.jest.afterAll = try p.declareCommonJSSymbol(.unbound, "afterAll");
    }

    if (p.options.features.react_fast_refresh) {
        p.react_refresh.create_signature_ref = try p.declareGeneratedSymbol(.other, "$RefreshSig$");
        p.react_refresh.register_ref = try p.declareGeneratedSymbol(.other, "$RefreshReg$");
    }

    switch (p.options.features.server_components) {
        .none, .client_side => {},
        .wrap_exports_for_client_reference => {
            p.server_components_wrap_ref = try p.declareGeneratedSymbol(.other, "registerClientReference");
        },
        // TODO: these wrapping modes.
        .wrap_anon_server_functions => {},
        .wrap_exports_for_server_reference => {},
    }
}
|
||
|
||
/// Lazily declare the runtime "__require" symbol and register it in the
/// runtime import table. Does nothing if it was already declared.
fn ensureRequireSymbol(p: *P) void {
    if (p.runtime_imports.__require == null) {
        const require_ref = declareSymbolMaybeGenerated(
            p,
            .other,
            logger.Loc.Empty,
            generatedSymbolName("__require"),
            true,
        ) catch bun.outOfMemory();
        p.runtime_imports.__require = require_ref;
        p.runtime_imports.put("__require", require_ref);
    }
}
|
||
|
||
/// Declare the "__require" runtime symbol, but only when emitting runtime
/// helpers is allowed by the current feature flags.
pub fn resolveCommonJSSymbols(p: *P) void {
    if (p.options.features.allow_runtime) {
        p.ensureRequireSymbol();
    }
}
|
||
|
||
/// True when a renamer pass will run later, i.e. when bundling or when
/// identifier minification is enabled.
fn willUseRenamer(p: *P) bool {
    if (p.options.bundle) return true;
    return p.options.features.minify_identifiers;
}
|
||
|
||
/// Recursively hoist "var" and function symbols declared in this scope up to
/// the nearest enclosing scope that stops hoisting (function/module scope),
/// merging with or reporting collisions against symbols already declared
/// along the way. Recurses into all child scopes at the end.
fn hoistSymbols(p: *P, scope: *js_ast.Scope) void {
    // Scopes that stop hoisting (e.g. function bodies) have nothing to hoist
    // out of themselves; we still recurse into their children below.
    if (!scope.kindStopsHoisting()) {
        var iter = scope.members.iterator();
        const allocator = p.allocator;
        // Local alias into the symbol table; must be refreshed whenever
        // p.newSymbol() may grow the backing array (see below).
        var symbols = p.symbols.items;

        defer {
            if (comptime Environment.allow_assert) {
                // we call `.newSymbol` in this function
                // we need to avoid using a potentially re-sized array
                // so we assert that the array is in sync
                assert(symbols.ptr == p.symbols.items.ptr);
                assert(symbols.len == p.symbols.items.len);
            }
        }

        // Check for collisions that would prevent to hoisting "var" symbols up to the enclosing function scope
        if (scope.parent) |parent_scope| {
            nextMember: while (iter.next()) |res| {
                var value = res.value_ptr.*;
                var symbol: *Symbol = &symbols[value.ref.innerIndex()];

                const name = symbol.original_name;
                // Member-name hash, computed lazily and reused for every
                // scope lookup along the hoist path.
                var hash: ?u64 = null;

                // A non-hoisted name that collides with a catch binding in the
                // parent scope is a redeclaration error (e.g. `catch (e) { let e }`).
                if (parent_scope.kind == .catch_binding and symbol.kind != .hoisted) {
                    hash = Scope.getMemberHash(name);
                    if (parent_scope.getMemberWithHash(name, hash.?)) |existing_member| {
                        p.log.addSymbolAlreadyDeclaredError(
                            p.allocator,
                            p.source,
                            symbol.original_name,
                            value.loc,
                            existing_member.loc,
                        ) catch unreachable;
                        continue;
                    }
                }

                // Only "var"-like and function symbols hoist.
                if (!symbol.isHoisted()) {
                    continue;
                }

                var __scope = scope.parent;
                if (comptime Environment.allow_assert)
                    assert(__scope != null);

                var is_sloppy_mode_block_level_fn_stmt = false;
                const original_member_ref = value.ref;

                if (p.willUseRenamer() and symbol.kind == .hoisted_function) {
                    // Block-level function declarations behave like "let" in strict mode
                    if (scope.strict_mode != .sloppy_mode) {
                        continue;
                    }

                    // In sloppy mode, block level functions behave like "let" except with
                    // an assignment to "var", sort of. This code:
                    //
                    //   if (x) {
                    //     f();
                    //     function f() {}
                    //   }
                    //   f();
                    //
                    // behaves like this code:
                    //
                    //   if (x) {
                    //     let f2 = function() {}
                    //     var f = f2;
                    //     f2();
                    //   }
                    //   f();
                    //
                    const hoisted_ref = p.newSymbol(.hoisted, symbol.original_name) catch unreachable;
                    // newSymbol may have reallocated the symbol table; refresh
                    // the local alias and the `symbol` pointer derived from it.
                    symbols = p.symbols.items;
                    scope.generated.push(p.allocator, hoisted_ref) catch unreachable;
                    p.hoisted_ref_for_sloppy_mode_block_fn.put(p.allocator, value.ref, hoisted_ref) catch unreachable;
                    value.ref = hoisted_ref;
                    symbol = &symbols[hoisted_ref.innerIndex()];
                    is_sloppy_mode_block_level_fn_stmt = true;
                }

                if (hash == null) hash = Scope.getMemberHash(name);

                // Walk up the scope chain until something stops hoisting.
                while (__scope) |_scope| {
                    const scope_kind = _scope.kind;

                    // Variable declarations hoisted past a "with" statement may actually end
                    // up overwriting a property on the target of the "with" statement instead
                    // of initializing the variable. We must not rename them or we risk
                    // causing a behavior change.
                    //
                    //   var obj = { foo: 1 }
                    //   with (obj) { var foo = 2 }
                    //   assert(foo === undefined)
                    //   assert(obj.foo === 2)
                    //
                    if (scope_kind == .with) {
                        symbol.must_not_be_renamed = true;
                    }

                    if (_scope.getMemberWithHash(name, hash.?)) |member_in_scope| {
                        var existing_symbol: *Symbol = &symbols[member_in_scope.ref.innerIndex()];
                        const existing_kind = existing_symbol.kind;

                        // We can hoist the symbol from the child scope into the symbol in
                        // this scope if:
                        //
                        //   - The symbol is unbound (i.e. a global variable access)
                        //   - The symbol is also another hoisted variable
                        //   - The symbol is a function of any kind and we're in a function or module scope
                        //
                        // Is this unbound (i.e. a global access) or also hoisted?
                        if (existing_kind == .unbound or existing_kind == .hoisted or
                            (Symbol.isKindFunction(existing_kind) and (scope_kind == .entry or scope_kind == .function_body)))
                        {
                            // Silently merge this symbol into the existing symbol
                            symbol.link = member_in_scope.ref;
                            // Re-insert the existing member under this name.
                            // NOTE(review): presumably this refreshes the stored
                            // key/value after the merge — confirm against Scope's
                            // map semantics.
                            const entry = _scope.getOrPutMemberWithHash(p.allocator, name, hash.?) catch unreachable;
                            entry.value_ptr.* = member_in_scope;
                            entry.key_ptr.* = name;
                            continue :nextMember;
                        }

                        // Otherwise if this isn't a catch identifier, it's a collision
                        if (existing_kind != .catch_identifier and existing_kind != .arguments) {

                            // An identifier binding from a catch statement and a function
                            // declaration can both silently shadow another hoisted symbol
                            if (symbol.kind != .catch_identifier and symbol.kind != .hoisted_function) {
                                if (!is_sloppy_mode_block_level_fn_stmt) {
                                    // Report "X has already been declared" with a
                                    // note pointing at the original declaration.
                                    const r = js_lexer.rangeOfIdentifier(p.source, value.loc);
                                    var notes = allocator.alloc(logger.Data, 1) catch unreachable;
                                    notes[0] =
                                        logger.rangeData(
                                            p.source,
                                            r,
                                            std.fmt.allocPrint(
                                                allocator,
                                                "{s} was originally declared here",
                                                .{name},
                                            ) catch unreachable,
                                        );

                                    p.log.addRangeErrorFmtWithNotes(p.source, js_lexer.rangeOfIdentifier(p.source, member_in_scope.loc), allocator, notes, "{s} has already been declared", .{name}) catch unreachable;
                                } else if (_scope == scope.parent) {
                                    // Never mind about this, turns out it's not needed after all
                                    _ = p.hoisted_ref_for_sloppy_mode_block_fn.remove(original_member_ref);
                                }
                            }
                            continue :nextMember;
                        }

                        // If this is a catch identifier, silently merge the existing symbol
                        // into this symbol but continue hoisting past this catch scope
                        existing_symbol.link = value.ref;
                        const entry = _scope.getOrPutMemberWithHash(p.allocator, name, hash.?) catch unreachable;
                        entry.value_ptr.* = value;
                        entry.key_ptr.* = name;
                    }

                    // Reached a scope that stops hoisting: declare the symbol
                    // here and stop walking up.
                    if (_scope.kindStopsHoisting()) {
                        const entry = _scope.getOrPutMemberWithHash(allocator, name, hash.?) catch unreachable;
                        entry.value_ptr.* = value;
                        entry.key_ptr.* = name;
                        break;
                    }

                    __scope = _scope.parent;
                }
            }
        }
    }

    {
        // Depth-first recursion into every child scope.
        const children = scope.children.slice();
        for (children) |child| {
            p.hoistSymbols(child);
        }
    }
}
|
||
|
||
/// Pop and return the next recorded scope for the visit pass, advancing the
/// cursor slice by one element.
inline fn nextScopeInOrderForVisitPass(p: *P) ScopeOrder {
    const next = p.scope_order_to_visit[0];
    p.scope_order_to_visit = p.scope_order_to_visit[1..];
    return next;
}
|
||
|
||
/// Re-enter the next scope recorded by the parse pass and make it current
/// for the visit pass.
fn pushScopeForVisitPass(p: *P, kind: js_ast.Scope.Kind, loc: logger.Loc) anyerror!void {
    const order = p.nextScopeInOrderForVisitPass();

    // Sanity-check that the scopes generated by the first and second passes match.
    //
    // FIX: in Zig, `and` binds tighter than `or`, so the previous condition
    // `allow_assert and A or B` parsed as `(allow_assert and A) or B`, which
    // made a scope-kind mismatch panic even in release builds. Parenthesize
    // so the whole sanity check is gated on `allow_assert`, as intended.
    if (bun.Environment.allow_assert and
        (order.loc.start != loc.start or order.scope.kind != kind))
    {
        p.log.level = .verbose;

        p.log.addDebugFmt(p.source, loc, p.allocator, "Expected this scope (.{s})", .{@tagName(kind)}) catch bun.outOfMemory();
        p.log.addDebugFmt(p.source, order.loc, p.allocator, "Found this scope (.{s})", .{@tagName(order.scope.kind)}) catch bun.outOfMemory();

        p.panic("Scope mismatch while visiting", .{});
    }

    p.current_scope = order.scope;

    try p.scopes_for_current_part.append(p.allocator, order.scope);
}
|
||
|
||
/// Create and enter a new scope during the parse pass, recording it in
/// "scopes_in_order" so the visit pass can re-enter the same scopes in the
/// same order. Returns the index of the recorded entry so callers can later
/// discard the scope (see the comment before the final append).
///
/// FIX: `parent` and `scope` were declared `var` but never reassigned; newer
/// Zig compilers reject unmutated `var` locals, so they are now `const`
/// (the pointees are still mutated through the pointers, which is fine).
fn pushScopeForParsePass(p: *P, comptime kind: js_ast.Scope.Kind, loc: logger.Loc) !usize {
    const parent: *Scope = p.current_scope;
    const allocator = p.allocator;
    const scope = try allocator.create(Scope);

    scope.* = Scope{
        .kind = kind,
        .label_ref = null,
        .parent = parent,
        .generated = .{},
    };

    try parent.children.push(allocator, scope);
    // Strict mode is inherited from the enclosing scope.
    scope.strict_mode = parent.strict_mode;

    p.current_scope = scope;

    if (comptime kind == .with) {
        // "with" statements change the default from ESModule to CommonJS at runtime.
        // "with" statements are not allowed in strict mode.
        if (p.options.features.commonjs_at_runtime) {
            p.has_with_scope = true;
        }
    }

    if (comptime Environment.isDebug) {
        // Enforce that scope locations are strictly increasing to help catch bugs
        // where the pushed scopes are mismatched between the first and second passes
        if (p.scopes_in_order.items.len > 0) {
            // Walk back over discarded (null) entries to find the most
            // recently recorded scope.
            var last_i = p.scopes_in_order.items.len - 1;
            while (p.scopes_in_order.items[last_i] == null and last_i > 0) {
                last_i -= 1;
            }

            if (p.scopes_in_order.items[last_i]) |prev_scope| {
                if (prev_scope.loc.start >= loc.start) {
                    p.log.level = .verbose;
                    p.log.addDebugFmt(p.source, prev_scope.loc, p.allocator, "Previous Scope", .{}) catch bun.outOfMemory();
                    p.log.addDebugFmt(p.source, loc, p.allocator, "Next Scope", .{}) catch bun.outOfMemory();
                    p.panic("Scope location {d} must be greater than {d}", .{ loc.start, prev_scope.loc.start });
                }
            }
        }
    }

    // Copy down function arguments into the function body scope. That way we get
    // errors if a statement in the function body tries to re-declare any of the
    // arguments.
    if (comptime kind == js_ast.Scope.Kind.function_body) {
        if (comptime Environment.allow_assert)
            assert(parent.kind == js_ast.Scope.Kind.function_args);

        var iter = scope.parent.?.members.iterator();
        while (iter.next()) |entry| {
            // Don't copy down the optional function expression name. Re-declaring
            // the name of a function expression is allowed.
            const value = entry.value_ptr.*;
            const adjacent_kind = p.symbols.items[value.ref.innerIndex()].kind;
            if (adjacent_kind != .hoisted_function) {
                try scope.members.put(allocator, entry.key_ptr.*, value);
            }
        }
    }

    // Remember the length in case we call popAndDiscardScope() later
    const scope_index = p.scopes_in_order.items.len;
    try p.scopes_in_order.append(allocator, ScopeOrder{ .loc = loc, .scope = scope });
    return scope_index;
}
|
||
|
||
// Note: do not write to "p.log" in this function. Any errors due to conversion
|
||
// from expression to binding should be written to "invalidLog" instead. That
|
||
// way we can potentially keep this as an expression if it turns out it's not
|
||
// needed as a binding after all.
|
||
/// Convert an already-parsed expression into a destructuring binding pattern
/// (identifier, array pattern, or object pattern). Returns null for a missing
/// element. Any construct that cannot appear in a binding position is recorded
/// in "invalid_loc" rather than logged directly — see the note above this
/// function about keeping the expression usable if binding conversion fails.
fn convertExprToBinding(p: *P, expr: ExprNodeIndex, invalid_loc: *LocList) ?Binding {
    switch (expr.data) {
        .e_missing => {
            // Elision in an array pattern, e.g. "[, x] = y".
            return null;
        },
        .e_identifier => |ex| {
            return p.b(B.Identifier{ .ref = ex.ref }, expr.loc);
        },
        .e_array => |ex| {
            // A comma after a spread element is invalid in a binding pattern.
            if (ex.comma_after_spread) |spread| {
                invalid_loc.append(.{
                    .loc = spread,
                    .kind = .spread,
                }) catch unreachable;
            }

            // Parenthesized patterns are invalid, e.g. "([a]) = b".
            if (ex.is_parenthesized) {
                invalid_loc.append(.{
                    .loc = p.source.rangeOfOperatorBefore(expr.loc, "(").loc,
                    .kind = .parentheses,
                }) catch unreachable;
            }

            // p.markSyntaxFeature(Destructing)
            var items = List(js_ast.ArrayBinding).initCapacity(p.allocator, ex.items.len) catch unreachable;
            var is_spread = false;
            for (ex.items.slice(), 0..) |_, i| {
                var item = ex.items.ptr[i];
                if (item.data == .e_spread) {
                    // Unwrap "...x" and convert the inner expression.
                    // NOTE(review): `is_spread` stays true for any items after
                    // the spread; presumably harmless because a trailing comma
                    // after a spread was already flagged above — confirm.
                    is_spread = true;
                    item = item.data.e_spread.value;
                }
                const res = p.convertExprToBindingAndInitializer(&item, invalid_loc, is_spread);

                items.appendAssumeCapacity(js_ast.ArrayBinding{
                    // It's valid for it to be missing
                    // An example:
                    //      Promise.all(promises).then(([, len]) => true);
                    //                                   ^ Binding is missing there
                    .binding = res.binding orelse p.b(B.Missing{}, item.loc),
                    .default_value = res.expr,
                });
            }

            return p.b(B.Array{
                .items = items.items,
                .has_spread = is_spread,
                .is_single_line = ex.is_single_line,
            }, expr.loc);
        },
        .e_object => |ex| {
            if (ex.comma_after_spread) |sp| {
                invalid_loc.append(.{ .loc = sp, .kind = .spread }) catch unreachable;
            }

            if (ex.is_parenthesized) {
                invalid_loc.append(.{ .loc = p.source.rangeOfOperatorBefore(expr.loc, "(").loc, .kind = .parentheses }) catch unreachable;
            }
            // p.markSyntaxFeature(compat.Destructuring, p.source.RangeOfOperatorAfter(expr.Loc, "{"))

            var properties = List(B.Property).initCapacity(p.allocator, ex.properties.len) catch unreachable;
            for (ex.properties.slice()) |*item| {
                // Methods, getters, and setters cannot appear in a binding
                // pattern; record which kind for the diagnostic and skip.
                if (item.flags.contains(.is_method) or item.kind == .get or item.kind == .set) {
                    invalid_loc.append(.{
                        .loc = item.key.?.loc,
                        .kind = if (item.flags.contains(.is_method))
                            InvalidLoc.Tag.method
                        else if (item.kind == .get)
                            InvalidLoc.Tag.getter
                        else
                            InvalidLoc.Tag.setter,
                    }) catch unreachable;
                    continue;
                }
                const value = &item.value.?;
                const tup = p.convertExprToBindingAndInitializer(value, invalid_loc, false);
                // Prefer a "= default" found inside the value, falling back to
                // an initializer recorded on the property itself.
                const initializer = tup.expr orelse item.initializer;
                const is_spread = item.kind == .spread or item.flags.contains(.is_spread);
                properties.appendAssumeCapacity(B.Property{
                    .flags = Flags.Property.init(.{
                        .is_spread = is_spread,
                        .is_computed = item.flags.contains(.is_computed),
                    }),
                    .key = item.key orelse p.newExpr(E.Missing{}, expr.loc),
                    .value = tup.binding orelse p.b(B.Missing{}, expr.loc),
                    .default_value = initializer,
                });
            }

            return p.b(B.Object{
                .properties = properties.items,
                .is_single_line = ex.is_single_line,
            }, expr.loc);
        },
        else => {
            // Anything else (calls, literals, etc.) is not a valid binding target.
            invalid_loc.append(.{ .loc = expr.loc, .kind = .unknown }) catch unreachable;
            return null;
        },
    }

    // Unreachable in practice: every switch prong above returns.
    return null;
}
|
||
|
||
/// Split an expression of the form "pattern = default" into its binding
/// pattern and default-value initializer, then convert the pattern via
/// convertExprToBinding. When "is_spread" is set, a default initializer is a
/// syntax error ("...x = y" is not allowed) and is reported immediately.
fn convertExprToBindingAndInitializer(p: *P, _expr: *ExprNodeIndex, invalid_log: *LocList, is_spread: bool) ExprBindingTuple {
    var initializer: ?ExprNodeIndex = null;
    var expr = _expr;
    // zig syntax is sometimes painful
    switch (expr.*.data) {
        .e_binary => |bin| {
            // "a = b" in a binding position: "b" becomes the default value
            // and we convert only the left-hand side.
            if (bin.op == .bin_assign) {
                initializer = bin.right;
                expr = &bin.left;
            }
        },
        else => {},
    }

    const bind = p.convertExprToBinding(expr.*, invalid_log);
    if (initializer) |initial| {
        // Locate the "=" token for a precise error range.
        const equalsRange = p.source.rangeOfOperatorBefore(initial.loc, "=");
        if (is_spread) {
            p.log.addRangeError(p.source, equalsRange, "A rest argument cannot have a default initializer") catch unreachable;
        } else {
            // p.markSyntaxFeature();
        }
    }
    return ExprBindingTuple{ .binding = bind, .expr = initializer };
}
|
||
|
||
const BinaryExpressionVisitor = struct {
|
||
e: *E.Binary,
|
||
loc: logger.Loc,
|
||
in: ExprIn,
|
||
|
||
/// Input for visiting the left child
|
||
left_in: ExprIn,
|
||
|
||
/// "Local variables" passed from "checkAndPrepare" to "visitRightAndFinish"
|
||
is_stmt_expr: bool = false,
|
||
|
||
pub fn visitRightAndFinish(
|
||
v: *BinaryExpressionVisitor,
|
||
p: *P,
|
||
) Expr {
|
||
var e_ = v.e;
|
||
const is_call_target = @as(Expr.Tag, p.call_target) == .e_binary and e_ == p.call_target.e_binary;
|
||
// const is_stmt_expr = @as(Expr.Tag, p.stmt_expr_value) == .e_binary and expr.data.e_binary == p.stmt_expr_value.e_binary;
|
||
const was_anonymous_named_expr = e_.right.isAnonymousNamed();
|
||
|
||
// Mark the control flow as dead if the branch is never taken
|
||
switch (e_.op) {
|
||
.bin_logical_or => {
|
||
const side_effects = SideEffects.toBoolean(p, e_.left.data);
|
||
if (side_effects.ok and side_effects.value) {
|
||
// "true || dead"
|
||
const old = p.is_control_flow_dead;
|
||
p.is_control_flow_dead = true;
|
||
e_.right = p.visitExpr(e_.right);
|
||
p.is_control_flow_dead = old;
|
||
} else {
|
||
e_.right = p.visitExpr(e_.right);
|
||
}
|
||
},
|
||
.bin_logical_and => {
|
||
const side_effects = SideEffects.toBoolean(p, e_.left.data);
|
||
if (side_effects.ok and !side_effects.value) {
|
||
// "false && dead"
|
||
const old = p.is_control_flow_dead;
|
||
p.is_control_flow_dead = true;
|
||
e_.right = p.visitExpr(e_.right);
|
||
p.is_control_flow_dead = old;
|
||
} else {
|
||
e_.right = p.visitExpr(e_.right);
|
||
}
|
||
},
|
||
.bin_nullish_coalescing => {
|
||
const side_effects = SideEffects.toNullOrUndefined(p, e_.left.data);
|
||
if (side_effects.ok and !side_effects.value) {
|
||
// "notNullOrUndefined ?? dead"
|
||
const old = p.is_control_flow_dead;
|
||
p.is_control_flow_dead = true;
|
||
e_.right = p.visitExpr(e_.right);
|
||
p.is_control_flow_dead = old;
|
||
} else {
|
||
e_.right = p.visitExpr(e_.right);
|
||
}
|
||
},
|
||
else => {
|
||
e_.right = p.visitExpr(e_.right);
|
||
},
|
||
}
|
||
|
||
// Always put constants on the right for equality comparisons to help
|
||
// reduce the number of cases we have to check during pattern matching. We
|
||
// can only reorder expressions that do not have any side effects.
|
||
switch (e_.op) {
|
||
.bin_loose_eq, .bin_loose_ne, .bin_strict_eq, .bin_strict_ne => {
|
||
if (SideEffects.isPrimitiveToReorder(e_.left.data) and !SideEffects.isPrimitiveToReorder(e_.right.data)) {
|
||
const _left = e_.left;
|
||
const _right = e_.right;
|
||
e_.left = _right;
|
||
e_.right = _left;
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
switch (e_.op) {
|
||
.bin_comma => {
|
||
// "(1, 2)" => "2"
|
||
// "(sideEffects(), 2)" => "(sideEffects(), 2)"
|
||
if (p.options.features.minify_syntax) {
|
||
e_.left = SideEffects.simplifyUnusedExpr(p, e_.left) orelse return e_.right;
|
||
}
|
||
},
|
||
.bin_loose_eq => {
|
||
const equality = e_.left.data.eql(e_.right.data, p, .loose);
|
||
if (equality.ok) {
|
||
if (equality.is_require_main_and_module) {
|
||
p.ignoreUsageOfRuntimeRequire();
|
||
p.ignoreUsage(p.module_ref);
|
||
return p.valueForImportMetaMain(false, v.loc);
|
||
}
|
||
|
||
return p.newExpr(
|
||
E.Boolean{ .value = equality.equal },
|
||
v.loc,
|
||
);
|
||
}
|
||
|
||
if (p.options.features.minify_syntax) {
|
||
// "x == void 0" => "x == null"
|
||
if (e_.left.data == .e_undefined) {
|
||
e_.left.data = .{ .e_null = E.Null{} };
|
||
} else if (e_.right.data == .e_undefined) {
|
||
e_.right.data = .{ .e_null = E.Null{} };
|
||
}
|
||
}
|
||
|
||
// const after_op_loc = locAfterOp(e_.);
|
||
// TODO: warn about equality check
|
||
// TODO: warn about typeof string
|
||
|
||
},
|
||
.bin_strict_eq => {
|
||
const equality = e_.left.data.eql(e_.right.data, p, .strict);
|
||
if (equality.ok) {
|
||
if (equality.is_require_main_and_module) {
|
||
p.ignoreUsage(p.module_ref);
|
||
p.ignoreUsageOfRuntimeRequire();
|
||
return p.valueForImportMetaMain(false, v.loc);
|
||
}
|
||
|
||
return p.newExpr(E.Boolean{ .value = equality.equal }, v.loc);
|
||
}
|
||
|
||
// const after_op_loc = locAfterOp(e_.);
|
||
// TODO: warn about equality check
|
||
// TODO: warn about typeof string
|
||
},
|
||
.bin_loose_ne => {
|
||
const equality = e_.left.data.eql(e_.right.data, p, .loose);
|
||
if (equality.ok) {
|
||
if (equality.is_require_main_and_module) {
|
||
p.ignoreUsage(p.module_ref);
|
||
p.ignoreUsageOfRuntimeRequire();
|
||
return p.valueForImportMetaMain(true, v.loc);
|
||
}
|
||
|
||
return p.newExpr(E.Boolean{ .value = !equality.equal }, v.loc);
|
||
}
|
||
// const after_op_loc = locAfterOp(e_.);
|
||
// TODO: warn about equality check
|
||
// TODO: warn about typeof string
|
||
|
||
// "x != void 0" => "x != null"
|
||
if (@as(Expr.Tag, e_.right.data) == .e_undefined) {
|
||
e_.right = p.newExpr(E.Null{}, e_.right.loc);
|
||
}
|
||
},
|
||
.bin_strict_ne => {
|
||
const equality = e_.left.data.eql(e_.right.data, p, .strict);
|
||
if (equality.ok) {
|
||
if (equality.is_require_main_and_module) {
|
||
p.ignoreUsage(p.module_ref);
|
||
p.ignoreUsageOfRuntimeRequire();
|
||
return p.valueForImportMetaMain(true, v.loc);
|
||
}
|
||
|
||
return p.newExpr(E.Boolean{ .value = !equality.equal }, v.loc);
|
||
}
|
||
},
|
||
.bin_nullish_coalescing => {
|
||
const nullorUndefined = SideEffects.toNullOrUndefined(p, e_.left.data);
|
||
if (nullorUndefined.ok) {
|
||
if (!nullorUndefined.value) {
|
||
return e_.left;
|
||
} else if (nullorUndefined.side_effects == .no_side_effects) {
|
||
// "(null ?? fn)()" => "fn()"
|
||
// "(null ?? this.fn)" => "this.fn"
|
||
// "(null ?? this.fn)()" => "(0, this.fn)()"
|
||
if (is_call_target and e_.right.hasValueForThisInCall()) {
|
||
return Expr.joinWithComma(Expr{ .data = .{ .e_number = .{ .value = 0.0 } }, .loc = e_.left.loc }, e_.right, p.allocator);
|
||
}
|
||
|
||
return e_.right;
|
||
}
|
||
}
|
||
},
|
||
.bin_logical_or => {
|
||
const side_effects = SideEffects.toBoolean(p, e_.left.data);
|
||
if (side_effects.ok and side_effects.value) {
|
||
return e_.left;
|
||
} else if (side_effects.ok and side_effects.side_effects == .no_side_effects) {
|
||
// "(0 || fn)()" => "fn()"
|
||
// "(0 || this.fn)" => "this.fn"
|
||
// "(0 || this.fn)()" => "(0, this.fn)()"
|
||
if (is_call_target and e_.right.hasValueForThisInCall()) {
|
||
return Expr.joinWithComma(Expr{ .data = Prefill.Data.Zero, .loc = e_.left.loc }, e_.right, p.allocator);
|
||
}
|
||
|
||
return e_.right;
|
||
}
|
||
},
|
||
.bin_logical_and => {
|
||
const side_effects = SideEffects.toBoolean(p, e_.left.data);
|
||
if (side_effects.ok) {
|
||
if (!side_effects.value) {
|
||
return e_.left;
|
||
} else if (side_effects.side_effects == .no_side_effects) {
|
||
// "(1 && fn)()" => "fn()"
|
||
// "(1 && this.fn)" => "this.fn"
|
||
// "(1 && this.fn)()" => "(0, this.fn)()"
|
||
if (is_call_target and e_.right.hasValueForThisInCall()) {
|
||
return Expr.joinWithComma(Expr{ .data = Prefill.Data.Zero, .loc = e_.left.loc }, e_.right, p.allocator);
|
||
}
|
||
|
||
return e_.right;
|
||
}
|
||
}
|
||
},
|
||
.bin_add => {
|
||
if (p.should_fold_typescript_constant_expressions) {
|
||
if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
|
||
return p.newExpr(E.Number{ .value = vals[0] + vals[1] }, v.loc);
|
||
}
|
||
|
||
// "'abc' + 'xyz'" => "'abcxyz'"
|
||
if (foldStringAddition(e_.left, e_.right, p.allocator, .normal)) |res| {
|
||
return res;
|
||
}
|
||
|
||
// "(x + 'abc') + 'xyz'" => "'abcxyz'"
|
||
if (e_.left.data.as(.e_binary)) |left| {
|
||
if (left.op == .bin_add) {
|
||
if (foldStringAddition(left.right, e_.right, p.allocator, .nested_left)) |result| {
|
||
return p.newExpr(E.Binary{
|
||
.left = left.left,
|
||
.right = result,
|
||
.op = .bin_add,
|
||
}, e_.left.loc);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
},
|
||
.bin_sub => {
|
||
if (p.should_fold_typescript_constant_expressions) {
|
||
if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
|
||
return p.newExpr(E.Number{ .value = vals[0] - vals[1] }, v.loc);
|
||
}
|
||
}
|
||
},
|
||
.bin_mul => {
|
||
if (p.should_fold_typescript_constant_expressions) {
|
||
if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
|
||
return p.newExpr(E.Number{ .value = vals[0] * vals[1] }, v.loc);
|
||
}
|
||
}
|
||
},
|
||
.bin_div => {
|
||
if (p.should_fold_typescript_constant_expressions) {
|
||
if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
|
||
return p.newExpr(E.Number{ .value = vals[0] / vals[1] }, v.loc);
|
||
}
|
||
}
|
||
},
|
||
.bin_rem => {
|
||
if (p.should_fold_typescript_constant_expressions) {
|
||
if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
|
||
return p.newExpr(
|
||
// Use libc fmod here to be consistent with what JavaScriptCore does
|
||
// https://github.com/oven-sh/WebKit/blob/7a0b13626e5db69aa5a32d037431d381df5dfb61/Source/JavaScriptCore/runtime/MathCommon.cpp#L574-L597
|
||
E.Number{ .value = if (comptime Environment.isNative) bun.C.fmod(vals[0], vals[1]) else std.math.mod(f64, vals[0], vals[1]) catch 0 },
|
||
v.loc,
|
||
);
|
||
}
|
||
}
|
||
},
|
||
.bin_pow => {
|
||
if (p.should_fold_typescript_constant_expressions) {
|
||
if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
|
||
return p.newExpr(E.Number{ .value = std.math.pow(f64, vals[0], vals[1]) }, v.loc);
|
||
}
|
||
}
|
||
},
|
||
.bin_shl => {
|
||
if (p.should_fold_typescript_constant_expressions) {
|
||
if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
|
||
const left = floatToInt32(vals[0]);
|
||
const right: u8 = @intCast(@as(u32, @bitCast(floatToInt32(vals[1]))) % 32);
|
||
const result: i32 = @bitCast(std.math.shl(i32, left, right));
|
||
return p.newExpr(E.Number{
|
||
.value = @floatFromInt(result),
|
||
}, v.loc);
|
||
}
|
||
}
|
||
},
|
||
.bin_shr => {
|
||
if (p.should_fold_typescript_constant_expressions) {
|
||
if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
|
||
const left = floatToInt32(vals[0]);
|
||
const right: u8 = @intCast(@as(u32, @bitCast(floatToInt32(vals[1]))) % 32);
|
||
const result: i32 = @bitCast(std.math.shr(i32, left, right));
|
||
return p.newExpr(E.Number{
|
||
.value = @floatFromInt(result),
|
||
}, v.loc);
|
||
}
|
||
}
|
||
},
|
||
.bin_u_shr => {
|
||
if (p.should_fold_typescript_constant_expressions) {
|
||
if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
|
||
const left: u32 = @bitCast(floatToInt32(vals[0]));
|
||
const right: u8 = @intCast(@as(u32, @bitCast(floatToInt32(vals[1]))) % 32);
|
||
const result: u32 = std.math.shr(u32, left, right);
|
||
return p.newExpr(E.Number{
|
||
.value = @floatFromInt(result),
|
||
}, v.loc);
|
||
}
|
||
}
|
||
},
|
||
.bin_bitwise_and => {
|
||
if (p.should_fold_typescript_constant_expressions) {
|
||
if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
|
||
return p.newExpr(E.Number{
|
||
.value = @floatFromInt((floatToInt32(vals[0]) & floatToInt32(vals[1]))),
|
||
}, v.loc);
|
||
}
|
||
}
|
||
},
|
||
.bin_bitwise_or => {
|
||
if (p.should_fold_typescript_constant_expressions) {
|
||
if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
|
||
return p.newExpr(E.Number{
|
||
.value = @floatFromInt((floatToInt32(vals[0]) | floatToInt32(vals[1]))),
|
||
}, v.loc);
|
||
}
|
||
}
|
||
},
|
||
.bin_bitwise_xor => {
|
||
if (p.should_fold_typescript_constant_expressions) {
|
||
if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| {
|
||
return p.newExpr(E.Number{
|
||
.value = @floatFromInt((floatToInt32(vals[0]) ^ floatToInt32(vals[1]))),
|
||
}, v.loc);
|
||
}
|
||
}
|
||
},
|
||
// ---------------------------------------------------------------------------------------------------
|
||
.bin_assign => {
|
||
// Optionally preserve the name
|
||
if (e_.left.data == .e_identifier) {
|
||
e_.right = p.maybeKeepExprSymbolName(e_.right, p.symbols.items[e_.left.data.e_identifier.ref.innerIndex()].original_name, was_anonymous_named_expr);
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
return Expr{ .loc = v.loc, .data = .{ .e_binary = e_ } };
|
||
}
|
||
|
||
/// Pre-visit hook for a binary expression. Handles the one left-operand shape
/// that must be resolved before the normal visit pass: a private identifier on
/// the left of "in" (e.g. "#field in obj"). Returns a finished expression when
/// the node was fully handled here, or null to continue with the normal visit.
pub fn checkAndPrepare(v: *BinaryExpressionVisitor, p: *P) ?Expr {
    var e_ = v.e;
    switch (e_.left.data) {
        // Special-case private identifiers
        .e_private_identifier => |_private| {
            if (e_.op == .bin_in) {
                var private = _private;
                // Resolve the private name against the enclosing class scopes.
                const name = p.loadNameFromRef(private.ref);
                const result = p.findSymbol(e_.left.loc, name) catch unreachable;
                private.ref = result.ref;

                // Unlike regular identifiers, there are no unbound private identifiers
                const kind: Symbol.Kind = p.symbols.items[result.ref.innerIndex()].kind;
                if (!Symbol.isKindPrivate(kind)) {
                    const r = logger.Range{ .loc = e_.left.loc, .len = @as(i32, @intCast(name.len)) };
                    p.log.addRangeErrorFmt(p.source, r, p.allocator, "Private name \"{s}\" must be declared in an enclosing class", .{name}) catch unreachable;
                }

                // Only the right side is a normal expression; the left side is
                // the (now-resolved) private identifier itself.
                e_.right = p.visitExpr(e_.right);
                e_.left = .{ .data = .{ .e_private_identifier = private }, .loc = e_.left.loc };

                // privateSymbolNeedsToBeLowered
                return Expr{ .loc = v.loc, .data = .{ .e_binary = e_ } };
            }
        },
        else => {},
    }

    // Remember whether this binary expression is itself the statement-level
    // expression (used later for e.g. dropping unused results).
    v.is_stmt_expr = p.stmt_expr_value == .e_binary and p.stmt_expr_value.e_binary == e_;

    // For assignment-like operators the left side must be visited as an
    // assignment target.
    v.left_in = ExprIn{
        .assign_target = e_.op.binaryAssignTarget(),
    };

    return null;
}
|
||
};
|
||
|
||
/// Reports an error for a declaration appearing where only a plain statement
/// is allowed (e.g. the un-braced body of an "if" statement).
fn forbidLexicalDecl(p: *P, loc: logger.Loc) anyerror!void {
    try p.log.addError(p.source, loc, "Cannot use a declaration in a single-statement context");
}
|
||
|
||
/// If we attempt to parse TypeScript syntax outside of a TypeScript file
|
||
/// make it a compile error
|
||
inline fn markTypeScriptOnly(_: *const P) void {
    // Comptime guard: referencing this function from a non-TypeScript parser
    // instantiation fails to compile rather than misparsing at runtime.
    if (comptime !is_typescript_enabled) {
        @compileError("This function can only be used in TypeScript");
    }

    // explicitly mark it as unreachable in the hopes that the function doesn't exist at all
    // (redundant with the comptime check above, but gives the optimizer an
    // unreachable hint in the TypeScript-disabled configuration)
    if (!is_typescript_enabled) {
        unreachable;
    }
}
|
||
|
||
/// Flushes expression-parse errors that were deferred instead of reported
/// immediately. NOTE(review): presumably collected during speculative parses
/// (e.g. "(a = b)" that could have been arrow-function arguments) — confirm
/// at the call sites.
fn logExprErrors(p: *P, errors: *DeferredErrors) void {
    // "=" appeared where a default value is not allowed
    if (errors.invalid_expr_default_value) |r| {
        p.log.addRangeError(
            p.source,
            r,
            "Unexpected \"=\"",
        ) catch unreachable;
    }

    // Leftover token after "?"; echo the offending source text in the message
    if (errors.invalid_expr_after_question) |r| {
        p.log.addRangeErrorFmt(p.source, r, p.allocator, "Unexpected {s}", .{p.source.contents[r.loc.i()..r.endI()]}) catch unreachable;
    }

    // if (errors.array_spread_feature) |err| {
    //   p.markSyntaxFeature(compat.ArraySpread, errors.arraySpreadFeature)
    // }
}
|
||
|
||
// This assumes the "function" token has already been parsed
|
||
|
||
/// Parses a function statement, assuming the "function" keyword (and any
/// preceding "async", conveyed via `asyncRange`) has already been consumed.
/// Handles generators, async functions, the optional name in
/// "export default function() {}", and TypeScript-only forms (type
/// parameters, "declare", bodiless overload signatures).
fn parseFnStmt(p: *P, loc: logger.Loc, opts: *ParseStatementOptions, asyncRange: ?logger.Range) !Stmt {
    const is_generator = p.lexer.token == T.t_asterisk;
    const is_async = asyncRange != null;

    if (is_generator) {
        // p.markSyntaxFeature(compat.Generator, p.lexer.Range())
        try p.lexer.next();
    } else if (is_async) {
        // p.markLoweredSyntaxFeature(compat.AsyncAwait, asyncRange, compat.Generator)
    }

    switch (opts.lexical_decl) {
        .forbid => {
            try p.forbidLexicalDecl(loc);
        },

        // Allow certain function statements in certain single-statement contexts
        .allow_fn_inside_if, .allow_fn_inside_label => {
            // Only plain (non-async, non-generator) functions are allowed there
            if (opts.is_typescript_declare or is_generator or is_async) {
                try p.forbidLexicalDecl(loc);
            }
        },
        else => {},
    }

    var name: ?js_ast.LocRef = null;
    var nameText: string = "";

    // The name is optional for "export default function() {}" pseudo-statements
    if (!opts.is_name_optional or p.lexer.token == T.t_identifier) {
        const nameLoc = p.lexer.loc();
        nameText = p.lexer.identifier;
        try p.lexer.expect(T.t_identifier);
        // Difference
        const ref = try p.newSymbol(Symbol.Kind.other, nameText);
        name = js_ast.LocRef{
            .loc = nameLoc,
            .ref = ref,
        };
    }

    // Even anonymous functions can have TypeScript type parameters
    if (is_typescript_enabled) {
        _ = try p.skipTypeScriptTypeParameters(.{ .allow_const_modifier = true });
    }

    // Introduce a fake block scope for function declarations inside if statements
    // (NOTE: ifStmtScopeIndex is currently written but never read back)
    var ifStmtScopeIndex: usize = 0;
    const hasIfScope = opts.lexical_decl == .allow_fn_inside_if;
    if (hasIfScope) {
        ifStmtScopeIndex = try p.pushScopeForParsePass(js_ast.Scope.Kind.block, loc);
    }

    const scopeIndex = try p.pushScopeForParsePass(js_ast.Scope.Kind.function_args, p.lexer.loc());
    var func = try p.parseFn(name, FnOrArrowDataParse{
        .needs_async_loc = loc,
        .async_range = asyncRange orelse logger.Range.None,
        .has_async_range = asyncRange != null,
        .allow_await = if (is_async) AwaitOrYield.allow_expr else AwaitOrYield.allow_ident,
        .allow_yield = if (is_generator) AwaitOrYield.allow_expr else AwaitOrYield.allow_ident,
        .is_typescript_declare = opts.is_typescript_declare,

        // Only allow omitting the body if we're parsing TypeScript
        .allow_missing_body_for_type_script = is_typescript_enabled,
    });
    p.fn_or_arrow_data_parse.has_argument_decorators = false;

    if (comptime is_typescript_enabled) {
        // Don't output anything if it's just a forward declaration of a function
        if (opts.is_typescript_declare or func.flags.contains(.is_forward_declaration)) {
            // Discard the scope entirely: the declaration produces no code
            p.popAndDiscardScope(scopeIndex);

            // Balance the fake block scope introduced above
            if (hasIfScope) {
                p.popScope();
            }

            if (opts.is_typescript_declare and opts.is_namespace_scope and opts.is_export) {
                p.has_non_local_export_declare_inside_namespace = true;
            }

            return p.s(S.TypeScript{}, loc);
        }
    }

    p.popScope();

    // Only declare the function after we know if it had a body or not. Otherwise
    // TypeScript code such as this will double-declare the symbol:
    //
    //   function foo(): void;
    //   function foo(): void {}
    //
    if (name != null) {
        const kind = if (is_generator or is_async)
            Symbol.Kind.generator_or_async_function
        else
            Symbol.Kind.hoisted_function;

        name.?.ref = try p.declareSymbol(kind, name.?.loc, nameText);
        func.name = name;
    }

    func.flags.setPresent(.has_if_scope, hasIfScope);
    func.flags.setPresent(.is_export, opts.is_export);

    // Balance the fake block scope introduced above
    if (hasIfScope) {
        p.popScope();
    }

    return p.s(
        S.Function{
            .func = func,
        },
        loc,
    );
}
|
||
|
||
/// Pops the current scope and erases every trace of it, as if it had never
/// been parsed: the parse-pass scope order is truncated back to `scope_index`
/// and the scope is detached from its parent's child list. Used for
/// speculative parses (e.g. TypeScript declarations that emit no code).
/// Panics if the current scope is not the parent's last child.
fn popAndDiscardScope(p: *P, scope_index: usize) void {
    // Move up to the parent scope
    const to_discard = p.current_scope;
    const parent = to_discard.parent orelse unreachable;

    p.current_scope = parent;

    // Truncate the scope order where we started to pretend we never saw this scope
    p.scopes_in_order.shrinkRetainingCapacity(scope_index);

    // Remove the last child from the parent scope. Mutate through a pointer:
    // the previous code popped from a by-value copy of the list, which only
    // shrank the copy and left the discarded scope attached to the parent.
    const children = &parent.children;
    const last = children.len - 1;
    if (children.slice()[last] != to_discard) {
        p.panic("Internal error", .{});
    }

    _ = children.popOrNull();
}
|
||
|
||
/// Parses a function's parameter list, return type, and (optionally) body,
/// starting at the opening "(". `name` is the already-parsed function name, if
/// any. Saves and restores `p.fn_or_arrow_data_parse` around the parameter
/// list because await/yield are not allowed there. Returns a `G.Fn`; when
/// TypeScript allows a missing body, a bodiless overload is flagged with
/// `.is_forward_declaration`.
fn parseFn(p: *P, name: ?js_ast.LocRef, opts: FnOrArrowDataParse) anyerror!G.Fn {
    // if data.allowAwait and data.allowYield {
    //   p.markSyntaxFeature(compat.AsyncGenerator, data.asyncRange)
    // }

    var func = G.Fn{
        .name = name,

        .flags = Flags.Function.init(.{
            .has_rest_arg = false,
            .is_async = opts.allow_await == .allow_expr,
            .is_generator = opts.allow_yield == .allow_expr,
        }),

        .arguments_ref = null,
        .open_parens_loc = p.lexer.loc(),
    };
    try p.lexer.expect(T.t_open_paren);

    // Await and yield are not allowed in function arguments
    // (snapshot the parse state by bytes; restored after the ")")
    var old_fn_or_arrow_data = std.mem.toBytes(p.fn_or_arrow_data_parse);

    p.fn_or_arrow_data_parse.allow_await = if (opts.allow_await == .allow_expr)
        AwaitOrYield.forbid_all
    else
        AwaitOrYield.allow_ident;

    p.fn_or_arrow_data_parse.allow_yield = if (opts.allow_yield == .allow_expr)
        AwaitOrYield.forbid_all
    else
        AwaitOrYield.allow_ident;

    // Don't suggest inserting "async" before anything if "await" is found
    p.fn_or_arrow_data_parse.needs_async_loc = logger.Loc.Empty;

    // If "super()" is allowed in the body, it's allowed in the arguments
    p.fn_or_arrow_data_parse.allow_super_call = opts.allow_super_call;
    p.fn_or_arrow_data_parse.allow_super_property = opts.allow_super_property;

    var rest_arg: bool = false;
    var arg_has_decorators: bool = false;
    var args = List(G.Arg){};
    while (p.lexer.token != T.t_close_paren) {
        // Skip over "this" type annotations: "function f(this: Foo) {}"
        if (is_typescript_enabled and p.lexer.token == T.t_this) {
            try p.lexer.next();
            if (p.lexer.token == T.t_colon) {
                try p.lexer.next();
                try p.skipTypeScriptType(.lowest);
            }
            if (p.lexer.token != T.t_comma) {
                break;
            }

            try p.lexer.next();
            continue;
        }

        // Parameter decorators: "constructor(@inject() foo) {}"
        var ts_decorators: []ExprNodeIndex = &([_]ExprNodeIndex{});
        if (opts.allow_ts_decorators) {
            ts_decorators = try p.parseTypeScriptDecorators();
            if (ts_decorators.len > 0) {
                arg_has_decorators = true;
            }
        }

        // Rest parameter: "...args"
        if (!func.flags.contains(.has_rest_arg) and p.lexer.token == T.t_dot_dot_dot) {
            // p.markSyntaxFeature
            try p.lexer.next();
            rest_arg = true;
            func.flags.insert(.has_rest_arg);
        }

        var is_typescript_ctor_field = false;
        const is_identifier = p.lexer.token == T.t_identifier;
        var text = p.lexer.identifier;
        var arg = try p.parseBinding(.{});
        var ts_metadata = TypeScript.Metadata.default;

        if (comptime is_typescript_enabled) {
            if (is_identifier and opts.is_constructor) {
                // Skip over TypeScript accessibility modifiers, which turn this argument
                // into a class field when used inside a class constructor. This is known
                // as a "parameter property" in TypeScript.
                while (true) {
                    switch (p.lexer.token) {
                        .t_identifier, .t_open_brace, .t_open_bracket => {
                            if (!js_lexer.TypeScriptAccessibilityModifier.has(text)) {
                                break;
                            }

                            is_typescript_ctor_field = true;

                            // TypeScript requires an identifier binding
                            if (p.lexer.token != .t_identifier) {
                                try p.lexer.expect(.t_identifier);
                            }
                            text = p.lexer.identifier;

                            // Re-parse the binding (the current binding is the TypeScript keyword)
                            arg = try p.parseBinding(.{});
                        },
                        else => {
                            break;
                        },
                    }
                }
            }

            // "function foo(a?) {}"
            if (p.lexer.token == .t_question) {
                try p.lexer.next();
            }

            // "function foo(a: any) {}"
            if (p.lexer.token == .t_colon) {
                try p.lexer.next();
                if (!rest_arg) {
                    // Only record decorator metadata when it will actually be emitted
                    if (p.options.features.emit_decorator_metadata and
                        opts.allow_ts_decorators and
                        (opts.has_argument_decorators or opts.has_decorators or arg_has_decorators))
                    {
                        ts_metadata = try p.skipTypeScriptTypeWithMetadata(.lowest);
                    } else {
                        try p.skipTypeScriptType(.lowest);
                    }
                } else {
                    // rest parameter is always object, leave metadata as m_none
                    try p.skipTypeScriptType(.lowest);
                }
            }
        }

        var parseStmtOpts = ParseStatementOptions{};
        p.declareBinding(.hoisted, &arg, &parseStmtOpts) catch unreachable;

        // Default value: "function foo(a = 1) {}" (not allowed on rest args)
        var default_value: ?ExprNodeIndex = null;
        if (!func.flags.contains(.has_rest_arg) and p.lexer.token == .t_equals) {
            // p.markSyntaxFeature
            try p.lexer.next();
            default_value = try p.parseExpr(.comma);
        }

        args.append(p.allocator, G.Arg{
            .ts_decorators = ExprNodeList.init(ts_decorators),
            .binding = arg,
            .default = default_value,

            // We need to track this because it affects code generation
            .is_typescript_ctor_field = is_typescript_ctor_field,
            .ts_metadata = ts_metadata,
        }) catch unreachable;

        if (p.lexer.token != .t_comma) {
            break;
        }

        if (func.flags.contains(.has_rest_arg)) {
            // JavaScript does not allow a comma after a rest argument
            if (opts.is_typescript_declare) {
                // TypeScript does allow a comma after a rest argument in a "declare" context
                try p.lexer.next();
            } else {
                try p.lexer.expect(.t_close_paren);
            }

            break;
        }

        try p.lexer.next();
        rest_arg = false;
    }
    if (args.items.len > 0) {
        func.args = args.items;
    }

    // Reserve the special name "arguments" in this scope. This ensures that it
    // shadows any variable called "arguments" in any parent scopes. But only do
    // this if it wasn't already declared above because arguments are allowed to
    // be called "arguments", in which case the real "arguments" is inaccessible.
    if (!p.current_scope.members.contains("arguments")) {
        func.arguments_ref = p.declareSymbolMaybeGenerated(.arguments, func.open_parens_loc, arguments_str, false) catch unreachable;
        p.symbols.items[func.arguments_ref.?.innerIndex()].must_not_be_renamed = true;
    }

    try p.lexer.expect(.t_close_paren);
    // Restore the parse state that was saved before the parameter list
    p.fn_or_arrow_data_parse = std.mem.bytesToValue(@TypeOf(p.fn_or_arrow_data_parse), &old_fn_or_arrow_data);

    p.fn_or_arrow_data_parse.has_argument_decorators = arg_has_decorators;

    // "function foo(): any {}"
    if (is_typescript_enabled) {
        if (p.lexer.token == .t_colon) {
            try p.lexer.next();

            if (p.options.features.emit_decorator_metadata and opts.allow_ts_decorators and (opts.has_argument_decorators or opts.has_decorators)) {
                func.return_ts_metadata = try p.skipTypescriptReturnTypeWithMetadata();
            } else {
                try p.skipTypescriptReturnType();
            }
        } else if (p.options.features.emit_decorator_metadata and opts.allow_ts_decorators and (opts.has_argument_decorators or opts.has_decorators)) {
            // No annotation: async functions imply Promise, otherwise undefined
            if (func.flags.contains(.is_async)) {
                func.return_ts_metadata = .m_promise;
            } else {
                func.return_ts_metadata = .m_undefined;
            }
        }
    }

    // "function foo(): any;"
    if (opts.allow_missing_body_for_type_script and p.lexer.token != .t_open_brace) {
        try p.lexer.expectOrInsertSemicolon();
        func.flags.insert(.is_forward_declaration);
        return func;
    }
    var tempOpts = opts;
    func.body = try p.parseFnBody(&tempOpts);

    return func;
}
|
||
|
||
/// Skips a TypeScript return-type annotation (everything after ":" in a
/// signature), including return-position-only forms like "asserts x is T".
pub inline fn skipTypescriptReturnType(p: *P) anyerror!void {
    try p.skipTypeScriptTypeWithOpts(.lowest, TypeScript.SkipTypeOptions.Bitset.initOne(.is_return_type), false, {});
}
|
||
|
||
/// Like `skipTypescriptReturnType`, but also collects coarse type metadata
/// for "emitDecoratorMetadata" code generation.
pub inline fn skipTypescriptReturnTypeWithMetadata(p: *P) anyerror!TypeScript.Metadata {
    var result = TypeScript.Metadata.default;
    try p.skipTypeScriptTypeWithOpts(.lowest, TypeScript.SkipTypeOptions.Bitset.initOne(.is_return_type), true, &result);
    return result;
}
|
||
|
||
/// Parses zero or more "@decorator" expressions and returns them as a slice
/// allocated from `p.allocator`. In non-TypeScript parses this returns an
/// empty slice without touching the lexer.
pub fn parseTypeScriptDecorators(p: *P) ![]ExprNodeIndex {
    if (!is_typescript_enabled) {
        return &([_]ExprNodeIndex{});
    }

    var decorators = ListManaged(ExprNodeIndex).init(p.allocator);
    while (p.lexer.token == T.t_at) {
        try p.lexer.next();

        // Parse a new/call expression with "exprFlagTSDecorator" so we ignore
        // EIndex expressions, since they may be part of a computed property:
        //
        //   class Foo {
        //     @foo ['computed']() {}
        //   }
        //
        // This matches the behavior of the TypeScript compiler.
        try decorators.append(try p.parseExprWithFlags(.new, Expr.EFlags.ts_decorator));
    }

    return decorators.items;
}
|
||
|
||
/// Skips over a TypeScript type annotation without building an AST for it.
/// TypeScript-only: compile error if instantiated in a non-TypeScript parser.
inline fn skipTypeScriptType(p: *P, level: js_ast.Op.Level) anyerror!void {
    p.markTypeScriptOnly();
    try p.skipTypeScriptTypeWithOpts(level, TypeScript.SkipTypeOptions.empty, false, {});
}
|
||
|
||
/// Like `skipTypeScriptType`, but also collects coarse type metadata for
/// "emitDecoratorMetadata" code generation.
inline fn skipTypeScriptTypeWithMetadata(p: *P, level: js_ast.Op.Level) anyerror!TypeScript.Metadata {
    p.markTypeScriptOnly();
    var result = TypeScript.Metadata.default;
    try p.skipTypeScriptTypeWithOpts(level, TypeScript.SkipTypeOptions.empty, true, &result);
    return result;
}
|
||
|
||
/// Skips a binding pattern during TypeScript-only speculative parsing:
/// identifiers, "this", array patterns ("[a, , ...b]"), and object patterns
/// ("{ a, 'x': y, ...rest }"), recursing into nested patterns. Returns
/// error.Backtrack when the current token cannot start a binding, so callers
/// can unwind the speculative parse.
fn skipTypeScriptBinding(p: *P) anyerror!void {
    p.markTypeScriptOnly();
    switch (p.lexer.token) {
        .t_identifier, .t_this => {
            try p.lexer.next();
        },
        .t_open_bracket => {
            try p.lexer.next();

            // "[, , a]" — leading holes
            while (p.lexer.token == .t_comma) {
                try p.lexer.next();
            }
            // "[a, b]"
            while (p.lexer.token != .t_close_bracket) {
                // "[...a]"
                if (p.lexer.token == .t_dot_dot_dot) {
                    try p.lexer.next();
                }

                try p.skipTypeScriptBinding();

                if (p.lexer.token != .t_comma) {
                    break;
                }
                try p.lexer.next();
            }

            try p.lexer.expect(.t_close_bracket);
        },
        .t_open_brace => {
            try p.lexer.next();

            while (p.lexer.token != .t_close_brace) {
                var found_identifier = false;

                switch (p.lexer.token) {
                    .t_identifier => {
                        // May be a shorthand property ("{a}") or a key ("{a: b}")
                        found_identifier = true;
                        try p.lexer.next();
                    },

                    // "{...x}"
                    .t_dot_dot_dot => {
                        try p.lexer.next();

                        if (p.lexer.token != .t_identifier) {
                            try p.lexer.unexpected();
                        }

                        found_identifier = true;
                        try p.lexer.next();
                    },

                    // "{1: y}"
                    // "{'x': y}"
                    .t_string_literal, .t_numeric_literal => {
                        try p.lexer.next();
                    },

                    else => {
                        if (p.lexer.isIdentifierOrKeyword()) {
                            // "{if: x}"
                            try p.lexer.next();
                        } else {
                            try p.lexer.unexpected();
                        }
                    },
                }

                // A non-identifier key must be followed by ": binding"; an
                // identifier key may be (non-shorthand form)
                if (p.lexer.token == .t_colon or !found_identifier) {
                    try p.lexer.expect(.t_colon);
                    try p.skipTypeScriptBinding();
                }

                if (p.lexer.token != .t_comma) {
                    break;
                }

                try p.lexer.next();
            }

            try p.lexer.expect(.t_close_brace);
        },
        else => {
            // try p.lexer.unexpected();
            return error.Backtrack;
        },
    }
}
|
||
|
||
/// Skips a parenthesized TypeScript function-type parameter list, e.g. the
/// "(a: any, ...rest)" part of a function type "(a: any) => void". Handles
/// rest args, optional markers, and type annotations per parameter.
fn skipTypescriptFnArgs(p: *P) anyerror!void {
    p.markTypeScriptOnly();

    try p.lexer.expect(.t_open_paren);

    while (p.lexer.token != .t_close_paren) {
        // "(...a)"
        if (p.lexer.token == .t_dot_dot_dot) {
            try p.lexer.next();
        }

        try p.skipTypeScriptBinding();

        // "(a?)"
        if (p.lexer.token == .t_question) {
            try p.lexer.next();
        }

        // "(a: any)"
        if (p.lexer.token == .t_colon) {
            try p.lexer.next();
            try p.skipTypeScriptType(.lowest);
        }

        // "(a, b)"
        if (p.lexer.token != .t_comma) {
            break;
        }

        try p.lexer.next();
    }

    try p.lexer.expect(.t_close_paren);
}
|
||
|
||
// This is a spot where the TypeScript grammar is highly ambiguous. Here are
|
||
// some cases that are valid:
|
||
//
|
||
// let x = (y: any): (() => {}) => { };
|
||
// let x = (y: any): () => {} => { };
|
||
// let x = (y: any): (y) => {} => { };
|
||
// let x = (y: any): (y[]) => {};
|
||
// let x = (y: any): (a | b) => {};
|
||
//
|
||
// Here are some cases that aren't valid:
|
||
//
|
||
// let x = (y: any): (y) => {};
|
||
// let x = (y: any): (y) => {return 0};
|
||
// let x = (y: any): asserts y is (y) => {};
|
||
//
|
||
/// Disambiguates and skips a type that starts with "(" or "<": either a
/// function type "(args) => Ret" or a parenthesized type "(A | B)". Uses a
/// backtracking attempt at the arrow-argument list to decide (see the grammar
/// ambiguity examples in the comment block above this function). When
/// `get_metadata` is set, writes the coarse decorator-metadata type through
/// `result`.
fn skipTypeScriptParenOrFnType(
    p: *P,
    comptime get_metadata: bool,
    result: if (get_metadata) *TypeScript.Metadata else void,
) anyerror!void {
    p.markTypeScriptOnly();

    if (p.trySkipTypeScriptArrowArgsWithBacktracking()) {
        // Function type: the args parsed, so a return type must follow
        try p.skipTypescriptReturnType();
        if (comptime get_metadata)
            result.* = .m_function;
    } else {
        // Parenthesized type: "(number | string)"
        try p.lexer.expect(.t_open_paren);
        if (comptime get_metadata) {
            result.* = try p.skipTypeScriptTypeWithMetadata(.lowest);
        } else {
            try p.skipTypeScriptType(.lowest);
        }
        try p.lexer.expect(.t_close_paren);
    }
}
|
||
|
||
fn skipTypeScriptTypeWithOpts(
|
||
p: *P,
|
||
level: js_ast.Op.Level,
|
||
opts: TypeScript.SkipTypeOptions.Bitset,
|
||
comptime get_metadata: bool,
|
||
result: if (get_metadata) *TypeScript.Metadata else void,
|
||
) anyerror!void {
|
||
p.markTypeScriptOnly();
|
||
|
||
while (true) {
|
||
switch (p.lexer.token) {
|
||
.t_numeric_literal => {
|
||
try p.lexer.next();
|
||
if (comptime get_metadata) {
|
||
result.* = .m_number;
|
||
}
|
||
},
|
||
.t_big_integer_literal => {
|
||
try p.lexer.next();
|
||
if (comptime get_metadata) {
|
||
result.* = .m_bigint;
|
||
}
|
||
},
|
||
.t_string_literal, .t_no_substitution_template_literal => {
|
||
try p.lexer.next();
|
||
if (comptime get_metadata) {
|
||
result.* = .m_string;
|
||
}
|
||
},
|
||
.t_true, .t_false => {
|
||
try p.lexer.next();
|
||
if (comptime get_metadata) {
|
||
result.* = .m_boolean;
|
||
}
|
||
},
|
||
.t_null => {
|
||
try p.lexer.next();
|
||
if (comptime get_metadata) {
|
||
result.* = .m_null;
|
||
}
|
||
},
|
||
.t_void => {
|
||
try p.lexer.next();
|
||
if (comptime get_metadata) {
|
||
result.* = .m_void;
|
||
}
|
||
},
|
||
.t_const => {
|
||
const r = p.lexer.range();
|
||
try p.lexer.next();
|
||
|
||
// ["const: number]"
|
||
if (opts.contains(.allow_tuple_labels) and p.lexer.token == .t_colon) {
|
||
try p.log.addRangeError(p.source, r, "Unexpected \"const\"");
|
||
}
|
||
},
|
||
|
||
.t_this => {
|
||
try p.lexer.next();
|
||
|
||
// "function check(): this is boolean"
|
||
if (p.lexer.isContextualKeyword("is") and !p.lexer.has_newline_before) {
|
||
try p.lexer.next();
|
||
try p.skipTypeScriptType(.lowest);
|
||
return;
|
||
}
|
||
|
||
if (comptime get_metadata) {
|
||
result.* = .m_object;
|
||
}
|
||
},
|
||
.t_minus => {
|
||
// "-123"
|
||
// "-123n"
|
||
try p.lexer.next();
|
||
|
||
if (p.lexer.token == .t_big_integer_literal) {
|
||
try p.lexer.next();
|
||
if (comptime get_metadata) {
|
||
result.* = .m_bigint;
|
||
}
|
||
} else {
|
||
try p.lexer.expect(.t_numeric_literal);
|
||
if (comptime get_metadata) {
|
||
result.* = .m_number;
|
||
}
|
||
}
|
||
},
|
||
.t_ampersand, .t_bar => {
|
||
// Support things like "type Foo = | A | B" and "type Foo = & A & B"
|
||
try p.lexer.next();
|
||
continue;
|
||
},
|
||
.t_import => {
|
||
// "import('fs')"
|
||
try p.lexer.next();
|
||
|
||
// "[import: number]"
|
||
if (opts.contains(.allow_tuple_labels) and p.lexer.token == .t_colon) {
|
||
return;
|
||
}
|
||
|
||
try p.lexer.expect(.t_open_paren);
|
||
try p.lexer.expect(.t_string_literal);
|
||
|
||
// "import('./foo.json', { assert: { type: 'json' } })"
|
||
// "import('./foo.json', { with: { type: 'json' } })"
|
||
if (p.lexer.token == .t_comma) {
|
||
try p.lexer.next();
|
||
try p.skipTypeScriptObjectType();
|
||
|
||
// "import('./foo.json', { assert: { type: 'json' } }, )"
|
||
// "import('./foo.json', { with: { type: 'json' } }, )"
|
||
if (p.lexer.token == .t_comma) {
|
||
try p.lexer.next();
|
||
}
|
||
}
|
||
|
||
try p.lexer.expect(.t_close_paren);
|
||
},
|
||
.t_new => {
|
||
// "new () => Foo"
|
||
// "new <T>() => Foo<T>"
|
||
try p.lexer.next();
|
||
|
||
// "[new: number]"
|
||
if (opts.contains(.allow_tuple_labels) and p.lexer.token == .t_colon) {
|
||
return;
|
||
}
|
||
|
||
_ = try p.skipTypeScriptTypeParameters(.{ .allow_const_modifier = true });
|
||
try p.skipTypeScriptParenOrFnType(get_metadata, result);
|
||
},
|
||
.t_less_than => {
|
||
// "<T>() => Foo<T>"
|
||
_ = try p.skipTypeScriptTypeParameters(.{ .allow_const_modifier = true });
|
||
try p.skipTypeScriptParenOrFnType(get_metadata, result);
|
||
},
|
||
.t_open_paren => {
|
||
// "(number | string)"
|
||
try p.skipTypeScriptParenOrFnType(get_metadata, result);
|
||
},
|
||
.t_identifier => {
|
||
const kind = TypeScript.Identifier.IMap.get(p.lexer.identifier) orelse .normal;
|
||
|
||
var check_type_parameters = true;
|
||
|
||
switch (kind) {
|
||
.prefix_keyof => {
|
||
try p.lexer.next();
|
||
|
||
// Valid:
|
||
// "[keyof: string]"
|
||
// "{[keyof: string]: number}"
|
||
// "{[keyof in string]: number}"
|
||
//
|
||
// Invalid:
|
||
// "A extends B ? keyof : string"
|
||
//
|
||
if ((p.lexer.token != .t_colon and p.lexer.token != .t_in) or (!opts.contains(.is_index_signature) and !opts.contains(.allow_tuple_labels))) {
|
||
try p.skipTypeScriptType(.prefix);
|
||
}
|
||
|
||
if (comptime get_metadata) {
|
||
result.* = .m_object;
|
||
}
|
||
|
||
break;
|
||
},
|
||
.prefix_readonly => {
|
||
try p.lexer.next();
|
||
|
||
if ((p.lexer.token != .t_colon and p.lexer.token != .t_in) or (!opts.contains(.is_index_signature) and !opts.contains(.allow_tuple_labels))) {
|
||
try p.skipTypeScriptType(.prefix);
|
||
}
|
||
|
||
// assume array or tuple literal
|
||
if (comptime get_metadata) {
|
||
result.* = .m_array;
|
||
}
|
||
|
||
break;
|
||
},
|
||
.infer => {
|
||
try p.lexer.next();
|
||
|
||
// "type Foo = Bar extends [infer T] ? T : null"
|
||
// "type Foo = Bar extends [infer T extends string] ? T : null"
|
||
// "type Foo = Bar extends [infer T extends string ? infer T : never] ? T : null"
|
||
// "type Foo = { [infer in Bar]: number }"
|
||
if ((p.lexer.token != .t_colon and p.lexer.token != .t_in) or (!opts.contains(.is_index_signature) and !opts.contains(.allow_tuple_labels))) {
|
||
try p.lexer.expect(.t_identifier);
|
||
if (p.lexer.token == .t_extends) {
|
||
_ = p.trySkipTypeScriptConstraintOfInferTypeWithBacktracking(opts);
|
||
}
|
||
}
|
||
|
||
break;
|
||
},
|
||
.unique => {
|
||
try p.lexer.next();
|
||
|
||
// "let foo: unique symbol"
|
||
if (p.lexer.isContextualKeyword("symbol")) {
|
||
try p.lexer.next();
|
||
break;
|
||
}
|
||
},
|
||
.abstract => {
|
||
try p.lexer.next();
|
||
|
||
// "let foo: abstract new () => {}" added in TypeScript 4.2
|
||
if (p.lexer.token == .t_new) {
|
||
continue;
|
||
}
|
||
},
|
||
.asserts => {
|
||
try p.lexer.next();
|
||
|
||
// "function assert(x: boolean): asserts x"
|
||
// "function assert(x: boolean): asserts x is boolean"
|
||
if (opts.contains(.is_return_type) and !p.lexer.has_newline_before and (p.lexer.token == .t_identifier or p.lexer.token == .t_this)) {
|
||
try p.lexer.next();
|
||
}
|
||
},
|
||
.primitive_any => {
|
||
try p.lexer.next();
|
||
check_type_parameters = false;
|
||
if (comptime get_metadata) {
|
||
result.* = .m_any;
|
||
}
|
||
},
|
||
.primitive_never => {
|
||
try p.lexer.next();
|
||
check_type_parameters = false;
|
||
if (comptime get_metadata) {
|
||
result.* = .m_never;
|
||
}
|
||
},
|
||
.primitive_unknown => {
|
||
try p.lexer.next();
|
||
check_type_parameters = false;
|
||
if (comptime get_metadata) {
|
||
result.* = .m_unknown;
|
||
}
|
||
},
|
||
.primitive_undefined => {
|
||
try p.lexer.next();
|
||
check_type_parameters = false;
|
||
if (comptime get_metadata) {
|
||
result.* = .m_undefined;
|
||
}
|
||
},
|
||
.primitive_object => {
|
||
try p.lexer.next();
|
||
check_type_parameters = false;
|
||
if (comptime get_metadata) {
|
||
result.* = .m_object;
|
||
}
|
||
},
|
||
.primitive_number => {
|
||
try p.lexer.next();
|
||
check_type_parameters = false;
|
||
if (comptime get_metadata) {
|
||
result.* = .m_number;
|
||
}
|
||
},
|
||
.primitive_string => {
|
||
try p.lexer.next();
|
||
check_type_parameters = false;
|
||
if (comptime get_metadata) {
|
||
result.* = .m_string;
|
||
}
|
||
},
|
||
.primitive_boolean => {
|
||
try p.lexer.next();
|
||
check_type_parameters = false;
|
||
if (comptime get_metadata) {
|
||
result.* = .m_boolean;
|
||
}
|
||
},
|
||
.primitive_bigint => {
|
||
try p.lexer.next();
|
||
check_type_parameters = false;
|
||
if (comptime get_metadata) {
|
||
result.* = .m_bigint;
|
||
}
|
||
},
|
||
.primitive_symbol => {
|
||
try p.lexer.next();
|
||
check_type_parameters = false;
|
||
if (comptime get_metadata) {
|
||
result.* = .m_symbol;
|
||
}
|
||
},
|
||
else => {
|
||
if (comptime get_metadata) {
|
||
const find_result = p.findSymbol(logger.Loc.Empty, p.lexer.identifier) catch unreachable;
|
||
result.* = .{ .m_identifier = find_result.ref };
|
||
}
|
||
|
||
try p.lexer.next();
|
||
},
|
||
}
|
||
|
||
// "function assert(x: any): x is boolean"
|
||
if (p.lexer.isContextualKeyword("is") and !p.lexer.has_newline_before) {
|
||
try p.lexer.next();
|
||
try p.skipTypeScriptType(.lowest);
|
||
return;
|
||
}
|
||
|
||
// "let foo: any \n <number>foo" must not become a single type
|
||
if (check_type_parameters and !p.lexer.has_newline_before) {
|
||
_ = try p.skipTypeScriptTypeArguments(false);
|
||
}
|
||
},
|
||
.t_typeof => {
|
||
try p.lexer.next();
|
||
|
||
// "[typeof: number]"
|
||
if (opts.contains(.allow_tuple_labels) and p.lexer.token == .t_colon) {
|
||
return;
|
||
}
|
||
|
||
// always `Object`
|
||
if (comptime get_metadata) {
|
||
result.* = .m_object;
|
||
}
|
||
|
||
if (p.lexer.token == .t_import) {
|
||
// "typeof import('fs')"
|
||
continue;
|
||
} else {
|
||
// "typeof x"
|
||
if (!p.lexer.isIdentifierOrKeyword()) {
|
||
try p.lexer.expected(.t_identifier);
|
||
}
|
||
try p.lexer.next();
|
||
|
||
// "typeof x.#y"
|
||
// "typeof x.y"
|
||
while (p.lexer.token == .t_dot) {
|
||
try p.lexer.next();
|
||
|
||
if (!p.lexer.isIdentifierOrKeyword() and p.lexer.token != .t_private_identifier) {
|
||
try p.lexer.expected(.t_identifier);
|
||
}
|
||
try p.lexer.next();
|
||
}
|
||
|
||
if (!p.lexer.has_newline_before) {
|
||
_ = try p.skipTypeScriptTypeArguments(false);
|
||
}
|
||
}
|
||
},
|
||
.t_open_bracket => {
|
||
// "[number, string]"
|
||
// "[first: number, second: string]"
|
||
try p.lexer.next();
|
||
|
||
if (comptime get_metadata) {
|
||
result.* = .m_array;
|
||
}
|
||
|
||
while (p.lexer.token != .t_close_bracket) {
|
||
if (p.lexer.token == .t_dot_dot_dot) {
|
||
try p.lexer.next();
|
||
}
|
||
try p.skipTypeScriptTypeWithOpts(.lowest, TypeScript.SkipTypeOptions.Bitset.initOne(.allow_tuple_labels), false, {});
|
||
if (p.lexer.token == .t_question) {
|
||
try p.lexer.next();
|
||
}
|
||
if (p.lexer.token == .t_colon) {
|
||
try p.lexer.next();
|
||
try p.skipTypeScriptType(.lowest);
|
||
}
|
||
if (p.lexer.token != .t_comma) {
|
||
break;
|
||
}
|
||
try p.lexer.next();
|
||
}
|
||
try p.lexer.expect(.t_close_bracket);
|
||
},
|
||
.t_open_brace => {
|
||
try p.skipTypeScriptObjectType();
|
||
if (comptime get_metadata) {
|
||
result.* = .m_object;
|
||
}
|
||
},
|
||
.t_template_head => {
|
||
// "`${'a' | 'b'}-${'c' | 'd'}`"
|
||
while (true) {
|
||
try p.lexer.next();
|
||
try p.skipTypeScriptType(.lowest);
|
||
try p.lexer.rescanCloseBraceAsTemplateToken();
|
||
|
||
if (p.lexer.token == .t_template_tail) {
|
||
try p.lexer.next();
|
||
break;
|
||
}
|
||
}
|
||
if (comptime get_metadata) {
|
||
result.* = .m_string;
|
||
}
|
||
},
|
||
|
||
else => {
|
||
// "[function: number]"
|
||
if (opts.contains(.allow_tuple_labels) and p.lexer.isIdentifierOrKeyword()) {
|
||
if (p.lexer.token != .t_function) {
|
||
try p.lexer.unexpected();
|
||
}
|
||
try p.lexer.next();
|
||
|
||
if (p.lexer.token != .t_colon) {
|
||
try p.lexer.expect(.t_colon);
|
||
}
|
||
|
||
return;
|
||
}
|
||
|
||
try p.lexer.unexpected();
|
||
},
|
||
}
|
||
break;
|
||
}
|
||
|
||
while (true) {
|
||
switch (p.lexer.token) {
|
||
.t_bar => {
|
||
if (level.gte(.bitwise_or)) {
|
||
return;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
|
||
if (comptime get_metadata) {
|
||
var left = result.*;
|
||
if (left.finishUnion(p)) |final| {
|
||
// finish skipping the rest of the type without collecting type metadata.
|
||
result.* = final;
|
||
try p.skipTypeScriptTypeWithOpts(.bitwise_or, opts, false, {});
|
||
} else {
|
||
try p.skipTypeScriptTypeWithOpts(.bitwise_or, opts, get_metadata, result);
|
||
result.mergeUnion(left);
|
||
}
|
||
} else {
|
||
try p.skipTypeScriptTypeWithOpts(.bitwise_or, opts, false, {});
|
||
}
|
||
},
|
||
.t_ampersand => {
|
||
if (level.gte(.bitwise_and)) {
|
||
return;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
|
||
if (comptime get_metadata) {
|
||
var left = result.*;
|
||
if (left.finishIntersection(p)) |final| {
|
||
// finish skipping the rest of the type without collecting type metadata.
|
||
result.* = final;
|
||
try p.skipTypeScriptTypeWithOpts(.bitwise_and, opts, false, {});
|
||
} else {
|
||
try p.skipTypeScriptTypeWithOpts(.bitwise_and, opts, get_metadata, result);
|
||
result.mergeIntersection(left);
|
||
}
|
||
} else {
|
||
try p.skipTypeScriptTypeWithOpts(.bitwise_and, opts, false, {});
|
||
}
|
||
},
|
||
.t_exclamation => {
|
||
// A postfix "!" is allowed in JSDoc types in TypeScript, which are only
|
||
// present in comments. While it's not valid in a non-comment position,
|
||
// it's still parsed and turned into a soft error by the TypeScript
|
||
// compiler. It turns out parsing this is important for correctness for
|
||
// "as" casts because the "!" token must still be consumed.
|
||
if (p.lexer.has_newline_before) {
|
||
return;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
},
|
||
.t_dot => {
|
||
try p.lexer.next();
|
||
if (!p.lexer.isIdentifierOrKeyword()) {
|
||
try p.lexer.expect(.t_identifier);
|
||
}
|
||
|
||
if (comptime get_metadata) {
|
||
if (result.* == .m_identifier) {
|
||
var dot = List(Ref).initCapacity(p.allocator, 2) catch unreachable;
|
||
dot.appendAssumeCapacity(result.m_identifier);
|
||
const find_result = p.findSymbol(logger.Loc.Empty, p.lexer.identifier) catch unreachable;
|
||
dot.appendAssumeCapacity(find_result.ref);
|
||
result.* = .{ .m_dot = dot };
|
||
} else if (result.* == .m_dot) {
|
||
if (p.lexer.isIdentifierOrKeyword()) {
|
||
const find_result = p.findSymbol(logger.Loc.Empty, p.lexer.identifier) catch unreachable;
|
||
result.m_dot.append(p.allocator, find_result.ref) catch unreachable;
|
||
}
|
||
}
|
||
}
|
||
|
||
try p.lexer.next();
|
||
|
||
// "{ <A extends B>(): c.d \n <E extends F>(): g.h }" must not become a single type
|
||
if (!p.lexer.has_newline_before) {
|
||
_ = try p.skipTypeScriptTypeArguments(false);
|
||
}
|
||
},
|
||
.t_open_bracket => {
|
||
// "{ ['x']: string \n ['y']: string }" must not become a single type
|
||
if (p.lexer.has_newline_before) {
|
||
return;
|
||
}
|
||
try p.lexer.next();
|
||
var skipped = false;
|
||
if (p.lexer.token != .t_close_bracket) {
|
||
skipped = true;
|
||
try p.skipTypeScriptType(.lowest);
|
||
}
|
||
try p.lexer.expect(.t_close_bracket);
|
||
|
||
if (comptime get_metadata) {
|
||
if (result.* == .m_none) {
|
||
result.* = .m_array;
|
||
} else {
|
||
// if something was skipped, it is object type
|
||
if (skipped) {
|
||
result.* = .m_object;
|
||
} else {
|
||
result.* = .m_array;
|
||
}
|
||
}
|
||
}
|
||
},
|
||
.t_extends => {
|
||
// "{ x: number \n extends: boolean }" must not become a single type
|
||
if (p.lexer.has_newline_before or opts.contains(.disallow_conditional_types)) {
|
||
return;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
|
||
// The type following "extends" is not permitted to be another conditional type
|
||
var extends_type = if (get_metadata) TypeScript.Metadata.default else {};
|
||
try p.skipTypeScriptTypeWithOpts(
|
||
.lowest,
|
||
TypeScript.SkipTypeOptions.Bitset.initOne(.disallow_conditional_types),
|
||
get_metadata,
|
||
if (get_metadata) &extends_type else {},
|
||
);
|
||
|
||
if (comptime get_metadata) {
|
||
// intersection
|
||
try p.lexer.expect(.t_question);
|
||
var left = try p.skipTypeScriptTypeWithMetadata(.lowest);
|
||
try p.lexer.expect(.t_colon);
|
||
if (left.finishIntersection(p)) |final| {
|
||
result.* = final;
|
||
try p.skipTypeScriptType(.lowest);
|
||
} else {
|
||
try p.skipTypeScriptTypeWithOpts(.bitwise_and, TypeScript.SkipTypeOptions.empty, get_metadata, result);
|
||
result.mergeIntersection(left);
|
||
}
|
||
} else {
|
||
try p.lexer.expect(.t_question);
|
||
try p.skipTypeScriptType(.lowest);
|
||
try p.lexer.expect(.t_colon);
|
||
try p.skipTypeScriptType(.lowest);
|
||
}
|
||
},
|
||
else => {
|
||
return;
|
||
},
|
||
}
|
||
}
|
||
}
|
||
/// Skips a TypeScript object type literal ("{ ... }") without building an AST.
/// The lexer must be positioned at the opening "{"; on return the matching "}"
/// has been consumed. Handles index signatures, mapped types, optional ("?")
/// markers, initialization assertions ("!"), and method signatures.
fn skipTypeScriptObjectType(p: *P) anyerror!void {
    // Object types are TypeScript-only syntax.
    p.markTypeScriptOnly();

    try p.lexer.expect(.t_open_brace);

    while (p.lexer.token != .t_close_brace) {
        // "{ -readonly [K in keyof T]: T[K] }"
        // "{ +readonly [K in keyof T]: T[K] }"
        if (p.lexer.token == .t_plus or p.lexer.token == .t_minus) {
            try p.lexer.next();
        }

        // Skip over modifiers and the property identifier
        var found_key = false;
        while (p.lexer.isIdentifierOrKeyword() or p.lexer.token == .t_string_literal or p.lexer.token == .t_numeric_literal) {
            try p.lexer.next();
            found_key = true;
        }

        if (p.lexer.token == .t_open_bracket) {
            // Index signature or computed property
            try p.lexer.next();
            try p.skipTypeScriptTypeWithOpts(.lowest, TypeScript.SkipTypeOptions.Bitset.initOne(.is_index_signature), false, {});

            // "{ [key: string]: number }"
            // "{ readonly [K in keyof T]: T[K] }"
            switch (p.lexer.token) {
                .t_colon => {
                    // Index signature: the key type was already skipped above;
                    // this skips the value type.
                    try p.lexer.next();
                    try p.skipTypeScriptType(.lowest);
                },
                .t_in => {
                    // Mapped type: skip the constraint after "in".
                    try p.lexer.next();
                    try p.skipTypeScriptType(.lowest);
                    if (p.lexer.isContextualKeyword("as")) {
                        // "{ [K in keyof T as `get-${K}`]: T[K] }"
                        try p.lexer.next();
                        try p.skipTypeScriptType(.lowest);
                    }
                },
                else => {},
            }

            try p.lexer.expect(.t_close_bracket);

            // "{ [K in keyof T]+?: T[K] }"
            // "{ [K in keyof T]-?: T[K] }"
            switch (p.lexer.token) {
                .t_plus, .t_minus => {
                    try p.lexer.next();
                },
                else => {},
            }

            // A bracketed member counts as having a key.
            found_key = true;
        }

        // "?" indicates an optional property
        // "!" indicates an initialization assertion
        if (found_key and (p.lexer.token == .t_question or p.lexer.token == .t_exclamation)) {
            try p.lexer.next();
        }

        // Type parameters come right after the optional mark
        _ = try p.skipTypeScriptTypeParameters(.{
            .allow_const_modifier = true,
        });

        switch (p.lexer.token) {
            .t_colon => {
                // Regular property
                if (!found_key) {
                    try p.lexer.expect(.t_identifier);
                }

                try p.lexer.next();
                try p.skipTypeScriptType(.lowest);
            },
            .t_open_paren => {
                // Method signature
                try p.skipTypescriptFnArgs();

                if (p.lexer.token == .t_colon) {
                    try p.lexer.next();
                    try p.skipTypescriptReturnType();
                }
            },
            else => {
                // A bare key with no ":" or "(" is only valid if we actually
                // saw a key (shorthand-ish member); otherwise it's a syntax error.
                if (!found_key) {
                    try p.lexer.unexpected();
                    return error.SyntaxError;
                }
            },
        }
        // Member separators: comma or semicolon, or a newline (ASI-like), or
        // the closing brace itself.
        switch (p.lexer.token) {
            .t_close_brace => {},
            .t_comma, .t_semicolon => {
                try p.lexer.next();
            },
            else => {
                if (!p.lexer.has_newline_before) {
                    try p.lexer.unexpected();
                    return error.SyntaxError;
                }
            },
        }
    }
    try p.lexer.expect(.t_close_brace);
}
|
||
|
||
/// Post-processes a parsed `import` statement: registers the import record,
/// declares symbols for the namespace / default / named bindings, and applies
/// macro handling and macro remaps. Returns the (possibly rewritten) statement,
/// or an empty statement when the import was fully consumed (a macro import,
/// or an import whose every specifier was remapped away).
fn processImportStatement(p: *P, stmt_: S.Import, path: ParsedPath, loc: logger.Loc, was_originally_bare_import: bool) anyerror!Stmt {
    const is_macro = FeatureFlags.is_macro_enabled and (path.is_macro or js_ast.Macro.isMacroPath(path.text));
    var stmt = stmt_;
    if (is_macro) {
        // Macro import: bind every name to the macro import record and drop
        // the statement. The record is placed in the macro namespace and
        // marked unused so it is not emitted as a runtime import.
        const id = p.addImportRecord(.stmt, path.loc, path.text);
        p.import_records.items[id].path.namespace = js_ast.Macro.namespace;
        p.import_records.items[id].is_unused = true;

        if (stmt.default_name) |name_loc| {
            const name = p.loadNameFromRef(name_loc.ref.?);
            const ref = try p.declareSymbol(.other, name_loc.loc, name);
            try p.is_import_item.put(p.allocator, ref, {});
            try p.macro.refs.put(ref, id);
        }

        for (stmt.items) |item| {
            const name = p.loadNameFromRef(item.name.ref.?);
            const ref = try p.declareSymbol(.other, item.name.loc, name);
            try p.is_import_item.put(p.allocator, ref, {});
            try p.macro.refs.put(ref, id);
        }

        return p.s(S.Empty{}, loc);
    }

    // Per-specifier macro remaps for this module path (presumably configured
    // via the macro context — confirm against MacroContext.getRemap).
    const macro_remap = if ((comptime allow_macros) and !is_macro)
        p.options.macro_context.getRemap(path.text)
    else
        null;

    stmt.import_record_index = p.addImportRecord(.stmt, path.loc, path.text);
    p.import_records.items[stmt.import_record_index].was_originally_bare_import = was_originally_bare_import;

    if (stmt.star_name_loc) |star| {
        // "import * as ns from ...": declare the explicit namespace symbol.
        const name = p.loadNameFromRef(stmt.namespace_ref);

        stmt.namespace_ref = try p.declareSymbol(.import, star, name);

        if (comptime track_symbol_usage_during_parse_pass) {
            p.parse_pass_symbol_uses.put(name, .{
                .ref = stmt.namespace_ref,
                .import_record_index = stmt.import_record_index,
            }) catch unreachable;
        }
    } else {
        // No explicit namespace binding: synthesize a hidden
        // "import_<name>" symbol to stand in for the module namespace.
        var path_name = fs.PathName.init(path.text);
        const name = try strings.append(p.allocator, "import_", try path_name.nonUniqueNameString(p.allocator));
        stmt.namespace_ref = try p.newSymbol(.other, name);
        var scope: *Scope = p.current_scope;
        try scope.generated.push(p.allocator, stmt.namespace_ref);
    }

    // Map of alias -> binding for everything imported through this namespace.
    var item_refs = ImportItemForNamespaceMap.init(p.allocator);
    const count_excluding_namespace = @as(u16, @intCast(stmt.items.len)) +
        @as(u16, @intCast(@intFromBool(stmt.default_name != null)));

    try item_refs.ensureUnusedCapacity(count_excluding_namespace);
    // Even though we allocate ahead of time here
    // we cannot use putAssumeCapacity because a symbol can have existing links
    // those may write to this hash table, so this estimate may be innaccurate
    try p.is_import_item.ensureUnusedCapacity(p.allocator, count_excluding_namespace);
    var remap_count: u32 = 0;
    // Link the default item to the namespace
    if (stmt.default_name) |*name_loc| outer: {
        const name = p.loadNameFromRef(name_loc.ref.?);
        const ref = try p.declareSymbol(.import, name_loc.loc, name);
        name_loc.ref = ref;
        try p.is_import_item.put(p.allocator, ref, {});

        // ensure every e_import_identifier holds the namespace
        if (p.options.features.hot_module_reloading) {
            const symbol = &p.symbols.items[ref.inner_index];
            if (symbol.namespace_alias == null) {
                symbol.namespace_alias = .{
                    .namespace_ref = stmt.namespace_ref,
                    .alias = "default",
                    .import_record_index = stmt.import_record_index,
                };
            }
        }

        if (macro_remap) |*remap| {
            if (remap.get("default")) |remapped_path| {
                // The default import is remapped to a macro module: point it
                // at a new (macro-namespaced, unused) import record and drop
                // the default binding from this statement.
                const new_import_id = p.addImportRecord(.stmt, path.loc, remapped_path);
                try p.macro.refs.put(ref, new_import_id);

                p.import_records.items[new_import_id].path.namespace = js_ast.Macro.namespace;
                p.import_records.items[new_import_id].is_unused = true;
                if (comptime only_scan_imports_and_do_not_visit) {
                    p.import_records.items[new_import_id].is_internal = true;
                    p.import_records.items[new_import_id].path.is_disabled = true;
                }
                stmt.default_name = null;
                remap_count += 1;
                break :outer;
            }
        }

        if (comptime track_symbol_usage_during_parse_pass) {
            p.parse_pass_symbol_uses.put(name, .{
                .ref = ref,
                .import_record_index = stmt.import_record_index,
            }) catch unreachable;
        }

        // NOTE(review): looks unreachable — the `is_macro` case returned
        // early at the top of this function. Confirm before removing.
        if (is_macro) {
            try p.macro.refs.put(ref, stmt.import_record_index);
            stmt.default_name = null;
            break :outer;
        }

        // NOTE(review): appears to duplicate the
        // track_symbol_usage_during_parse_pass registration above — confirm
        // whether ParsePassSymbolUsageType != void can differ from that flag.
        if (comptime ParsePassSymbolUsageType != void) {
            p.parse_pass_symbol_uses.put(name, .{
                .ref = ref,
                .import_record_index = stmt.import_record_index,
            }) catch unreachable;
        }

        item_refs.putAssumeCapacity(name, name_loc.*);
    }
    // Compaction cursor: named specifiers that survive remapping are packed
    // to the front of stmt.items.
    var end: usize = 0;

    for (stmt.items) |item_| {
        var item = item_;
        const name = p.loadNameFromRef(item.name.ref orelse unreachable);
        const ref = try p.declareSymbol(.import, item.name.loc, name);
        item.name.ref = ref;

        try p.is_import_item.put(p.allocator, ref, {});
        p.checkForNonBMPCodePoint(item.alias_loc, item.alias);

        // ensure every e_import_identifier holds the namespace
        if (p.options.features.hot_module_reloading) {
            const symbol = &p.symbols.items[ref.inner_index];
            if (symbol.namespace_alias == null) {
                symbol.namespace_alias = .{
                    .namespace_ref = stmt.namespace_ref,
                    .alias = item.alias,
                    .import_record_index = stmt.import_record_index,
                };
            }
        }

        if (macro_remap) |*remap| {
            if (remap.get(item.alias)) |remapped_path| {
                // Same remap handling as the default import above; the item is
                // not copied into the compacted list (no `end` increment).
                const new_import_id = p.addImportRecord(.stmt, path.loc, remapped_path);
                try p.macro.refs.put(ref, new_import_id);

                p.import_records.items[new_import_id].path.namespace = js_ast.Macro.namespace;
                p.import_records.items[new_import_id].is_unused = true;
                if (comptime only_scan_imports_and_do_not_visit) {
                    p.import_records.items[new_import_id].is_internal = true;
                    p.import_records.items[new_import_id].path.is_disabled = true;
                }
                remap_count += 1;
                continue;
            }
        }

        if (comptime track_symbol_usage_during_parse_pass) {
            p.parse_pass_symbol_uses.put(name, .{
                .ref = ref,
                .import_record_index = stmt.import_record_index,
            }) catch unreachable;
        }

        item_refs.putAssumeCapacity(item.alias, item.name);
        stmt.items[end] = item;
        end += 1;
    }
    stmt.items = stmt.items[0..end];

    // If we remapped the entire import away
    // i.e. import {graphql} "react-relay"

    if (remap_count > 0 and stmt.items.len == 0 and stmt.default_name == null) {
        p.import_records.items[stmt.import_record_index].path.namespace = js_ast.Macro.namespace;
        p.import_records.items[stmt.import_record_index].is_unused = true;

        if (comptime only_scan_imports_and_do_not_visit) {
            p.import_records.items[stmt.import_record_index].path.is_disabled = true;
            p.import_records.items[stmt.import_record_index].is_internal = true;
        }

        return p.s(S.Empty{}, loc);
    } else if (remap_count > 0) {
        // Some specifiers were removed: release the over-allocated capacity.
        item_refs.shrinkAndFree(stmt.items.len + @as(usize, @intFromBool(stmt.default_name != null)));
    }

    if (path.import_tag != .none) {
        try p.validateImportType(path.import_tag, &stmt);
    }

    // Track the items for this namespace
    try p.import_items_for_namespace.put(p.allocator, stmt.namespace_ref, item_refs);
    return p.s(stmt, loc);
}
|
||
|
||
/// Validates the named specifiers of an import that carries a non-default
/// import tag (e.g. a loader-backed import), and records the tag on the
/// statement's import record. Emits at most one error per statement.
fn validateImportType(p: *P, import_tag: ImportRecord.Tag, stmt: *S.Import) !void {
    @setCold(true);

    if (import_tag.loader() == null) {
        // Not loader-backed; the only other tag handled here is the
        // bake SSR-graph marker, which is just recorded on the record.
        if (import_tag == .bake_resolve_to_ssr_graph) {
            p.import_records.items[stmt.import_record_index].tag = import_tag;
        }
        return;
    }

    p.import_records.items[stmt.import_record_index].tag = import_tag;

    if (import_tag.isSQLite()) {
        // sqlite modules expose exactly two bindings.
        for (stmt.items) |*item| {
            const allowed = strings.eqlComptime(item.alias, "default") or
                strings.eqlComptime(item.alias, "db");
            if (!allowed) {
                try p.log.addError(
                    p.source,
                    item.name.loc,
                    "sqlite imports only support the \"default\" or \"db\" imports",
                );
                break;
            }
        }
    } else if (import_tag.onlySupportsDefaultImports()) {
        for (stmt.items) |*item| {
            if (!strings.eqlComptime(item.alias, "default")) {
                try p.log.addError(
                    p.source,
                    item.name.loc,
                    "This loader type only supports the \"default\" import",
                );
                break;
            }
        }
    }
}
|
||
|
||
// This is the type parameter declarations that go with other symbol
// declarations (class, function, type, etc.)
//
// Skips "<T, U extends V, ...>" without building an AST. The return value
// distinguishes "nothing skipped" from "skipped, but could still be a type
// cast" from "skipped something that is definitely a type parameter list".
fn skipTypeScriptTypeParameters(p: *P, flags: TypeParameterFlag) anyerror!SkipTypeParameterResult {
    p.markTypeScriptOnly();

    if (p.lexer.token != .t_less_than) {
        return .did_not_skip_anything;
    }

    var result = SkipTypeParameterResult.could_be_type_cast;
    try p.lexer.next();

    // "<>" — only legal where the flags permit an empty list.
    if (p.lexer.token == .t_greater_than and flags.allow_empty_type_parameters) {
        try p.lexer.next();
        return .definitely_type_parameters;
    }

    while (true) {
        var has_in = false;
        var has_out = false;
        // Whether the next token MUST be the parameter's identifier.
        var expect_identifier = true;

        var invalid_modifier_range = logger.Range.None;

        // Scan over a sequence of "in" and "out" modifiers (a.k.a. optional
        // variance annotations) as well as "const" modifiers
        while (true) {
            if (p.lexer.token == .t_const) {
                if (invalid_modifier_range.len == 0 and !flags.allow_const_modifier) {
                    // Valid:
                    //   "class Foo<const T> {}"
                    // Invalid:
                    //   "interface Foo<const T> {}"
                    invalid_modifier_range = p.lexer.range();
                }

                result = .definitely_type_parameters;
                try p.lexer.next();
                expect_identifier = true;
                continue;
            }

            if (p.lexer.token == .t_in) {
                if (invalid_modifier_range.len == 0 and (!flags.allow_in_out_variance_annotations or has_in or has_out)) {
                    // Valid:
                    //   "type Foo<in T> = T"
                    // Invalid:
                    //   "type Foo<in in T> = T"
                    //   "type Foo<out in T> = T"
                    invalid_modifier_range = p.lexer.range();
                }

                try p.lexer.next();
                has_in = true;
                expect_identifier = true;
                continue;
            }

            // "out" is a contextual keyword, so it may also BE the identifier.
            if (p.lexer.isContextualKeyword("out")) {
                const r = p.lexer.range();
                if (invalid_modifier_range.len == 0 and !flags.allow_in_out_variance_annotations) {
                    // Valid:
                    //   "type Foo<out T> = T"
                    // Invalid:
                    //   "type Foo<out out T> = T"
                    //   "type Foo<in out T> = T"
                    invalid_modifier_range = r;
                }

                try p.lexer.next();
                if (invalid_modifier_range.len == 0 and has_out and (p.lexer.token == .t_in or p.lexer.token == .t_identifier)) {
                    // Valid:
                    //   "type Foo<out T> = T"
                    //   "type Foo<out out> = T"
                    //   "type Foo<out out, T> = T"
                    //   "type Foo<out out = T> = T"
                    //   "type Foo<out out extends T> = T"
                    // Invalid:
                    //   "type Foo<out out in T> = T"
                    //   "type Foo<out out T> = T"
                    invalid_modifier_range = r;
                }
                has_out = true;
                expect_identifier = false;
                continue;
            }

            break;
        }

        // Only report an error for the first invalid modifier
        if (invalid_modifier_range.len > 0) {
            try p.log.addRangeErrorFmt(
                p.source,
                invalid_modifier_range,
                p.allocator,
                "The modifier \"{s}\" is not valid here",
                .{p.source.textForRange(invalid_modifier_range)},
            );
        }

        // expectIdentifier => Mandatory identifier (e.g. after "type Foo <in ___")
        // !expectIdentifier => Optional identifier (e.g. after "type Foo <out ___" since "out" may be the identifier)
        if (expect_identifier or p.lexer.token == .t_identifier) {
            try p.lexer.expect(.t_identifier);
        }

        // "class Foo<T extends number> {}"
        if (p.lexer.token == .t_extends) {
            result = .definitely_type_parameters;
            try p.lexer.next();
            try p.skipTypeScriptType(.lowest);
        }

        // "class Foo<T = void> {}"
        if (p.lexer.token == .t_equals) {
            result = .definitely_type_parameters;
            try p.lexer.next();
            try p.skipTypeScriptType(.lowest);
        }

        if (p.lexer.token != .t_comma) {
            break;
        }

        try p.lexer.next();

        // A trailing comma before ">" is allowed and proves this was a type
        // parameter list rather than a cast.
        if (p.lexer.token == .t_greater_than) {
            result = .definitely_type_parameters;
            break;
        }
    }

    try p.lexer.expectGreaterThan(false);
    return result;
}
|
||
|
||
/// Synthesizes a "<basename>_default" symbol at `loc` for an anonymous
/// default export, registers it on the current scope's generated list,
/// and returns it as a LocRef.
fn createDefaultName(p: *P, loc: logger.Loc) !js_ast.LocRef {
    // Derive the name from the source path so the generated symbol is
    // recognizable in output.
    const base = try p.source.path.name.nonUniqueNameString(p.allocator);
    const identifier = try std.fmt.allocPrint(p.allocator, "{s}_default", .{base});

    const name = js_ast.LocRef{
        .loc = loc,
        .ref = try p.newSymbol(Symbol.Kind.other, identifier),
    };

    // Track the synthesized symbol on the enclosing scope so it is handled
    // like any other generated symbol.
    try p.current_scope.generated.push(p.allocator, name.ref.?);

    return name;
}
|
||
|
||
/// Appends a fresh symbol to the parser's symbol table and returns a Ref
/// addressing it. Does NOT attach the symbol to any scope.
pub fn newSymbol(p: *P, kind: Symbol.Kind, identifier: string) !Ref {
    // The symbol's inner index is simply its slot in the table.
    const slot: Ref.Int = @truncate(p.symbols.items.len);

    try p.symbols.append(.{
        .kind = kind,
        .original_name = identifier,
        .debug_mode_source_index = if (comptime Environment.allow_assert) p.source.index.get() else 0,
    });

    // Keep the TypeScript use-count array in lockstep with the symbol table.
    if (is_typescript_enabled) {
        try p.ts_use_counts.append(p.allocator, 0);
    }

    return .{
        .inner_index = slot,
        .source_index = @intCast(p.source.index.get()),
        .tag = .symbol,
    };
}
|
||
|
||
/// Parses an optional label after "break"/"continue". Returns null when the
/// next token is not an identifier or a newline precedes it (ASI applies).
fn parseLabelName(p: *P) !?js_ast.LocRef {
    // The label must be an identifier on the same line as the keyword.
    const is_label = p.lexer.token == .t_identifier and !p.lexer.has_newline_before;
    if (!is_label) return null;

    const label_loc = p.lexer.loc();
    const label_ref = try p.storeNameInRef(p.lexer.identifier);
    try p.lexer.next();
    return LocRef{ .loc = label_loc, .ref = label_ref };
}
|
||
|
||
/// Parses a class statement starting at the "class" keyword (the caller may
/// have already consumed decorators and/or "export"). Handles optional names,
/// TypeScript type parameters, and "declare class" erasure.
fn parseClassStmt(p: *P, loc: logger.Loc, opts: *ParseStatementOptions) !Stmt {
    var name: ?js_ast.LocRef = null;
    const class_keyword = p.lexer.range();
    if (p.lexer.token == .t_class) {
        //marksyntaxfeature
        try p.lexer.next();
    } else {
        try p.lexer.expected(.t_class);
    }

    const is_identifier = p.lexer.token == .t_identifier;

    // Parse the class name unless it is optional (e.g. "export default class")
    // AND absent. In TypeScript, "implements" after "class" is a clause, not a
    // name.
    if (!opts.is_name_optional or (is_identifier and (!is_typescript_enabled or !strings.eqlComptime(p.lexer.identifier, "implements")))) {
        const name_loc = p.lexer.loc();
        const name_text = p.lexer.identifier;
        try p.lexer.expect(.t_identifier);

        // We must return here
        // or the lexer will crash loop!
        // example:
        // export class {}
        if (!is_identifier) {
            return error.SyntaxError;
        }

        // "await" is not a valid class name where await is a keyword.
        if (p.fn_or_arrow_data_parse.allow_await != .allow_ident and strings.eqlComptime(name_text, "await")) {
            try p.log.addRangeError(p.source, p.lexer.range(), "Cannot use \"await\" as an identifier here");
        }

        name = LocRef{ .loc = name_loc, .ref = null };
        // "declare class" is type-only, so its name is not declared as a
        // runtime symbol.
        if (!opts.is_typescript_declare) {
            (name orelse unreachable).ref = p.declareSymbol(.class, name_loc, name_text) catch unreachable;
        }
    }

    // Even anonymous classes can have TypeScript type parameters
    if (is_typescript_enabled) {
        _ = try p.skipTypeScriptTypeParameters(.{
            .allow_in_out_variance_annotations = true,
            .allow_const_modifier = true,
        });
    }
    var class_opts = ParseClassOptions{
        .allow_ts_decorators = true,
        .is_type_script_declare = opts.is_typescript_declare,
    };
    if (opts.ts_decorators) |dec| {
        class_opts.ts_decorators = dec.values;
    }

    // The class name lives in its own scope so it can shadow outer bindings
    // inside the class body.
    const scope_index = p.pushScopeForParsePass(.class_name, loc) catch unreachable;
    const class = try p.parseClass(class_keyword, name, class_opts);

    if (comptime is_typescript_enabled) {
        if (opts.is_typescript_declare) {
            // "declare class": discard the scope and erase the statement.
            p.popAndDiscardScope(scope_index);
            if (opts.is_namespace_scope and opts.is_export) {
                p.has_non_local_export_declare_inside_namespace = true;
            }

            return p.s(S.TypeScript{}, loc);
        }
    }

    p.popScope();
    return p.s(S.Class{
        .class = class,
        .is_export = opts.is_export,
    }, loc);
}
|
||
|
||
/// Picks the name to use for a default export of `expr`: reuses the
/// expression's own name (named function/class, identifier, non-macro
/// import identifier) when one exists, otherwise synthesizes a fresh
/// "<file>_default" symbol via createDefaultName.
fn defaultNameForExpr(p: *P, expr: Expr, loc: logger.Loc) LocRef {
    const existing_ref: ?Ref = switch (expr.data) {
        .e_function => |func_container| blk: {
            const fn_name = func_container.func.name orelse break :blk null;
            break :blk fn_name.ref;
        },
        .e_identifier => |ident| ident.ref,
        .e_import_identifier => |ident| blk: {
            // Do not reuse refs registered as macro imports.
            if (!allow_macros or (allow_macros and !p.macro.refs.contains(ident.ref)))
                break :blk ident.ref;
            break :blk null;
        },
        .e_class => |class| blk: {
            const class_name = class.class_name orelse break :blk null;
            break :blk class_name.ref;
        },
        else => null,
    };

    if (existing_ref) |ref| {
        return LocRef{ .loc = loc, .ref = ref };
    }

    return createDefaultName(p, loc) catch unreachable;
}
|
||
|
||
fn parseStmt(p: *P, opts: *ParseStatementOptions) anyerror!Stmt {
|
||
const loc = p.lexer.loc();
|
||
|
||
switch (p.lexer.token) {
|
||
.t_semicolon => {
|
||
try p.lexer.next();
|
||
return Stmt.empty();
|
||
},
|
||
|
||
.t_export => {
|
||
const previous_export_keyword = p.esm_export_keyword;
|
||
if (opts.is_module_scope) {
|
||
p.esm_export_keyword = p.lexer.range();
|
||
} else if (!opts.is_namespace_scope) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
try p.lexer.next();
|
||
|
||
// TypeScript decorators only work on class declarations
|
||
// "@decorator export class Foo {}"
|
||
// "@decorator export abstract class Foo {}"
|
||
// "@decorator export default class Foo {}"
|
||
// "@decorator export default abstract class Foo {}"
|
||
// "@decorator export declare class Foo {}"
|
||
// "@decorator export declare abstract class Foo {}"
|
||
if (opts.ts_decorators != null and p.lexer.token != js_lexer.T.t_class and
|
||
p.lexer.token != js_lexer.T.t_default and
|
||
!p.lexer.isContextualKeyword("abstract") and
|
||
!p.lexer.isContextualKeyword("declare"))
|
||
{
|
||
try p.lexer.expected(js_lexer.T.t_class);
|
||
}
|
||
|
||
switch (p.lexer.token) {
|
||
T.t_class, T.t_const, T.t_function, T.t_var => {
|
||
opts.is_export = true;
|
||
return p.parseStmt(opts);
|
||
},
|
||
|
||
T.t_import => {
|
||
// "export import foo = bar"
|
||
if (is_typescript_enabled and (opts.is_module_scope or opts.is_namespace_scope)) {
|
||
opts.is_export = true;
|
||
return p.parseStmt(opts);
|
||
}
|
||
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
},
|
||
|
||
T.t_enum => {
|
||
if (!is_typescript_enabled) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
opts.is_export = true;
|
||
return p.parseStmt(opts);
|
||
},
|
||
|
||
T.t_identifier => {
|
||
if (p.lexer.isContextualKeyword("let")) {
|
||
opts.is_export = true;
|
||
return p.parseStmt(opts);
|
||
}
|
||
|
||
if (comptime is_typescript_enabled) {
|
||
if (opts.is_typescript_declare and p.lexer.isContextualKeyword("as")) {
|
||
// "export as namespace ns;"
|
||
try p.lexer.next();
|
||
try p.lexer.expectContextualKeyword("namespace");
|
||
try p.lexer.expect(T.t_identifier);
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
|
||
return p.s(S.TypeScript{}, loc);
|
||
}
|
||
}
|
||
|
||
if (p.lexer.isContextualKeyword("async")) {
|
||
const asyncRange = p.lexer.range();
|
||
try p.lexer.next();
|
||
if (p.lexer.has_newline_before) {
|
||
try p.log.addRangeError(p.source, asyncRange, "Unexpected newline after \"async\"");
|
||
}
|
||
|
||
try p.lexer.expect(T.t_function);
|
||
opts.is_export = true;
|
||
return try p.parseFnStmt(loc, opts, asyncRange);
|
||
}
|
||
|
||
if (is_typescript_enabled) {
|
||
if (TypeScript.Identifier.forStr(p.lexer.identifier)) |ident| {
|
||
switch (ident) {
|
||
.s_type => {
|
||
// "export type foo = ..."
|
||
const type_range = p.lexer.range();
|
||
try p.lexer.next();
|
||
if (p.lexer.has_newline_before) {
|
||
try p.log.addErrorFmt(p.source, type_range.end(), p.allocator, "Unexpected newline after \"type\"", .{});
|
||
return error.SyntaxError;
|
||
}
|
||
var skipper = ParseStatementOptions{ .is_module_scope = opts.is_module_scope, .is_export = true };
|
||
try p.skipTypeScriptTypeStmt(&skipper);
|
||
return p.s(S.TypeScript{}, loc);
|
||
},
|
||
.s_namespace, .s_abstract, .s_module, .s_interface => {
|
||
// "export namespace Foo {}"
|
||
// "export abstract class Foo {}"
|
||
// "export module Foo {}"
|
||
// "export interface Foo {}"
|
||
opts.is_export = true;
|
||
return try p.parseStmt(opts);
|
||
},
|
||
.s_declare => {
|
||
// "export declare class Foo {}"
|
||
opts.is_export = true;
|
||
opts.lexical_decl = .allow_all;
|
||
opts.is_typescript_declare = true;
|
||
return try p.parseStmt(opts);
|
||
},
|
||
}
|
||
}
|
||
}
|
||
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
},
|
||
|
||
T.t_default => {
|
||
if (!opts.is_module_scope and (!opts.is_namespace_scope or !opts.is_typescript_declare)) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
const defaultLoc = p.lexer.loc();
|
||
try p.lexer.next();
|
||
|
||
// TypeScript decorators only work on class declarations
|
||
// "@decorator export default class Foo {}"
|
||
// "@decorator export default abstract class Foo {}"
|
||
if (opts.ts_decorators != null and p.lexer.token != T.t_class and !p.lexer.isContextualKeyword("abstract")) {
|
||
try p.lexer.expected(T.t_class);
|
||
}
|
||
|
||
if (p.lexer.isContextualKeyword("async")) {
|
||
const async_range = p.lexer.range();
|
||
try p.lexer.next();
|
||
if (p.lexer.token == T.t_function and !p.lexer.has_newline_before) {
|
||
try p.lexer.next();
|
||
var stmtOpts = ParseStatementOptions{
|
||
.is_name_optional = true,
|
||
.lexical_decl = .allow_all,
|
||
};
|
||
const stmt = try p.parseFnStmt(loc, &stmtOpts, async_range);
|
||
if (@as(Stmt.Tag, stmt.data) == .s_type_script) {
|
||
// This was just a type annotation
|
||
return stmt;
|
||
}
|
||
|
||
const defaultName = if (stmt.data.s_function.func.name) |name|
|
||
js_ast.LocRef{ .loc = name.loc, .ref = name.ref }
|
||
else
|
||
try p.createDefaultName(defaultLoc);
|
||
|
||
const value = js_ast.StmtOrExpr{ .stmt = stmt };
|
||
return p.s(S.ExportDefault{ .default_name = defaultName, .value = value }, loc);
|
||
}
|
||
|
||
const defaultName = try createDefaultName(p, loc);
|
||
|
||
const prefix_expr = try p.parseAsyncPrefixExpr(async_range, Level.comma);
|
||
const expr = try p.parseSuffix(prefix_expr, Level.comma, null, Expr.EFlags.none);
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
const value = js_ast.StmtOrExpr{ .expr = expr };
|
||
p.has_export_default = true;
|
||
return p.s(S.ExportDefault{ .default_name = defaultName, .value = value }, loc);
|
||
}
|
||
|
||
if (p.lexer.token == .t_function or p.lexer.token == .t_class or p.lexer.isContextualKeyword("interface")) {
|
||
var _opts = ParseStatementOptions{
|
||
.ts_decorators = opts.ts_decorators,
|
||
.is_name_optional = true,
|
||
.lexical_decl = .allow_all,
|
||
};
|
||
const stmt = try p.parseStmt(&_opts);
|
||
|
||
const default_name: js_ast.LocRef = default_name_getter: {
|
||
switch (stmt.data) {
|
||
// This was just a type annotation
|
||
.s_type_script => {
|
||
return stmt;
|
||
},
|
||
|
||
.s_function => |func_container| {
|
||
if (func_container.func.name) |name| {
|
||
break :default_name_getter LocRef{ .loc = name.loc, .ref = name.ref };
|
||
}
|
||
},
|
||
.s_class => |class| {
|
||
if (class.class.class_name) |name| {
|
||
break :default_name_getter LocRef{ .loc = name.loc, .ref = name.ref };
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
break :default_name_getter createDefaultName(p, defaultLoc) catch unreachable;
|
||
};
|
||
p.has_export_default = true;
|
||
p.has_es_module_syntax = true;
|
||
return p.s(
|
||
S.ExportDefault{ .default_name = default_name, .value = js_ast.StmtOrExpr{ .stmt = stmt } },
|
||
loc,
|
||
);
|
||
}
|
||
|
||
const is_identifier = p.lexer.token == .t_identifier;
|
||
const name = p.lexer.identifier;
|
||
const expr = try p.parseExpr(.comma);
|
||
|
||
// Handle the default export of an abstract class in TypeScript
|
||
if (is_typescript_enabled and is_identifier and (p.lexer.token == .t_class or opts.ts_decorators != null) and strings.eqlComptime(name, "abstract")) {
|
||
switch (expr.data) {
|
||
.e_identifier => {
|
||
var stmtOpts = ParseStatementOptions{
|
||
.ts_decorators = opts.ts_decorators,
|
||
.is_name_optional = true,
|
||
};
|
||
const stmt: Stmt = try p.parseClassStmt(loc, &stmtOpts);
|
||
|
||
// Use the statement name if present, since it's a better name
|
||
const default_name: js_ast.LocRef = default_name_getter: {
|
||
switch (stmt.data) {
|
||
// This was just a type annotation
|
||
.s_type_script => {
|
||
return stmt;
|
||
},
|
||
|
||
.s_function => |func_container| {
|
||
if (func_container.func.name) |_name| {
|
||
break :default_name_getter LocRef{ .loc = defaultLoc, .ref = _name.ref };
|
||
}
|
||
},
|
||
.s_class => |class| {
|
||
if (class.class.class_name) |_name| {
|
||
break :default_name_getter LocRef{ .loc = defaultLoc, .ref = _name.ref };
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
break :default_name_getter createDefaultName(p, defaultLoc) catch unreachable;
|
||
};
|
||
p.has_export_default = true;
|
||
return p.s(S.ExportDefault{ .default_name = default_name, .value = js_ast.StmtOrExpr{ .stmt = stmt } }, loc);
|
||
},
|
||
else => {
|
||
p.panic("internal error: unexpected", .{});
|
||
},
|
||
}
|
||
}
|
||
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
|
||
// Use the expression name if present, since it's a better name
|
||
p.has_export_default = true;
|
||
return p.s(
|
||
S.ExportDefault{
|
||
.default_name = p.defaultNameForExpr(expr, defaultLoc),
|
||
.value = js_ast.StmtOrExpr{
|
||
.expr = expr,
|
||
},
|
||
},
|
||
loc,
|
||
);
|
||
},
|
||
T.t_asterisk => {
|
||
if (!opts.is_module_scope and !(opts.is_namespace_scope or !opts.is_typescript_declare)) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
var namespace_ref: Ref = Ref.None;
|
||
var alias: ?js_ast.G.ExportStarAlias = null;
|
||
var path: ParsedPath = undefined;
|
||
|
||
if (p.lexer.isContextualKeyword("as")) {
|
||
// "export * as ns from 'path'"
|
||
try p.lexer.next();
|
||
const name = try p.parseClauseAlias("export");
|
||
namespace_ref = try p.storeNameInRef(name);
|
||
alias = G.ExportStarAlias{ .loc = p.lexer.loc(), .original_name = name };
|
||
try p.lexer.next();
|
||
try p.lexer.expectContextualKeyword("from");
|
||
path = try p.parsePath();
|
||
} else {
|
||
// "export * from 'path'"
|
||
try p.lexer.expectContextualKeyword("from");
|
||
path = try p.parsePath();
|
||
const name = try fs.PathName.init(path.text).nonUniqueNameString(p.allocator);
|
||
namespace_ref = try p.storeNameInRef(name);
|
||
}
|
||
|
||
const import_record_index = p.addImportRecord(
|
||
ImportKind.stmt,
|
||
path.loc,
|
||
path.text,
|
||
// TODO: import assertions
|
||
// path.assertions
|
||
);
|
||
|
||
if (path.is_macro) {
|
||
try p.log.addError(p.source, path.loc, "cannot use macro in export statement");
|
||
} else if (path.import_tag != .none) {
|
||
try p.log.addError(p.source, loc, "cannot use export statement with \"type\" attribute");
|
||
}
|
||
|
||
if (comptime track_symbol_usage_during_parse_pass) {
|
||
// In the scan pass, we need _some_ way of knowing *not* to mark as unused
|
||
p.import_records.items[import_record_index].calls_runtime_re_export_fn = true;
|
||
}
|
||
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
p.has_es_module_syntax = true;
|
||
return p.s(S.ExportStar{
|
||
.namespace_ref = namespace_ref,
|
||
.alias = alias,
|
||
.import_record_index = import_record_index,
|
||
}, loc);
|
||
},
|
||
T.t_open_brace => {
|
||
if (!opts.is_module_scope and !(opts.is_namespace_scope or !opts.is_typescript_declare)) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
const export_clause = try p.parseExportClause();
|
||
if (p.lexer.isContextualKeyword("from")) {
|
||
try p.lexer.expectContextualKeyword("from");
|
||
const parsedPath = try p.parsePath();
|
||
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
|
||
if (comptime is_typescript_enabled) {
|
||
// export {type Foo} from 'bar';
|
||
// ->
|
||
// nothing
|
||
// https://www.typescriptlang.org/play?useDefineForClassFields=true&esModuleInterop=false&declaration=false&target=99&isolatedModules=false&ts=4.5.4#code/KYDwDg9gTgLgBDAnmYcDeAxCEC+cBmUEAtnAOQBGAhlGQNwBQQA
|
||
if (export_clause.clauses.len == 0 and export_clause.had_type_only_exports) {
|
||
return p.s(S.TypeScript{}, loc);
|
||
}
|
||
}
|
||
|
||
if (parsedPath.is_macro) {
|
||
try p.log.addError(p.source, loc, "export from cannot be used with \"type\": \"macro\"");
|
||
} else if (parsedPath.import_tag != .none) {
|
||
try p.log.addError(p.source, loc, "export from cannot be used with \"type\" attribute");
|
||
}
|
||
|
||
const import_record_index = p.addImportRecord(.stmt, parsedPath.loc, parsedPath.text);
|
||
const path_name = fs.PathName.init(parsedPath.text);
|
||
const namespace_ref = p.storeNameInRef(
|
||
std.fmt.allocPrint(
|
||
p.allocator,
|
||
"import_{}",
|
||
.{
|
||
path_name.fmtIdentifier(),
|
||
},
|
||
) catch bun.outOfMemory(),
|
||
) catch bun.outOfMemory();
|
||
|
||
if (comptime track_symbol_usage_during_parse_pass) {
|
||
// In the scan pass, we need _some_ way of knowing *not* to mark as unused
|
||
p.import_records.items[import_record_index].calls_runtime_re_export_fn = true;
|
||
}
|
||
p.current_scope.is_after_const_local_prefix = true;
|
||
p.has_es_module_syntax = true;
|
||
return p.s(
|
||
S.ExportFrom{
|
||
.items = export_clause.clauses,
|
||
.is_single_line = export_clause.is_single_line,
|
||
.namespace_ref = namespace_ref,
|
||
.import_record_index = import_record_index,
|
||
},
|
||
loc,
|
||
);
|
||
}
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
|
||
if (comptime is_typescript_enabled) {
|
||
// export {type Foo};
|
||
// ->
|
||
// nothing
|
||
// https://www.typescriptlang.org/play?useDefineForClassFields=true&esModuleInterop=false&declaration=false&target=99&isolatedModules=false&ts=4.5.4#code/KYDwDg9gTgLgBDAnmYcDeAxCEC+cBmUEAtnAOQBGAhlGQNwBQQA
|
||
if (export_clause.clauses.len == 0 and export_clause.had_type_only_exports) {
|
||
return p.s(S.TypeScript{}, loc);
|
||
}
|
||
}
|
||
p.has_es_module_syntax = true;
|
||
return p.s(S.ExportClause{ .items = export_clause.clauses, .is_single_line = export_clause.is_single_line }, loc);
|
||
},
|
||
T.t_equals => {
|
||
// "export = value;"
|
||
|
||
p.esm_export_keyword = previous_export_keyword; // This wasn't an ESM export statement after all
|
||
if (is_typescript_enabled) {
|
||
try p.lexer.next();
|
||
const value = try p.parseExpr(.lowest);
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
return p.s(S.ExportEquals{ .value = value }, loc);
|
||
}
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
},
|
||
else => {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
},
|
||
}
|
||
},
|
||
|
||
.t_function => {
|
||
try p.lexer.next();
|
||
return try p.parseFnStmt(loc, opts, null);
|
||
},
|
||
.t_enum => {
|
||
if (!is_typescript_enabled) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
return p.parseTypescriptEnumStmt(loc, opts);
|
||
},
|
||
.t_at => {
|
||
// Parse decorators before class statements, which are potentially exported
|
||
if (is_typescript_enabled) {
|
||
const scope_index = p.scopes_in_order.items.len;
|
||
const ts_decorators = try p.parseTypeScriptDecorators();
|
||
|
||
// If this turns out to be a "declare class" statement, we need to undo the
|
||
// scopes that were potentially pushed while parsing the decorator arguments.
|
||
// That can look like any one of the following:
|
||
//
|
||
// "@decorator declare class Foo {}"
|
||
// "@decorator declare abstract class Foo {}"
|
||
// "@decorator export declare class Foo {}"
|
||
// "@decorator export declare abstract class Foo {}"
|
||
//
|
||
opts.ts_decorators = DeferredTsDecorators{
|
||
.values = ts_decorators,
|
||
.scope_index = scope_index,
|
||
};
|
||
|
||
// "@decorator class Foo {}"
|
||
// "@decorator abstract class Foo {}"
|
||
// "@decorator declare class Foo {}"
|
||
// "@decorator declare abstract class Foo {}"
|
||
// "@decorator export class Foo {}"
|
||
// "@decorator export abstract class Foo {}"
|
||
// "@decorator export declare class Foo {}"
|
||
// "@decorator export declare abstract class Foo {}"
|
||
// "@decorator export default class Foo {}"
|
||
// "@decorator export default abstract class Foo {}"
|
||
if (p.lexer.token != .t_class and p.lexer.token != .t_export and !p.lexer.isContextualKeyword("abstract") and !p.lexer.isContextualKeyword("declare")) {
|
||
try p.lexer.expected(.t_class);
|
||
}
|
||
|
||
return p.parseStmt(opts);
|
||
}
|
||
// notimpl();
|
||
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
},
|
||
.t_class => {
|
||
if (opts.lexical_decl != .allow_all) {
|
||
try p.forbidLexicalDecl(loc);
|
||
}
|
||
|
||
return try p.parseClassStmt(loc, opts);
|
||
},
|
||
.t_var => {
|
||
try p.lexer.next();
|
||
const decls = try p.parseAndDeclareDecls(.hoisted, opts);
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
return p.s(S.Local{ .kind = .k_var, .decls = Decl.List.fromList(decls), .is_export = opts.is_export }, loc);
|
||
},
|
||
.t_const => {
|
||
if (opts.lexical_decl != .allow_all) {
|
||
try p.forbidLexicalDecl(loc);
|
||
}
|
||
// p.markSyntaxFeature(compat.Const, p.lexer.Range())
|
||
|
||
try p.lexer.next();
|
||
|
||
if (is_typescript_enabled and p.lexer.token == T.t_enum) {
|
||
return p.parseTypescriptEnumStmt(loc, opts);
|
||
}
|
||
|
||
const decls = try p.parseAndDeclareDecls(.constant, opts);
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
|
||
if (!opts.is_typescript_declare) {
|
||
try p.requireInitializers(.k_const, decls.items);
|
||
}
|
||
|
||
return p.s(S.Local{ .kind = .k_const, .decls = Decl.List.fromList(decls), .is_export = opts.is_export }, loc);
|
||
},
|
||
.t_if => {
|
||
try p.lexer.next();
|
||
try p.lexer.expect(.t_open_paren);
|
||
const test_ = try p.parseExpr(.lowest);
|
||
try p.lexer.expect(.t_close_paren);
|
||
var stmtOpts = ParseStatementOptions{
|
||
.lexical_decl = .allow_fn_inside_if,
|
||
};
|
||
const yes = try p.parseStmt(&stmtOpts);
|
||
var no: ?Stmt = null;
|
||
if (p.lexer.token == .t_else) {
|
||
try p.lexer.next();
|
||
stmtOpts = ParseStatementOptions{
|
||
.lexical_decl = .allow_fn_inside_if,
|
||
};
|
||
no = try p.parseStmt(&stmtOpts);
|
||
}
|
||
|
||
return p.s(S.If{
|
||
.test_ = test_,
|
||
.yes = yes,
|
||
.no = no,
|
||
}, loc);
|
||
},
|
||
.t_do => {
|
||
try p.lexer.next();
|
||
var stmtOpts = ParseStatementOptions{};
|
||
const body = try p.parseStmt(&stmtOpts);
|
||
try p.lexer.expect(.t_while);
|
||
try p.lexer.expect(.t_open_paren);
|
||
const test_ = try p.parseExpr(.lowest);
|
||
try p.lexer.expect(.t_close_paren);
|
||
|
||
// This is a weird corner case where automatic semicolon insertion applies
|
||
// even without a newline present
|
||
if (p.lexer.token == .t_semicolon) {
|
||
try p.lexer.next();
|
||
}
|
||
return p.s(S.DoWhile{ .body = body, .test_ = test_ }, loc);
|
||
},
|
||
.t_while => {
|
||
try p.lexer.next();
|
||
|
||
try p.lexer.expect(.t_open_paren);
|
||
const test_ = try p.parseExpr(.lowest);
|
||
try p.lexer.expect(.t_close_paren);
|
||
|
||
var stmtOpts = ParseStatementOptions{};
|
||
const body = try p.parseStmt(&stmtOpts);
|
||
|
||
return p.s(S.While{
|
||
.body = body,
|
||
.test_ = test_,
|
||
}, loc);
|
||
},
|
||
.t_with => {
|
||
try p.lexer.next();
|
||
try p.lexer.expect(.t_open_paren);
|
||
const test_ = try p.parseExpr(.lowest);
|
||
const body_loc = p.lexer.loc();
|
||
try p.lexer.expect(.t_close_paren);
|
||
|
||
// Push a scope so we make sure to prevent any bare identifiers referenced
|
||
// within the body from being renamed. Renaming them might change the
|
||
// semantics of the code.
|
||
_ = try p.pushScopeForParsePass(.with, body_loc);
|
||
var stmtOpts = ParseStatementOptions{};
|
||
const body = try p.parseStmt(&stmtOpts);
|
||
p.popScope();
|
||
|
||
return p.s(S.With{ .body = body, .body_loc = body_loc, .value = test_ }, loc);
|
||
},
|
||
.t_switch => {
|
||
try p.lexer.next();
|
||
|
||
try p.lexer.expect(.t_open_paren);
|
||
const test_ = try p.parseExpr(.lowest);
|
||
try p.lexer.expect(.t_close_paren);
|
||
|
||
const body_loc = p.lexer.loc();
|
||
_ = try p.pushScopeForParsePass(.block, body_loc);
|
||
defer p.popScope();
|
||
|
||
try p.lexer.expect(.t_open_brace);
|
||
var cases = ListManaged(js_ast.Case).init(p.allocator);
|
||
var foundDefault = false;
|
||
var stmtOpts = ParseStatementOptions{ .lexical_decl = .allow_all };
|
||
var value: ?js_ast.Expr = null;
|
||
while (p.lexer.token != .t_close_brace) {
|
||
var body = StmtList.init(p.allocator);
|
||
value = null;
|
||
if (p.lexer.token == .t_default) {
|
||
if (foundDefault) {
|
||
try p.log.addRangeError(p.source, p.lexer.range(), "Multiple default clauses are not allowed");
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
foundDefault = true;
|
||
try p.lexer.next();
|
||
try p.lexer.expect(.t_colon);
|
||
} else {
|
||
try p.lexer.expect(.t_case);
|
||
value = try p.parseExpr(.lowest);
|
||
try p.lexer.expect(.t_colon);
|
||
}
|
||
|
||
caseBody: while (true) {
|
||
switch (p.lexer.token) {
|
||
.t_close_brace, .t_case, .t_default => {
|
||
break :caseBody;
|
||
},
|
||
else => {
|
||
stmtOpts = ParseStatementOptions{ .lexical_decl = .allow_all };
|
||
try body.append(try p.parseStmt(&stmtOpts));
|
||
},
|
||
}
|
||
}
|
||
try cases.append(js_ast.Case{ .value = value, .body = body.items, .loc = logger.Loc.Empty });
|
||
}
|
||
try p.lexer.expect(.t_close_brace);
|
||
return p.s(S.Switch{ .test_ = test_, .body_loc = body_loc, .cases = cases.items }, loc);
|
||
},
|
||
.t_try => {
|
||
try p.lexer.next();
|
||
const body_loc = p.lexer.loc();
|
||
try p.lexer.expect(.t_open_brace);
|
||
_ = try p.pushScopeForParsePass(.block, loc);
|
||
var stmt_opts = ParseStatementOptions{};
|
||
const body = try p.parseStmtsUpTo(.t_close_brace, &stmt_opts);
|
||
p.popScope();
|
||
try p.lexer.next();
|
||
|
||
var catch_: ?js_ast.Catch = null;
|
||
var finally: ?js_ast.Finally = null;
|
||
|
||
if (p.lexer.token == .t_catch) {
|
||
const catch_loc = p.lexer.loc();
|
||
_ = try p.pushScopeForParsePass(.catch_binding, catch_loc);
|
||
try p.lexer.next();
|
||
var binding: ?js_ast.Binding = null;
|
||
|
||
// The catch binding is optional, and can be omitted
|
||
// jarred: TIL!
|
||
if (p.lexer.token != .t_open_brace) {
|
||
try p.lexer.expect(.t_open_paren);
|
||
var value = try p.parseBinding(.{});
|
||
|
||
// Skip over types
|
||
if (is_typescript_enabled and p.lexer.token == .t_colon) {
|
||
try p.lexer.expect(.t_colon);
|
||
try p.skipTypeScriptType(.lowest);
|
||
}
|
||
|
||
try p.lexer.expect(.t_close_paren);
|
||
|
||
// Bare identifiers are a special case
|
||
var kind = Symbol.Kind.other;
|
||
switch (value.data) {
|
||
.b_identifier => {
|
||
kind = .catch_identifier;
|
||
},
|
||
else => {},
|
||
}
|
||
try p.declareBinding(kind, &value, &stmt_opts);
|
||
binding = value;
|
||
}
|
||
|
||
const catch_body_loc = p.lexer.loc();
|
||
try p.lexer.expect(.t_open_brace);
|
||
|
||
_ = try p.pushScopeForParsePass(.block, catch_body_loc);
|
||
const stmts = try p.parseStmtsUpTo(.t_close_brace, &stmt_opts);
|
||
p.popScope();
|
||
try p.lexer.next();
|
||
catch_ = js_ast.Catch{
|
||
.loc = catch_loc,
|
||
.binding = binding,
|
||
.body = stmts,
|
||
.body_loc = catch_body_loc,
|
||
};
|
||
p.popScope();
|
||
}
|
||
|
||
if (p.lexer.token == .t_finally or catch_ == null) {
|
||
const finally_loc = p.lexer.loc();
|
||
_ = try p.pushScopeForParsePass(.block, finally_loc);
|
||
try p.lexer.expect(.t_finally);
|
||
try p.lexer.expect(.t_open_brace);
|
||
const stmts = try p.parseStmtsUpTo(.t_close_brace, &stmt_opts);
|
||
try p.lexer.next();
|
||
finally = js_ast.Finally{ .loc = finally_loc, .stmts = stmts };
|
||
p.popScope();
|
||
}
|
||
|
||
return p.s(
|
||
S.Try{ .body_loc = body_loc, .body = body, .catch_ = catch_, .finally = finally },
|
||
loc,
|
||
);
|
||
},
|
||
.t_for => {
|
||
_ = try p.pushScopeForParsePass(.block, loc);
|
||
defer p.popScope();
|
||
|
||
try p.lexer.next();
|
||
|
||
// "for await (let x of y) {}"
|
||
var isForAwait = p.lexer.isContextualKeyword("await");
|
||
if (isForAwait) {
|
||
const await_range = p.lexer.range();
|
||
if (p.fn_or_arrow_data_parse.allow_await != .allow_expr) {
|
||
try p.log.addRangeError(p.source, await_range, "Cannot use \"await\" outside an async function");
|
||
isForAwait = false;
|
||
} else {
|
||
// TODO: improve error handling here
|
||
// didGenerateError := p.markSyntaxFeature(compat.ForAwait, awaitRange)
|
||
if (p.fn_or_arrow_data_parse.is_top_level) {
|
||
p.top_level_await_keyword = await_range;
|
||
// p.markSyntaxFeature(compat.TopLevelAwait, awaitRange)
|
||
}
|
||
}
|
||
try p.lexer.next();
|
||
}
|
||
|
||
try p.lexer.expect(.t_open_paren);
|
||
|
||
var init_: ?Stmt = null;
|
||
var test_: ?Expr = null;
|
||
var update: ?Expr = null;
|
||
|
||
// "in" expressions aren't allowed here
|
||
p.allow_in = false;
|
||
|
||
var bad_let_range: ?logger.Range = null;
|
||
if (p.lexer.isContextualKeyword("let")) {
|
||
bad_let_range = p.lexer.range();
|
||
}
|
||
|
||
var decls: G.Decl.List = .{};
|
||
const init_loc = p.lexer.loc();
|
||
var is_var = false;
|
||
switch (p.lexer.token) {
|
||
// for (var )
|
||
.t_var => {
|
||
is_var = true;
|
||
try p.lexer.next();
|
||
var stmtOpts = ParseStatementOptions{};
|
||
decls.update(try p.parseAndDeclareDecls(.hoisted, &stmtOpts));
|
||
init_ = p.s(S.Local{ .kind = .k_var, .decls = Decl.List.fromList(decls) }, init_loc);
|
||
},
|
||
// for (const )
|
||
.t_const => {
|
||
try p.lexer.next();
|
||
var stmtOpts = ParseStatementOptions{};
|
||
decls.update(try p.parseAndDeclareDecls(.constant, &stmtOpts));
|
||
init_ = p.s(S.Local{ .kind = .k_const, .decls = Decl.List.fromList(decls) }, init_loc);
|
||
},
|
||
// for (;)
|
||
.t_semicolon => {},
|
||
else => {
|
||
var stmtOpts = ParseStatementOptions{
|
||
.lexical_decl = .allow_all,
|
||
.is_for_loop_init = true,
|
||
};
|
||
|
||
const res = try p.parseExprOrLetStmt(&stmtOpts);
|
||
switch (res.stmt_or_expr) {
|
||
.stmt => |stmt| {
|
||
bad_let_range = null;
|
||
init_ = stmt;
|
||
},
|
||
.expr => |expr| {
|
||
init_ = p.s(S.SExpr{
|
||
.value = expr,
|
||
}, init_loc);
|
||
},
|
||
}
|
||
},
|
||
}
|
||
|
||
// "in" expressions are allowed again
|
||
p.allow_in = true;
|
||
|
||
// Detect for-of loops
|
||
if (p.lexer.isContextualKeyword("of") or isForAwait) {
|
||
if (bad_let_range) |r| {
|
||
try p.log.addRangeError(p.source, r, "\"let\" must be wrapped in parentheses to be used as an expression here");
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
if (isForAwait and !p.lexer.isContextualKeyword("of")) {
|
||
if (init_ != null) {
|
||
try p.lexer.expectedString("\"of\"");
|
||
} else {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
}
|
||
|
||
try p.forbidInitializers(decls.slice(), "of", false);
|
||
try p.lexer.next();
|
||
const value = try p.parseExpr(.comma);
|
||
try p.lexer.expect(.t_close_paren);
|
||
var stmtOpts = ParseStatementOptions{};
|
||
const body = try p.parseStmt(&stmtOpts);
|
||
return p.s(S.ForOf{ .is_await = isForAwait, .init = init_ orelse unreachable, .value = value, .body = body }, loc);
|
||
}
|
||
|
||
// Detect for-in loops
|
||
if (p.lexer.token == .t_in) {
|
||
try p.forbidInitializers(decls.slice(), "in", is_var);
|
||
try p.lexer.next();
|
||
const value = try p.parseExpr(.lowest);
|
||
try p.lexer.expect(.t_close_paren);
|
||
var stmtOpts = ParseStatementOptions{};
|
||
const body = try p.parseStmt(&stmtOpts);
|
||
return p.s(S.ForIn{ .init = init_ orelse unreachable, .value = value, .body = body }, loc);
|
||
}
|
||
|
||
// Only require "const" statement initializers when we know we're a normal for loop
|
||
if (init_) |init_stmt| {
|
||
switch (init_stmt.data) {
|
||
.s_local => {
|
||
if (init_stmt.data.s_local.kind == .k_const) {
|
||
try p.requireInitializers(.k_const, decls.slice());
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
|
||
try p.lexer.expect(.t_semicolon);
|
||
if (p.lexer.token != .t_semicolon) {
|
||
test_ = try p.parseExpr(.lowest);
|
||
}
|
||
|
||
try p.lexer.expect(.t_semicolon);
|
||
|
||
if (p.lexer.token != .t_close_paren) {
|
||
update = try p.parseExpr(.lowest);
|
||
}
|
||
|
||
try p.lexer.expect(.t_close_paren);
|
||
var stmtOpts = ParseStatementOptions{};
|
||
const body = try p.parseStmt(&stmtOpts);
|
||
return p.s(
|
||
S.For{ .init = init_, .test_ = test_, .update = update, .body = body },
|
||
loc,
|
||
);
|
||
},
|
||
.t_import => {
|
||
const previous_import_keyword = p.esm_import_keyword;
|
||
p.esm_import_keyword = p.lexer.range();
|
||
try p.lexer.next();
|
||
var stmt: S.Import = S.Import{
|
||
.namespace_ref = Ref.None,
|
||
.import_record_index = std.math.maxInt(u32),
|
||
};
|
||
var was_originally_bare_import = false;
|
||
|
||
// "export import foo = bar"
|
||
if ((opts.is_export or (opts.is_namespace_scope and !opts.is_typescript_declare)) and p.lexer.token != .t_identifier) {
|
||
try p.lexer.expected(.t_identifier);
|
||
}
|
||
|
||
switch (p.lexer.token) {
|
||
// "import('path')"
|
||
// "import.meta"
|
||
.t_open_paren, .t_dot => {
|
||
p.esm_import_keyword = previous_import_keyword; // this wasn't an esm import statement after all
|
||
const expr = try p.parseSuffix(try p.parseImportExpr(loc, .lowest), .lowest, null, Expr.EFlags.none);
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
return p.s(S.SExpr{
|
||
.value = expr,
|
||
}, loc);
|
||
},
|
||
.t_string_literal, .t_no_substitution_template_literal => {
|
||
// "import 'path'"
|
||
if (!opts.is_module_scope and (!opts.is_namespace_scope or !opts.is_typescript_declare)) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
was_originally_bare_import = true;
|
||
},
|
||
.t_asterisk => {
|
||
// "import * as ns from 'path'"
|
||
if (!opts.is_module_scope and (!opts.is_namespace_scope or !opts.is_typescript_declare)) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
try p.lexer.expectContextualKeyword("as");
|
||
stmt = S.Import{
|
||
.namespace_ref = try p.storeNameInRef(p.lexer.identifier),
|
||
.star_name_loc = p.lexer.loc(),
|
||
.import_record_index = std.math.maxInt(u32),
|
||
};
|
||
try p.lexer.expect(.t_identifier);
|
||
try p.lexer.expectContextualKeyword("from");
|
||
},
|
||
.t_open_brace => {
|
||
// "import {item1, item2} from 'path'"
|
||
if (!opts.is_module_scope and (!opts.is_namespace_scope or !opts.is_typescript_declare)) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
const importClause = try p.parseImportClause();
|
||
if (comptime is_typescript_enabled) {
|
||
if (importClause.had_type_only_imports and importClause.items.len == 0) {
|
||
try p.lexer.expectContextualKeyword("from");
|
||
_ = try p.parsePath();
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
return p.s(S.TypeScript{}, loc);
|
||
}
|
||
}
|
||
|
||
stmt = S.Import{
|
||
.namespace_ref = Ref.None,
|
||
.import_record_index = std.math.maxInt(u32),
|
||
.items = importClause.items,
|
||
.is_single_line = importClause.is_single_line,
|
||
};
|
||
try p.lexer.expectContextualKeyword("from");
|
||
},
|
||
.t_identifier => {
|
||
// "import defaultItem from 'path'"
|
||
// "import foo = bar"
|
||
if (!opts.is_module_scope and (!opts.is_namespace_scope)) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
var default_name = p.lexer.identifier;
|
||
stmt = S.Import{ .namespace_ref = Ref.None, .import_record_index = std.math.maxInt(u32), .default_name = LocRef{
|
||
.loc = p.lexer.loc(),
|
||
.ref = try p.storeNameInRef(default_name),
|
||
} };
|
||
try p.lexer.next();
|
||
|
||
if (comptime is_typescript_enabled) {
|
||
// Skip over type-only imports
|
||
if (strings.eqlComptime(default_name, "type")) {
|
||
switch (p.lexer.token) {
|
||
.t_identifier => {
|
||
if (!strings.eqlComptime(p.lexer.identifier, "from")) {
|
||
default_name = p.lexer.identifier;
|
||
stmt.default_name.?.loc = p.lexer.loc();
|
||
try p.lexer.next();
|
||
|
||
if (p.lexer.token == .t_equals) {
|
||
// "import type foo = require('bar');"
|
||
// "import type foo = bar.baz;"
|
||
opts.is_typescript_declare = true;
|
||
return try p.parseTypeScriptImportEqualsStmt(loc, opts, stmt.default_name.?.loc, default_name);
|
||
} else {
|
||
// "import type foo from 'bar';"
|
||
try p.lexer.expectContextualKeyword("from");
|
||
_ = try p.parsePath();
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
return p.s(S.TypeScript{}, loc);
|
||
}
|
||
}
|
||
},
|
||
.t_asterisk => {
|
||
// "import type * as foo from 'bar';"
|
||
try p.lexer.next();
|
||
try p.lexer.expectContextualKeyword("as");
|
||
try p.lexer.expect(.t_identifier);
|
||
try p.lexer.expectContextualKeyword("from");
|
||
_ = try p.parsePath();
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
return p.s(S.TypeScript{}, loc);
|
||
},
|
||
|
||
.t_open_brace => {
|
||
// "import type {foo} from 'bar';"
|
||
_ = try p.parseImportClause();
|
||
try p.lexer.expectContextualKeyword("from");
|
||
_ = try p.parsePath();
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
return p.s(S.TypeScript{}, loc);
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
|
||
// Parse TypeScript import assignment statements
|
||
if (p.lexer.token == .t_equals or opts.is_export or (opts.is_namespace_scope and !opts.is_typescript_declare)) {
|
||
p.esm_import_keyword = previous_import_keyword; // This wasn't an ESM import statement after all;
|
||
return p.parseTypeScriptImportEqualsStmt(loc, opts, logger.Loc.Empty, default_name);
|
||
}
|
||
}
|
||
|
||
if (p.lexer.token == .t_comma) {
|
||
try p.lexer.next();
|
||
|
||
switch (p.lexer.token) {
|
||
// "import defaultItem, * as ns from 'path'"
|
||
.t_asterisk => {
|
||
try p.lexer.next();
|
||
try p.lexer.expectContextualKeyword("as");
|
||
stmt.namespace_ref = try p.storeNameInRef(p.lexer.identifier);
|
||
stmt.star_name_loc = p.lexer.loc();
|
||
try p.lexer.expect(.t_identifier);
|
||
},
|
||
// "import defaultItem, {item1, item2} from 'path'"
|
||
.t_open_brace => {
|
||
const importClause = try p.parseImportClause();
|
||
|
||
stmt.items = importClause.items;
|
||
stmt.is_single_line = importClause.is_single_line;
|
||
},
|
||
else => {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
},
|
||
}
|
||
}
|
||
|
||
try p.lexer.expectContextualKeyword("from");
|
||
},
|
||
else => {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
},
|
||
}
|
||
|
||
const path = try p.parsePath();
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
|
||
return try p.processImportStatement(stmt, path, loc, was_originally_bare_import);
|
||
},
|
||
.t_break => {
|
||
try p.lexer.next();
|
||
const name = try p.parseLabelName();
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
return p.s(S.Break{ .label = name }, loc);
|
||
},
|
||
.t_continue => {
|
||
try p.lexer.next();
|
||
const name = try p.parseLabelName();
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
return p.s(S.Continue{ .label = name }, loc);
|
||
},
|
||
.t_return => {
|
||
if (p.fn_or_arrow_data_parse.is_return_disallowed) {
|
||
try p.log.addRangeError(p.source, p.lexer.range(), "A return statement cannot be used here");
|
||
}
|
||
try p.lexer.next();
|
||
var value: ?Expr = null;
|
||
if ((p.lexer.token != .t_semicolon and
|
||
!p.lexer.has_newline_before and
|
||
p.lexer.token != .t_close_brace and
|
||
p.lexer.token != .t_end_of_file))
|
||
{
|
||
value = try p.parseExpr(.lowest);
|
||
}
|
||
p.latest_return_had_semicolon = p.lexer.token == .t_semicolon;
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
|
||
return p.s(S.Return{ .value = value }, loc);
|
||
},
|
||
.t_throw => {
|
||
try p.lexer.next();
|
||
if (p.lexer.has_newline_before) {
|
||
try p.log.addError(p.source, logger.Loc{
|
||
.start = loc.start + 5,
|
||
}, "Unexpected newline after \"throw\"");
|
||
return error.SyntaxError;
|
||
}
|
||
const expr = try p.parseExpr(.lowest);
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
return p.s(S.Throw{ .value = expr }, loc);
|
||
},
|
||
.t_debugger => {
|
||
try p.lexer.next();
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
return p.s(S.Debugger{}, loc);
|
||
},
|
||
.t_open_brace => {
|
||
_ = try p.pushScopeForParsePass(.block, loc);
|
||
defer p.popScope();
|
||
try p.lexer.next();
|
||
var stmtOpts = ParseStatementOptions{};
|
||
const stmts = try p.parseStmtsUpTo(.t_close_brace, &stmtOpts);
|
||
const close_brace_loc = p.lexer.loc();
|
||
try p.lexer.next();
|
||
return p.s(S.Block{
|
||
.stmts = stmts,
|
||
.close_brace_loc = close_brace_loc,
|
||
}, loc);
|
||
},
|
||
|
||
else => {
|
||
const is_identifier = p.lexer.token == .t_identifier;
|
||
const name = p.lexer.identifier;
|
||
// Parse either an async function, an async expression, or a normal expression
|
||
var expr: Expr = Expr{ .loc = loc, .data = Expr.Data{ .e_missing = .{} } };
|
||
if (is_identifier and strings.eqlComptime(p.lexer.raw(), "async")) {
|
||
const async_range = p.lexer.range();
|
||
try p.lexer.next();
|
||
if (p.lexer.token == .t_function and !p.lexer.has_newline_before) {
|
||
try p.lexer.next();
|
||
|
||
return try p.parseFnStmt(async_range.loc, opts, async_range);
|
||
}
|
||
|
||
expr = try p.parseSuffix(try p.parseAsyncPrefixExpr(async_range, .lowest), .lowest, null, Expr.EFlags.none);
|
||
} else {
|
||
const exprOrLet = try p.parseExprOrLetStmt(opts);
|
||
switch (exprOrLet.stmt_or_expr) {
|
||
.stmt => |stmt| {
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
return stmt;
|
||
},
|
||
.expr => |_expr| {
|
||
expr = _expr;
|
||
},
|
||
}
|
||
}
|
||
if (is_identifier) {
|
||
switch (expr.data) {
|
||
.e_identifier => |ident| {
|
||
if (p.lexer.token == .t_colon and !opts.hasDecorators()) {
|
||
_ = try p.pushScopeForParsePass(.label, loc);
|
||
defer p.popScope();
|
||
|
||
// Parse a labeled statement
|
||
try p.lexer.next();
|
||
|
||
const _name = LocRef{ .loc = expr.loc, .ref = ident.ref };
|
||
var nestedOpts = ParseStatementOptions{};
|
||
|
||
switch (opts.lexical_decl) {
|
||
.allow_all, .allow_fn_inside_label => {
|
||
nestedOpts.lexical_decl = .allow_fn_inside_label;
|
||
},
|
||
else => {},
|
||
}
|
||
const stmt = try p.parseStmt(&nestedOpts);
|
||
return p.s(S.Label{ .name = _name, .stmt = stmt }, loc);
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
if (is_typescript_enabled) {
|
||
if (js_lexer.TypescriptStmtKeyword.List.get(name)) |ts_stmt| {
|
||
switch (ts_stmt) {
|
||
.ts_stmt_type => {
|
||
if (p.lexer.token == .t_identifier and !p.lexer.has_newline_before) {
|
||
// "type Foo = any"
|
||
var stmtOpts = ParseStatementOptions{ .is_module_scope = opts.is_module_scope };
|
||
try p.skipTypeScriptTypeStmt(&stmtOpts);
|
||
return p.s(S.TypeScript{}, loc);
|
||
}
|
||
},
|
||
.ts_stmt_namespace, .ts_stmt_module => {
|
||
// "namespace Foo {}"
|
||
// "module Foo {}"
|
||
// "declare module 'fs' {}"
|
||
// "declare module 'fs';"
|
||
if (((opts.is_module_scope or opts.is_namespace_scope) and (p.lexer.token == .t_identifier or
|
||
(p.lexer.token == .t_string_literal and opts.is_typescript_declare))))
|
||
{
|
||
return p.parseTypeScriptNamespaceStmt(loc, opts);
|
||
}
|
||
},
|
||
.ts_stmt_interface => {
|
||
// "interface Foo {}"
|
||
var stmtOpts = ParseStatementOptions{ .is_module_scope = opts.is_module_scope };
|
||
|
||
try p.skipTypeScriptInterfaceStmt(&stmtOpts);
|
||
return p.s(S.TypeScript{}, loc);
|
||
},
|
||
.ts_stmt_abstract => {
|
||
if (p.lexer.token == .t_class or opts.ts_decorators != null) {
|
||
return try p.parseClassStmt(loc, opts);
|
||
}
|
||
},
|
||
.ts_stmt_global => {
|
||
// "declare module 'fs' { global { namespace NodeJS {} } }"
|
||
if (opts.is_namespace_scope and opts.is_typescript_declare and p.lexer.token == .t_open_brace) {
|
||
try p.lexer.next();
|
||
_ = try p.parseStmtsUpTo(.t_close_brace, opts);
|
||
try p.lexer.next();
|
||
return p.s(S.TypeScript{}, loc);
|
||
}
|
||
},
|
||
.ts_stmt_declare => {
|
||
opts.lexical_decl = .allow_all;
|
||
opts.is_typescript_declare = true;
|
||
|
||
// "@decorator declare class Foo {}"
|
||
// "@decorator declare abstract class Foo {}"
|
||
if (opts.ts_decorators != null and p.lexer.token != .t_class and !p.lexer.isContextualKeyword("abstract")) {
|
||
try p.lexer.expected(.t_class);
|
||
}
|
||
|
||
// "declare global { ... }"
|
||
if (p.lexer.isContextualKeyword("global")) {
|
||
try p.lexer.next();
|
||
try p.lexer.expect(.t_open_brace);
|
||
_ = try p.parseStmtsUpTo(.t_close_brace, opts);
|
||
try p.lexer.next();
|
||
return p.s(S.TypeScript{}, loc);
|
||
}
|
||
|
||
// "declare const x: any"
|
||
const stmt = try p.parseStmt(opts);
|
||
if (opts.ts_decorators) |decs| {
|
||
p.discardScopesUpTo(decs.scope_index);
|
||
}
|
||
|
||
// Unlike almost all uses of "declare", statements that use
|
||
// "export declare" with "var/let/const" inside a namespace affect
|
||
// code generation. They cause any declared bindings to be
|
||
// considered exports of the namespace. Identifier references to
|
||
// those names must be converted into property accesses off the
|
||
// namespace object:
|
||
//
|
||
// namespace ns {
|
||
// export declare const x
|
||
// export function y() { return x }
|
||
// }
|
||
//
|
||
// (ns as any).x = 1
|
||
// console.log(ns.y())
|
||
//
|
||
// In this example, "return x" must be replaced with "return ns.x".
|
||
// This is handled by replacing each "export declare" statement
|
||
// inside a namespace with an "export var" statement containing all
|
||
// of the declared bindings. That "export var" statement will later
|
||
// cause identifiers to be transformed into property accesses.
|
||
if (opts.is_namespace_scope and opts.is_export) {
|
||
var decls: G.Decl.List = .{};
|
||
switch (stmt.data) {
|
||
.s_local => |local| {
|
||
var _decls = try ListManaged(G.Decl).initCapacity(p.allocator, local.decls.len);
|
||
for (local.decls.slice()) |decl| {
|
||
try extractDeclsForBinding(decl.binding, &_decls);
|
||
}
|
||
decls.update(_decls);
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
if (decls.len > 0) {
|
||
return p.s(S.Local{
|
||
.kind = .k_var,
|
||
.is_export = true,
|
||
.decls = decls,
|
||
}, loc);
|
||
}
|
||
}
|
||
|
||
return p.s(S.TypeScript{}, loc);
|
||
},
|
||
}
|
||
}
|
||
}
|
||
}
|
||
// Output.print("\n\nmVALUE {s}:{s}\n", .{ expr, name });
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
return p.s(S.SExpr{ .value = expr }, loc);
|
||
},
|
||
}
|
||
|
||
return js_ast.Stmt.empty();
|
||
}
|
||
|
||
/// Detach every scope recorded at or after `scope_index` from the scope
/// tree and truncate `scopes_in_order`, pretending those scopes were never
/// parsed. Used when a parsed construct turns out to generate no runtime
/// code (e.g. TypeScript "declare" statements).
fn discardScopesUpTo(p: *P, scope_index: usize) void {
    // Remove any direct children from their parent
    const scope = p.current_scope;
    var children = scope.children;
    // Write the (possibly shrunken) child list back on every exit path.
    defer scope.children = children;

    for (p.scopes_in_order.items[scope_index..]) |_child| {
        const child = _child orelse continue;

        if (child.scope.parent == p.current_scope) {
            // Search backwards: the scopes being discarded were pushed most
            // recently, so they sit near the end of the child list.
            //
            // NOTE: the previous version used `var i = children.len - 1;
            // while (i >= 0)`, which is always true for a usize and
            // underflows both when `children` is empty and when the scope
            // is not found. Count down from `len` instead.
            var i: usize = children.len;
            while (i > 0) {
                i -= 1;
                if (children.mut(i).* == child.scope) {
                    _ = children.orderedRemove(i);
                    break;
                }
            }
        }
    }

    // Truncate the scope order where we started to pretend we never saw this scope
    p.scopes_in_order.shrinkRetainingCapacity(scope_index);
}
|
||
|
||
/// Skip a TypeScript "type" statement without producing any AST, e.g.
/// "type Foo = Bar" or the export-clause forms "export type {foo} from 'bar'"
/// and "export type * as ns from 'bar'". Reports syntax errors via p.log.
fn skipTypeScriptTypeStmt(p: *P, opts: *ParseStatementOptions) anyerror!void {
    if (opts.is_export) {
        switch (p.lexer.token) {
            .t_open_brace => {
                // "export type {foo}"
                // "export type {foo} from 'bar'"
                _ = try p.parseExportClause();
                if (p.lexer.isContextualKeyword("from")) {
                    try p.lexer.next();
                    _ = try p.parsePath();
                }
                try p.lexer.expectOrInsertSemicolon();
                return;
            },
            .t_asterisk => {
                // https://github.com/microsoft/TypeScript/pull/52217
                // - export type * as Foo from 'bar';
                // - export type Foo from 'bar';
                try p.lexer.next();
                if (p.lexer.isContextualKeyword("as")) {
                    // "export type * as ns from 'path'"
                    try p.lexer.next();
                    _ = try p.parseClauseAlias("export");
                    try p.lexer.next();
                }
                try p.lexer.expectContextualKeyword("from");
                _ = try p.parsePath();
                try p.lexer.expectOrInsertSemicolon();
                return;
            },
            else => {},
        }
    }

    // Plain "type Foo = ..." — the next token must be the alias name.
    const name = p.lexer.identifier;
    try p.lexer.expect(.t_identifier);

    if (opts.is_module_scope) {
        // Remember that this module-scope name is a type, so imports that
        // only refer to it can be treated as type-only.
        p.local_type_names.put(p.allocator, name, true) catch unreachable;
    }

    // "type Foo<T> = ..." — skip an optional type-parameter list.
    _ = try p.skipTypeScriptTypeParameters(.{
        .allow_in_out_variance_annotations = true,
        .allow_empty_type_parameters = true,
    });

    try p.lexer.expect(.t_equals);
    try p.skipTypeScriptType(.lowest);
    try p.lexer.expectOrInsertSemicolon();
}
|
||
|
||
/// Parse a TypeScript "namespace Foo {}" / "module Foo {}" statement.
/// Returns an S.Namespace node, or S.TypeScript (a no-op marker) when the
/// namespace is type-only and therefore generates no runtime code.
/// The caller has already consumed the "namespace"/"module" keyword; the
/// lexer is positioned at the namespace name.
fn parseTypeScriptNamespaceStmt(p: *P, loc: logger.Loc, opts: *ParseStatementOptions) anyerror!Stmt {
    // "namespace foo {}";
    const name_loc = p.lexer.loc();
    const name_text = p.lexer.identifier;
    try p.lexer.next();

    // Generate the namespace object
    const ts_namespace = p.getOrCreateExportedNamespaceMembers(name_text, opts.is_export, false);
    const exported_members = ts_namespace.exported_members;
    const ns_member_data = js_ast.TSNamespaceMember.Data{ .namespace = exported_members };

    // Declare the namespace and create the scope
    var name = LocRef{ .loc = name_loc, .ref = null };
    const scope_index = try p.pushScopeForParsePass(.entry, loc);
    p.current_scope.ts_namespace = ts_namespace;

    // Save and reset this flag so nested namespaces don't leak their
    // "export declare" observations into ours (restored below).
    const old_has_non_local_export_declare_inside_namespace = p.has_non_local_export_declare_inside_namespace;
    p.has_non_local_export_declare_inside_namespace = false;

    // Parse the statements inside the namespace
    var stmts: ListManaged(Stmt) = ListManaged(Stmt).init(p.allocator);
    if (p.lexer.token == .t_dot) {
        // "namespace foo.bar {}" — parse the rest recursively as a nested
        // exported namespace statement.
        const dot_loc = p.lexer.loc();
        try p.lexer.next();

        var _opts = ParseStatementOptions{
            .is_export = true,
            .is_namespace_scope = true,
            .is_typescript_declare = opts.is_typescript_declare,
        };
        stmts.append(try p.parseTypeScriptNamespaceStmt(dot_loc, &_opts)) catch unreachable;
    } else if (opts.is_typescript_declare and p.lexer.token != .t_open_brace) {
        // "declare module 'fs';" — no body at all.
        try p.lexer.expectOrInsertSemicolon();
    } else {
        try p.lexer.expect(.t_open_brace);
        var _opts = ParseStatementOptions{
            .is_namespace_scope = true,
            .is_typescript_declare = opts.is_typescript_declare,
        };
        stmts = ListManaged(Stmt).fromOwnedSlice(p.allocator, try p.parseStmtsUpTo(.t_close_brace, &_opts));
        try p.lexer.next();
    }
    const has_non_local_export_declare_inside_namespace = p.has_non_local_export_declare_inside_namespace;
    p.has_non_local_export_declare_inside_namespace = old_has_non_local_export_declare_inside_namespace;

    // Add any exported members from this namespace's body as members of the
    // associated namespace object.
    for (stmts.items) |stmt| {
        switch (stmt.data) {
            .s_function => |func| {
                if (func.func.flags.contains(.is_export)) {
                    const locref = func.func.name.?;
                    const fn_name = p.symbols.items[locref.ref.?.inner_index].original_name;
                    try exported_members.put(p.allocator, fn_name, .{
                        .loc = locref.loc,
                        .data = .property,
                    });
                    try p.ref_to_ts_namespace_member.put(
                        p.allocator,
                        locref.ref.?,
                        .property,
                    );
                }
            },
            .s_class => |class| {
                if (class.is_export) {
                    const locref = class.class.class_name.?;
                    const class_name = p.symbols.items[locref.ref.?.inner_index].original_name;
                    try exported_members.put(p.allocator, class_name, .{
                        .loc = locref.loc,
                        .data = .property,
                    });
                    try p.ref_to_ts_namespace_member.put(
                        p.allocator,
                        locref.ref.?,
                        .property,
                    );
                }
            },
            inline .s_namespace, .s_enum => |ns| {
                if (ns.is_export) {
                    // Nested namespaces/enums forward whatever member data
                    // was registered for them when they were parsed.
                    if (p.ref_to_ts_namespace_member.get(ns.name.ref.?)) |member_data| {
                        try exported_members.put(
                            p.allocator,
                            p.symbols.items[ns.name.ref.?.inner_index].original_name,
                            .{
                                .data = member_data,
                                .loc = ns.name.loc,
                            },
                        );
                        try p.ref_to_ts_namespace_member.put(
                            p.allocator,
                            ns.name.ref.?,
                            member_data,
                        );
                    }
                }
            },
            .s_local => |local| {
                if (local.is_export) {
                    // Exported var/let/const: register every bound name,
                    // including names inside destructuring patterns.
                    for (local.decls.slice()) |decl| {
                        try p.defineExportedNamespaceBinding(
                            exported_members,
                            decl.binding,
                        );
                    }
                }
            },
            else => {},
        }
    }

    // Import assignments may be only used in type expressions, not value
    // expressions. If this is the case, the TypeScript compiler removes
    // them entirely from the output. That can cause the namespace itself
    // to be considered empty and thus be removed.
    var import_equal_count: usize = 0;
    for (stmts.items) |stmt| {
        switch (stmt.data) {
            .s_local => |local| {
                if (local.was_ts_import_equals and !local.is_export) {
                    import_equal_count += 1;
                }
            },
            else => {},
        }
    }

    // TypeScript omits namespaces without values. These namespaces
    // are only allowed to be used in type expressions. They are
    // allowed to be exported, but can also only be used in type
    // expressions when imported. So we shouldn't count them as a
    // real export either.
    //
    // TypeScript also strangely counts namespaces containing only
    // "export declare" statements as non-empty even though "declare"
    // statements are only type annotations. We cannot omit the namespace
    // in that case. See https://github.com/evanw/esbuild/issues/1158.
    if ((stmts.items.len == import_equal_count and !has_non_local_export_declare_inside_namespace) or opts.is_typescript_declare) {
        p.popAndDiscardScope(scope_index);
        if (opts.is_module_scope) {
            p.local_type_names.put(p.allocator, name_text, true) catch unreachable;
        }
        return p.s(S.TypeScript{}, loc);
    }

    var arg_ref = Ref.None;
    if (!opts.is_typescript_declare) {
        // Avoid a collision with the namespace closure argument variable if the
        // namespace exports a symbol with the same name as the namespace itself:
        //
        // namespace foo {
        //   export let foo = 123
        //   console.log(foo)
        // }
        //
        // TypeScript generates the following code in this case:
        //
        // var foo;
        // (function (foo_1) {
        //   foo_1.foo = 123;
        //   console.log(foo_1.foo);
        // })(foo || (foo = {}));
        //
        if (p.current_scope.members.contains(name_text)) {
            // Add a "_" to make tests easier to read, since non-bundler tests don't
            // run the renamer. For external-facing things the renamer will avoid
            // collisions automatically so this isn't important for correctness.
            arg_ref = p.newSymbol(.hoisted, strings.cat(p.allocator, "_", name_text) catch unreachable) catch unreachable;
            p.current_scope.generated.push(p.allocator, arg_ref) catch unreachable;
        } else {
            arg_ref = p.newSymbol(.hoisted, name_text) catch unreachable;
        }
        ts_namespace.arg_ref = arg_ref;
    }
    p.popScope();

    if (!opts.is_typescript_declare) {
        name.ref = p.declareSymbol(.ts_namespace, name_loc, name_text) catch bun.outOfMemory();
        try p.ref_to_ts_namespace_member.put(p.allocator, name.ref.?, ns_member_data);
    }

    return p.s(S.Namespace{
        .name = name,
        .arg = arg_ref,
        .stmts = stmts.items,
        .is_export = opts.is_export,
    }, loc);
}
|
||
|
||
/// Recursively register every identifier bound by `binding` as an exported
/// ".property" member of the enclosing TypeScript namespace, and record the
/// ref-to-member mapping so later identifier references can be rewritten
/// into property accesses on the namespace object.
fn defineExportedNamespaceBinding(
    p: *P,
    exported_members: *js_ast.TSNamespaceMemberMap,
    binding: Binding,
) !void {
    switch (binding.data) {
        .b_identifier => |ident| {
            // A plain name: register it directly.
            const original_name = p.symbols.items[ident.ref.inner_index].original_name;
            try exported_members.put(p.allocator, original_name, .{
                .loc = binding.loc,
                .data = .property,
            });
            try p.ref_to_ts_namespace_member.put(p.allocator, ident.ref, .property);
        },
        // Destructuring patterns: recurse into each nested binding.
        .b_object => |object_binding| for (object_binding.properties) |property| {
            try p.defineExportedNamespaceBinding(exported_members, property.value);
        },
        .b_array => |array_binding| for (array_binding.items) |item| {
            try p.defineExportedNamespaceBinding(exported_members, item.binding);
        },
        .b_missing => {},
    }
}
|
||
|
||
/// Skip a TypeScript "interface Foo ... {}" declaration without producing
/// any AST. The interface name is recorded (at module scope) so that
/// imports referring only to types can be removed later.
fn skipTypeScriptInterfaceStmt(p: *P, opts: *ParseStatementOptions) anyerror!void {
    const name = p.lexer.identifier;
    try p.lexer.expect(.t_identifier);

    if (opts.is_module_scope) {
        p.local_type_names.put(p.allocator, name, true) catch unreachable;
    }

    // "interface Foo<T> ..." — skip an optional type-parameter list.
    _ = try p.skipTypeScriptTypeParameters(.{
        .allow_in_out_variance_annotations = true,
        .allow_empty_type_parameters = true,
    });

    // "interface A extends B, C {}" — skip a comma-separated type list.
    if (p.lexer.token == .t_extends) {
        try p.lexer.next();
        try p.skipTypeScriptType(.lowest);
        while (p.lexer.token == .t_comma) {
            try p.lexer.next();
            try p.skipTypeScriptType(.lowest);
        }
    }

    // Also accept an "implements" clause here, skipping it the same way.
    if (p.lexer.isContextualKeyword("implements")) {
        try p.lexer.next();
        try p.skipTypeScriptType(.lowest);
        while (p.lexer.token == .t_comma) {
            try p.lexer.next();
            try p.skipTypeScriptType(.lowest);
        }
    }

    // Finally skip the interface body itself.
    try p.skipTypeScriptObjectType();
}
|
||
|
||
/// Parse a TypeScript import assignment, i.e. the "= ..." part of
/// "import Foo = require('x')" or "import Foo = Bar.Baz". The caller has
/// already consumed "import" and the name; `default_name`/`default_name_loc`
/// identify the bound name. Lowered to a const declaration with
/// `was_ts_import_equals` set, unless inside "declare" (then it's a no-op).
fn parseTypeScriptImportEqualsStmt(p: *P, loc: logger.Loc, opts: *ParseStatementOptions, default_name_loc: logger.Loc, default_name: string) anyerror!Stmt {
    try p.lexer.expect(.t_equals);

    const kind = S.Local.Kind.k_const;
    const name = p.lexer.identifier;
    const target = p.newExpr(E.Identifier{ .ref = p.storeNameInRef(name) catch unreachable }, p.lexer.loc());
    var value = target;
    try p.lexer.expect(.t_identifier);

    if (strings.eqlComptime(name, "require") and p.lexer.token == .t_open_paren) {
        // "import ns = require('x')"
        try p.lexer.next();
        const path = p.newExpr(try p.lexer.toEString(), p.lexer.loc());
        try p.lexer.expect(.t_string_literal);
        try p.lexer.expect(.t_close_paren);
        if (!opts.is_typescript_declare) {
            // Build the call expression "require('x')".
            const args = try ExprNodeList.one(p.allocator, path);
            value = p.newExpr(E.Call{ .target = target, .close_paren_loc = p.lexer.loc(), .args = args }, loc);
        }
    } else {
        // "import Foo = Bar"
        // "import Foo = Bar.Baz"
        // Build a chain of E.Dot accesses for each ".name" segment.
        var prev_value = value;
        while (p.lexer.token == .t_dot) : (prev_value = value) {
            try p.lexer.next();
            value = p.newExpr(E.Dot{ .target = prev_value, .name = p.lexer.identifier, .name_loc = p.lexer.loc() }, loc);
            try p.lexer.expect(.t_identifier);
        }
    }

    try p.lexer.expectOrInsertSemicolon();

    if (opts.is_typescript_declare) {
        // "import type foo = require('bar');"
        // "import type foo = bar.baz;"
        return p.s(S.TypeScript{}, loc);
    }

    // Emit "const <default_name> = <value>" with was_ts_import_equals set,
    // so the namespace-emptiness check can recognize it later.
    const ref = p.declareSymbol(.constant, default_name_loc, default_name) catch unreachable;
    var decls = p.allocator.alloc(Decl, 1) catch unreachable;
    decls[0] = Decl{
        .binding = p.b(B.Identifier{ .ref = ref }, default_name_loc),
        .value = value,
    };
    return p.s(S.Local{ .kind = kind, .decls = Decl.List.init(decls), .is_export = opts.is_export, .was_ts_import_equals = true }, loc);
}
|
||
|
||
/// Parse the alias in an import/export clause. The alias may be an
/// identifier, a keyword, or a string literal. `kind` is "import" or
/// "export" and is used only in error messages. The lexer is NOT advanced
/// past an identifier/keyword alias — callers do that themselves.
fn parseClauseAlias(p: *P, kind: string) !string {
    const loc = p.lexer.loc();

    // The alias may now be a utf-16 (not wtf-16) string (see https://github.com/tc39/ecma262/pull/2154)
    if (p.lexer.token == .t_string_literal) {
        var estr = try p.lexer.toEString();
        if (estr.isUTF8()) {
            return estr.slice8();
        } else if (strings.toUTF8AllocWithTypeWithoutInvalidSurrogatePairs(p.lexer.allocator, []const u16, estr.slice16())) |alias_utf8| {
            // Successfully converted the UTF-16 alias to UTF-8.
            return alias_utf8;
        } else |err| {
            // Unpaired surrogate: report the error but keep parsing, using
            // the raw source text of the string as the alias.
            const r = p.source.rangeOfString(loc);
            try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Invalid {s} alias because it contains an unpaired Unicode surrogate ({s})", .{ kind, @errorName(err) });
            return p.source.textForRange(r);
        }
    }

    // The alias may be a keyword
    if (!p.lexer.isIdentifierOrKeyword()) {
        try p.lexer.expect(.t_identifier);
    }

    const alias = p.lexer.identifier;
    p.checkForNonBMPCodePoint(loc, alias);
    return alias;
}
|
||
|
||
/// Parse the named-specifier list of an import statement:
/// "import { a, b as c } from 'mod'". The lexer must be positioned at the
/// opening brace. In TypeScript files this also recognizes type-only
/// specifiers ("import { type A } from 'mod'"), which are skipped and
/// recorded via `had_type_only_imports` instead of added to `items`.
fn parseImportClause(
    p: *P,
) !ImportClause {
    var items = ListManaged(js_ast.ClauseItem).init(p.allocator);
    try p.lexer.expect(.t_open_brace);
    var is_single_line = !p.lexer.has_newline_before;
    // this variable should not exist if we're not in a typescript file
    var had_type_only_imports = if (comptime is_typescript_enabled)
        false
    else {};

    while (p.lexer.token != .t_close_brace) {
        // The alias may be a keyword;
        const isIdentifier = p.lexer.token == .t_identifier;
        const alias_loc = p.lexer.loc();
        const alias = try p.parseClauseAlias("import");
        var name = LocRef{ .loc = alias_loc, .ref = try p.storeNameInRef(alias) };
        var original_name = alias;
        try p.lexer.next();

        // "type" is only a type-specifier marker when something other than
        // "," or "}" follows it — otherwise it's a normal name.
        const probably_type_only_import = if (comptime is_typescript_enabled)
            strings.eqlComptime(alias, "type") and
                p.lexer.token != .t_comma and
                p.lexer.token != .t_close_brace
        else
            false;

        // "import { type xx } from 'mod'"
        // "import { type xx as yy } from 'mod'"
        // "import { type 'xx' as yy } from 'mod'"
        // "import { type as } from 'mod'"
        // "import { type as as } from 'mod'"
        // "import { type as as as } from 'mod'"
        if (probably_type_only_import) {
            if (p.lexer.isContextualKeyword("as")) {
                try p.lexer.next();
                if (p.lexer.isContextualKeyword("as")) {
                    original_name = p.lexer.identifier;
                    name = LocRef{ .loc = p.lexer.loc(), .ref = try p.storeNameInRef(original_name) };
                    try p.lexer.next();

                    if (p.lexer.token == .t_identifier) {

                        // "import { type as as as } from 'mod'"
                        // "import { type as as foo } from 'mod'"
                        had_type_only_imports = true;
                        try p.lexer.next();
                    } else {
                        // "import { type as as } from 'mod'"
                        // A real (value) import of the name "as" aliased "as".

                        try items.append(.{
                            .alias = alias,
                            .alias_loc = alias_loc,
                            .name = name,
                            .original_name = original_name,
                        });
                    }
                } else if (p.lexer.token == .t_identifier) {
                    had_type_only_imports = true;

                    // "import { type as xxx } from 'mod'"
                    original_name = p.lexer.identifier;
                    name = LocRef{ .loc = p.lexer.loc(), .ref = try p.storeNameInRef(original_name) };
                    try p.lexer.expect(.t_identifier);

                    if (isEvalOrArguments(original_name)) {
                        const r = p.source.rangeOfString(name.loc);
                        try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot use {s} as an identifier here", .{original_name});
                    }

                    try items.append(.{
                        .alias = alias,
                        .alias_loc = alias_loc,
                        .name = name,
                        .original_name = original_name,
                    });
                }
            } else {
                const is_identifier = p.lexer.token == .t_identifier;

                // "import { type xx } from 'mod'"
                // "import { type xx as yy } from 'mod'"
                // "import { type if as yy } from 'mod'"
                // "import { type 'xx' as yy } from 'mod'"
                // Type-only specifier: parse and discard it.
                _ = try p.parseClauseAlias("import");
                try p.lexer.next();

                if (p.lexer.isContextualKeyword("as")) {
                    try p.lexer.next();

                    try p.lexer.expect(.t_identifier);
                } else if (!is_identifier) {
                    // An import where the name is a keyword must have an alias
                    try p.lexer.expectedString("\"as\"");
                }
                had_type_only_imports = true;
            }
        } else {
            if (p.lexer.isContextualKeyword("as")) {
                try p.lexer.next();
                original_name = p.lexer.identifier;
                name = LocRef{ .loc = alias_loc, .ref = try p.storeNameInRef(original_name) };
                try p.lexer.expect(.t_identifier);
            } else if (!isIdentifier) {
                // An import where the name is a keyword must have an alias
                try p.lexer.expectedString("\"as\"");
            }

            // Reject forbidden names
            if (isEvalOrArguments(original_name)) {
                const r = js_lexer.rangeOfIdentifier(p.source, name.loc);
                try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot use \"{s}\" as an identifier here", .{original_name});
            }

            try items.append(js_ast.ClauseItem{
                .alias = alias,
                .alias_loc = alias_loc,
                .name = name,
                .original_name = original_name,
            });
        }

        if (p.lexer.token != .t_comma) {
            break;
        }

        // Track newlines around separators so the printer can keep the
        // clause on a single line when the source did.
        if (p.lexer.has_newline_before) {
            is_single_line = false;
        }

        try p.lexer.next();

        if (p.lexer.has_newline_before) {
            is_single_line = false;
        }
    }

    if (p.lexer.has_newline_before) {
        is_single_line = false;
    }

    try p.lexer.expect(.t_close_brace);
    return ImportClause{
        .items = items.items,
        .is_single_line = is_single_line,
        .had_type_only_imports = if (comptime is_typescript_enabled)
            had_type_only_imports
        else
            false,
    };
}
|
||
|
||
/// Report an error when a for-in/for-of loop head declares more than one
/// variable, or gives its single declared variable an initializer.
/// `loop_type` is "in" or "of" and is baked into the message at comptime.
fn forbidInitializers(p: *P, decls: []G.Decl, comptime loop_type: string, is_var: bool) anyerror!void {
    if (decls.len > 1) {
        try p.log.addError(p.source, decls[0].binding.loc, comptime std.fmt.comptimePrint("for-{s} loops must have a single declaration", .{loop_type}));
    } else if (decls.len == 1) {
        const initializer = decls[0].value orelse return;

        // This is a weird special case. Initializers are allowed in "var"
        // statements with identifier bindings.
        if (is_var) return;

        try p.log.addError(p.source, initializer.loc, comptime std.fmt.comptimePrint("for-{s} loop variables cannot have an initializer", .{loop_type}));
    }
}
|
||
|
||
/// Disambiguate a statement that starts with an identifier: it may be a
/// "let" declaration, a "using" declaration, an "await using" declaration,
/// or a plain expression statement. Returns either a statement (with its
/// decls, for declarations) or an expression, wrapped in ExprOrLetStmt.
/// Errors are reported through p.log.
fn parseExprOrLetStmt(p: *P, opts: *ParseStatementOptions) !ExprOrLetStmt {
    const token_range = p.lexer.range();

    // Fast path: not an identifier, so it can only be an expression.
    if (p.lexer.token != .t_identifier) {
        return ExprOrLetStmt{ .stmt_or_expr = js_ast.StmtOrExpr{ .expr = try p.parseExpr(.lowest) } };
    }

    const raw = p.lexer.raw();
    if (strings.eqlComptime(raw, "let")) {
        try p.lexer.next();

        switch (p.lexer.token) {
            .t_identifier, .t_open_bracket, .t_open_brace => {
                // "let x", "let [x]", "let {x}" — a lexical declaration,
                // unless a newline makes ASI turn "let" into an identifier.
                if (opts.lexical_decl == .allow_all or !p.lexer.has_newline_before or p.lexer.token == .t_open_bracket) {
                    if (opts.lexical_decl != .allow_all) {
                        try p.forbidLexicalDecl(token_range.loc);
                    }

                    const decls = try p.parseAndDeclareDecls(.other, opts);
                    return ExprOrLetStmt{
                        .stmt_or_expr = js_ast.StmtOrExpr{
                            .stmt = p.s(S.Local{
                                .kind = .k_let,
                                .decls = G.Decl.List.fromList(decls),
                                .is_export = opts.is_export,
                            }, token_range.loc),
                        },
                        .decls = decls.items,
                    };
                }
            },
            else => {},
        }
    } else if (strings.eqlComptime(raw, "using")) {
        // Handle an "using" declaration
        if (opts.is_export) {
            try p.log.addError(p.source, token_range.loc, "Cannot use \"export\" with a \"using\" declaration");
        }

        try p.lexer.next();

        if (p.lexer.token == .t_identifier and !p.lexer.has_newline_before) {
            if (opts.lexical_decl != .allow_all) {
                try p.forbidLexicalDecl(token_range.loc);
            }
            // p.markSyntaxFeature(.using, token_range.loc);
            opts.is_using_statement = true;
            const decls = try p.parseAndDeclareDecls(.constant, opts);
            if (!opts.is_for_loop_init) {
                // "using x;" is invalid outside a for-loop head — every
                // binding needs an initializer.
                try p.requireInitializers(.k_using, decls.items);
            }
            return ExprOrLetStmt{
                .stmt_or_expr = js_ast.StmtOrExpr{
                    .stmt = p.s(S.Local{
                        .kind = .k_using,
                        .decls = G.Decl.List.fromList(decls),
                        .is_export = false,
                    }, token_range.loc),
                },
                .decls = decls.items,
            };
        }
    } else if (p.fn_or_arrow_data_parse.allow_await == .allow_expr and strings.eqlComptime(raw, "await")) {
        // Handle an "await using" declaration
        if (opts.is_export) {
            try p.log.addError(p.source, token_range.loc, "Cannot use \"export\" with an \"await using\" declaration");
        }

        if (p.fn_or_arrow_data_parse.is_top_level) {
            p.top_level_await_keyword = token_range;
        }

        try p.lexer.next();

        const raw2 = p.lexer.raw();
        const value = if (p.lexer.token == .t_identifier and strings.eqlComptime(raw2, "using")) value: {
            // const using_loc = p.saveExprCommentsHere();
            const using_range = p.lexer.range();
            try p.lexer.next();
            if (p.lexer.token == .t_identifier and !p.lexer.has_newline_before) {
                // It's an "await using" declaration if we get here
                if (opts.lexical_decl != .allow_all) {
                    try p.forbidLexicalDecl(using_range.loc);
                }
                // p.markSyntaxFeature(.using, using_range.loc);
                opts.is_using_statement = true;
                const decls = try p.parseAndDeclareDecls(.constant, opts);
                if (!opts.is_for_loop_init) {
                    try p.requireInitializers(.k_await_using, decls.items);
                }
                return ExprOrLetStmt{
                    .stmt_or_expr = js_ast.StmtOrExpr{
                        .stmt = p.s(S.Local{
                            .kind = .k_await_using,
                            .decls = G.Decl.List.fromList(decls),
                            .is_export = false,
                        }, token_range.loc),
                    },
                    .decls = decls.items,
                };
            }
            // Not a declaration after all: "using" was just a plain
            // identifier being awaited (e.g. "await using;"). The
            // identifier's text is `raw2` ("using") — previously this
            // stored `raw` ("await"), mislabeling the identifier at
            // `using_range`.
            break :value Expr{
                .data = .{ .e_identifier = .{ .ref = try p.storeNameInRef(raw2) } },
                // TODO: implement saveExprCommentsHere and use using_loc here
                .loc = using_range.loc,
            };
        } else try p.parseExpr(.prefix);

        // "await x ** 2" is a syntax error; exponentiation needs parens.
        if (p.lexer.token == .t_asterisk_asterisk) {
            try p.lexer.unexpected();
        }
        const expr = p.newExpr(
            E.Await{ .value = try p.parseSuffix(value, .prefix, null, .none) },
            token_range.loc,
        );
        return ExprOrLetStmt{
            .stmt_or_expr = js_ast.StmtOrExpr{
                .expr = try p.parseSuffix(expr, .lowest, null, .none),
            },
        };
    } else {
        // Any other identifier: parse the whole expression normally.
        return ExprOrLetStmt{
            .stmt_or_expr = js_ast.StmtOrExpr{
                .expr = try p.parseExpr(.lowest),
            },
        };
    }

    // Parse the remainder of this expression that starts with an identifier
    // ("let" or "using" used as a plain name).
    const ref = try p.storeNameInRef(raw);
    const expr = p.newExpr(E.Identifier{ .ref = ref }, token_range.loc);
    return ExprOrLetStmt{
        .stmt_or_expr = js_ast.StmtOrExpr{
            .expr = try p.parseSuffix(expr, .lowest, null, .none),
        },
    };
}
|
||
|
||
/// Emit an error for every declaration in `decls` that has no initializer.
/// Used for "const", "using", and "await using" declarations, all of which
/// must be initialized; `kind` selects the noun used in the message.
fn requireInitializers(p: *P, comptime kind: S.Local.Kind, decls: []G.Decl) anyerror!void {
    // Resolved at comptime and concatenated into the message strings below.
    const what = switch (kind) {
        .k_await_using, .k_using => "declaration",
        .k_const => "constant",
        else => @compileError("unreachable"),
    };

    for (decls) |decl| {
        if (decl.value != null) continue;

        switch (decl.binding.data) {
            .b_identifier => |ident| {
                // Name the offending symbol in the error message.
                const range = js_lexer.rangeOfIdentifier(p.source, decl.binding.loc);
                try p.log.addRangeErrorFmt(p.source, range, p.allocator, "The " ++ what ++ " \"{s}\" must be initialized", .{
                    p.symbols.items[ident.ref.innerIndex()].original_name,
                });
            },
            // Destructuring patterns get a generic message.
            else => try p.log.addError(p.source, decl.binding.loc, "This " ++ what ++ " must be initialized"),
        }
    }
}
|
||
|
||
/// Options controlling how `parseBinding` interprets the token stream.
const ParseBindingOptions = struct {
    /// When true, destructuring patterns are rejected: a "using" declaration
    /// may only bind plain identifiers (`using name, name2, name3`), never
    /// array or object patterns.
    is_using_statement: bool = false,
};
|
||
|
||
/// Parse a binding target: a plain identifier, an array destructuring
/// pattern (`[a, , ...rest]`), or an object destructuring pattern
/// (`{ a, b: c }`). Destructuring forms are disabled when
/// `opts.is_using_statement` is set; in that case anything other than an
/// identifier falls through to the "expected identifier" error at the end.
fn parseBinding(p: *P, comptime opts: ParseBindingOptions) anyerror!Binding {
    const loc = p.lexer.loc();

    switch (p.lexer.token) {
        .t_identifier => {
            const name = p.lexer.identifier;
            // "await" / "yield" are not valid binding names inside async /
            // generator contexts respectively.
            if ((p.fn_or_arrow_data_parse.allow_await != .allow_ident and strings.eqlComptime(name, "await")) or (p.fn_or_arrow_data_parse.allow_yield != .allow_ident and strings.eqlComptime(name, "yield"))) {
                // TODO: add fmt to addRangeError
                p.log.addRangeError(p.source, p.lexer.range(), "Cannot use \"yield\" or \"await\" here.") catch unreachable;
            }

            const ref = p.storeNameInRef(name) catch unreachable;
            try p.lexer.next();
            return p.b(B.Identifier{ .ref = ref }, loc);
        },
        .t_open_bracket => {
            // Array destructuring pattern, e.g. "[a, , b = 1, ...rest]".
            if (!opts.is_using_statement) {
                try p.lexer.next();
                var is_single_line = !p.lexer.has_newline_before;
                var items = ListManaged(js_ast.ArrayBinding).init(p.allocator);
                var has_spread = false;

                // "in" expressions are allowed
                const old_allow_in = p.allow_in;
                p.allow_in = true;

                while (p.lexer.token != .t_close_bracket) {
                    if (p.lexer.token == .t_comma) {
                        // A hole in the pattern, e.g. "[, x]".
                        items.append(js_ast.ArrayBinding{
                            .binding = Binding{ .data = Prefill.Data.BMissing, .loc = p.lexer.loc() },
                        }) catch unreachable;
                    } else {
                        if (p.lexer.token == .t_dot_dot_dot) {
                            try p.lexer.next();
                            has_spread = true;

                            // This was a bug in the ES2015 spec that was fixed in ES2016
                            if (p.lexer.token != .t_identifier) {
                                // p.markSyntaxFeature(compat.NestedRestBinding, p.lexer.Range())
                            }
                        }

                        const binding = try p.parseBinding(opts);

                        // Optional default: "[a = expr]". Rest elements may
                        // not have defaults.
                        var default_value: ?Expr = null;
                        if (!has_spread and p.lexer.token == .t_equals) {
                            try p.lexer.next();
                            default_value = try p.parseExpr(.comma);
                        }

                        items.append(js_ast.ArrayBinding{ .binding = binding, .default_value = default_value }) catch unreachable;

                        // Commas after spread elements are not allowed
                        if (has_spread and p.lexer.token == .t_comma) {
                            p.log.addRangeError(p.source, p.lexer.range(), "Unexpected \",\" after rest pattern") catch unreachable;
                            return error.SyntaxError;
                        }
                    }

                    if (p.lexer.token != .t_comma) {
                        break;
                    }

                    // Newlines on either side of the separator demote the
                    // pattern to multi-line formatting for the printer.
                    if (p.lexer.has_newline_before) {
                        is_single_line = false;
                    }
                    try p.lexer.next();

                    if (p.lexer.has_newline_before) {
                        is_single_line = false;
                    }
                }

                p.allow_in = old_allow_in;

                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }
                try p.lexer.expect(.t_close_bracket);
                return p.b(B.Array{
                    .items = items.items,
                    .has_spread = has_spread,
                    .is_single_line = is_single_line,
                }, loc);
            }
        },
        .t_open_brace => {
            // Object destructuring pattern, e.g. "{ a, b: c, ...rest }".
            if (!opts.is_using_statement) {
                // p.markSyntaxFeature(compat.Destructuring, p.lexer.Range())
                try p.lexer.next();
                var is_single_line = !p.lexer.has_newline_before;
                var properties = ListManaged(js_ast.B.Property).init(p.allocator);

                // "in" expressions are allowed
                const old_allow_in = p.allow_in;
                p.allow_in = true;

                while (p.lexer.token != .t_close_brace) {
                    var property = try p.parsePropertyBinding();
                    properties.append(property) catch unreachable;

                    // Commas after spread elements are not allowed
                    if (property.flags.contains(.is_spread) and p.lexer.token == .t_comma) {
                        p.log.addRangeError(p.source, p.lexer.range(), "Unexpected \",\" after rest pattern") catch unreachable;
                        return error.SyntaxError;
                    }

                    if (p.lexer.token != .t_comma) {
                        break;
                    }

                    if (p.lexer.has_newline_before) {
                        is_single_line = false;
                    }
                    try p.lexer.next();
                    if (p.lexer.has_newline_before) {
                        is_single_line = false;
                    }
                }

                p.allow_in = old_allow_in;

                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }
                try p.lexer.expect(.t_close_brace);

                return p.b(B.Object{
                    .properties = properties.items,
                    .is_single_line = is_single_line,
                }, loc);
            }
        },
        else => {},
    }

    // Anything else (or a destructuring pattern inside a `using` statement)
    // is an error; `expect` reports it, then we return a missing binding.
    try p.lexer.expect(.t_identifier);
    return Binding{ .loc = loc, .data = Prefill.Data.BMissing };
}
|
||
|
||
/// Parse one property inside an object destructuring pattern, e.g. the
/// `a`, `b: c`, `[k]: v = 1`, or `...rest` entries of
/// `{ a, b: c, [k]: v = 1, ...rest }`.
pub fn parsePropertyBinding(p: *P) anyerror!B.Property {
    var key: js_ast.Expr = Expr{ .loc = logger.Loc.Empty, .data = Prefill.Data.EMissing };
    var is_computed = false;

    switch (p.lexer.token) {
        // "...rest" — the rest target must be a plain identifier.
        .t_dot_dot_dot => {
            try p.lexer.next();
            const value = p.b(
                B.Identifier{
                    .ref = p.storeNameInRef(p.lexer.identifier) catch unreachable,
                },
                p.lexer.loc(),
            );
            try p.lexer.expect(.t_identifier);
            return B.Property{
                .key = p.newExpr(E.Missing{}, p.lexer.loc()),

                .flags = Flags.Property.init(.{ .is_spread = true }),
                .value = value,
            };
        },
        // Numeric key, e.g. "{ 123: x }".
        .t_numeric_literal => {
            key = p.newExpr(E.Number{
                .value = p.lexer.number,
            }, p.lexer.loc());
            // check for legacy octal literal
            try p.lexer.next();
        },
        // String key, e.g. "{ 'a-b': x }".
        .t_string_literal => {
            key = try p.parseStringLiteral();
        },
        // BigInt key, e.g. "{ 1n: x }".
        .t_big_integer_literal => {
            key = p.newExpr(E.BigInt{
                .value = p.lexer.identifier,
            }, p.lexer.loc());
            // p.markSyntaxFeature(compat.BigInt, p.lexer.Range())
            try p.lexer.next();
        },
        // Computed key, e.g. "{ [expr]: x }".
        .t_open_bracket => {
            is_computed = true;
            try p.lexer.next();
            key = try p.parseExpr(.comma);
            try p.lexer.expect(.t_close_bracket);
        },
        else => {
            const name = p.lexer.identifier;
            const loc = p.lexer.loc();

            if (!p.lexer.isIdentifierOrKeyword()) {
                try p.lexer.expect(.t_identifier);
            }

            try p.lexer.next();

            key = p.newExpr(E.String{ .data = name }, loc);

            // Shorthand property: "{ a }" or "{ a = default }" — the key
            // doubles as the bound identifier, so return here.
            if (p.lexer.token != .t_colon and p.lexer.token != .t_open_paren) {
                const ref = p.storeNameInRef(name) catch unreachable;
                const value = p.b(B.Identifier{ .ref = ref }, loc);
                var default_value: ?Expr = null;
                if (p.lexer.token == .t_equals) {
                    try p.lexer.next();
                    default_value = try p.parseExpr(.comma);
                }

                return B.Property{
                    .key = key,
                    .value = value,
                    .default_value = default_value,
                };
            }
        },
    }

    // Non-shorthand form: "key: binding" with an optional "= default".
    try p.lexer.expect(.t_colon);
    const value = try p.parseBinding(.{});

    var default_value: ?Expr = null;
    if (p.lexer.token == .t_equals) {
        try p.lexer.next();
        default_value = try p.parseExpr(.comma);
    }

    return B.Property{
        .flags = Flags.Property.init(.{
            .is_computed = is_computed,
        }),
        .key = key,
        .value = value,
        .default_value = default_value,
    };
}
|
||
|
||
/// Parse a comma-separated declarator list ("a = 1, b, c = 2") for a
/// var/let/const/using statement, declaring each binding in the current
/// scope with symbol kind `kind`. Returns the parsed declarators.
fn parseAndDeclareDecls(p: *P, kind: Symbol.Kind, opts: *ParseStatementOptions) anyerror!ListManaged(G.Decl) {
    var decls = ListManaged(G.Decl).init(p.allocator);

    while (true) {
        // Forbid "let let" and "const let" but not "var let"
        if ((kind == .other or kind == .constant) and p.lexer.isContextualKeyword("let")) {
            p.log.addRangeError(p.source, p.lexer.range(), "Cannot use \"let\" as an identifier here") catch unreachable;
        }

        var value: ?js_ast.Expr = null;
        // `inline else` lifts the runtime flag into a comptime value so it
        // can be passed as parseBinding's comptime options argument.
        var local = switch (opts.is_using_statement) {
            inline else => |is_using| try p.parseBinding(.{
                .is_using_statement = is_using,
            }),
        };
        p.declareBinding(kind, &local, opts) catch unreachable;

        // Skip over types
        if (comptime is_typescript_enabled) {
            // "let foo!"
            const is_definite_assignment_assertion = p.lexer.token == .t_exclamation and !p.lexer.has_newline_before;
            if (is_definite_assignment_assertion) {
                try p.lexer.next();
            }

            // "let foo: number"
            // A definite-assignment assertion requires an explicit type
            // annotation, hence the unconditional expect of ":" below.
            if (is_definite_assignment_assertion or p.lexer.token == .t_colon) {
                try p.lexer.expect(.t_colon);
                try p.skipTypeScriptType(.lowest);
            }
        }

        // Optional initializer: "= expr".
        if (p.lexer.token == .t_equals) {
            try p.lexer.next();
            value = try p.parseExpr(.comma);
        }

        decls.append(G.Decl{
            .binding = local,
            .value = value,
        }) catch unreachable;

        // A comma continues the declarator list; anything else ends it.
        if (p.lexer.token != .t_comma) {
            break;
        }
        try p.lexer.next();
    }

    return decls;
}
|
||
|
||
/// Parse a TypeScript "enum" statement starting at the "enum" keyword.
/// Declares the enum symbol, opens a scope for its members, records the
/// namespace metadata used for cross-enum member resolution, and returns
/// either an `S.Enum` statement or `S.TypeScript` for "declare enum".
pub fn parseTypescriptEnumStmt(p: *P, loc: logger.Loc, opts: *ParseStatementOptions) anyerror!Stmt {
    try p.lexer.expect(.t_enum);
    const name_loc = p.lexer.loc();
    const name_text = p.lexer.identifier;
    try p.lexer.expect(.t_identifier);
    var name = LocRef{ .loc = name_loc, .ref = Ref.None };

    // Generate the namespace object
    var arg_ref: Ref = undefined;
    const ts_namespace = p.getOrCreateExportedNamespaceMembers(name_text, opts.is_export, true);
    const exported_members = ts_namespace.exported_members;
    const enum_member_data = js_ast.TSNamespaceMember.Data{ .namespace = exported_members };

    // Declare the enum and create the scope
    const scope_index = p.scopes_in_order.items.len;
    if (!opts.is_typescript_declare) {
        name.ref = try p.declareSymbol(.ts_enum, name_loc, name_text);
        _ = try p.pushScopeForParsePass(.entry, loc);
        p.current_scope.ts_namespace = ts_namespace;
        p.ref_to_ts_namespace_member.putNoClobber(p.allocator, name.ref.?, enum_member_data) catch bun.outOfMemory();
    }

    try p.lexer.expect(.t_open_brace);

    // Parse the body
    var values = std.ArrayList(js_ast.EnumValue).init(p.allocator);
    while (p.lexer.token != .t_close_brace) {
        var value = js_ast.EnumValue{ .loc = p.lexer.loc(), .ref = Ref.None, .name = undefined, .value = null };
        var needs_symbol = false;

        // Parse the name
        if (p.lexer.token == .t_string_literal) {
            value.name = (try p.lexer.toUTF8EString()).slice8();
            // A string-literal member only gets a symbol if it happens to
            // also be a valid identifier (e.g. enum E { "a" = 1 }).
            needs_symbol = js_lexer.isIdentifier(value.name);
        } else if (p.lexer.isIdentifierOrKeyword()) {
            value.name = p.lexer.identifier;
            needs_symbol = true;
        } else {
            try p.lexer.expect(.t_identifier);
        }
        try p.lexer.next();

        // Identifiers can be referenced by other values
        if (!opts.is_typescript_declare and needs_symbol) {
            value.ref = try p.declareSymbol(.other, value.loc, value.name);
        }

        // Parse the initializer
        if (p.lexer.token == .t_equals) {
            try p.lexer.next();
            value.value = try p.parseExpr(.comma);
        }

        values.append(value) catch unreachable;

        // Record the member so other namespaces/enums can resolve it.
        exported_members.put(p.allocator, value.name, .{
            .loc = value.loc,
            .data = .enum_property,
        }) catch bun.outOfMemory();

        // Both "," and ";" are accepted as member separators.
        if (p.lexer.token != .t_comma and p.lexer.token != .t_semicolon) {
            break;
        }

        try p.lexer.next();
    }

    if (!opts.is_typescript_declare) {
        // Avoid a collision with the enum closure argument variable if the
        // enum exports a symbol with the same name as the enum itself:
        //
        //   enum foo {
        //     foo = 123,
        //     bar = foo,
        //   }
        //
        // TypeScript generates the following code in this case:
        //
        //   var foo;
        //   (function (foo) {
        //     foo[foo["foo"] = 123] = "foo";
        //     foo[foo["bar"] = 123] = "bar";
        //   })(foo || (foo = {}));
        //
        // Whereas in this case:
        //
        //   enum foo {
        //     bar = foo as any,
        //   }
        //
        // TypeScript generates the following code:
        //
        //   var foo;
        //   (function (foo) {
        //     foo[foo["bar"] = foo] = "bar";
        //   })(foo || (foo = {}));
        if (p.current_scope.members.contains(name_text)) {
            // Add a "_" to make tests easier to read, since non-bundler tests don't
            // run the renamer. For external-facing things the renamer will avoid
            // collisions automatically so this isn't important for correctness.
            arg_ref = p.newSymbol(.hoisted, strings.cat(p.allocator, "_", name_text) catch unreachable) catch unreachable;
            p.current_scope.generated.push(p.allocator, arg_ref) catch unreachable;
        } else {
            arg_ref = p.declareSymbol(.hoisted, name_loc, name_text) catch unreachable;
        }
        p.ref_to_ts_namespace_member.put(p.allocator, arg_ref, enum_member_data) catch bun.outOfMemory();
        ts_namespace.arg_ref = arg_ref;

        p.popScope();
    }

    try p.lexer.expect(.t_close_brace);

    if (opts.is_typescript_declare) {
        if (opts.is_namespace_scope and opts.is_export) {
            p.has_non_local_export_declare_inside_namespace = true;
        }

        // "declare enum" produces no runtime code.
        return p.s(S.TypeScript{}, loc);
    }

    // Save these for when we do out-of-order enum visiting
    //
    // Make a copy of "scopesInOrder" instead of a slice or index since
    // the original array may be flattened in the future by
    // "popAndFlattenScope"
    p.scopes_in_order_for_enum.putNoClobber(
        p.allocator,
        loc,
        scope_order_clone: {
            // Count the non-null entries first so the clone can be
            // allocated exactly-sized.
            var count: usize = 0;
            for (p.scopes_in_order.items[scope_index..]) |i| {
                if (i != null) count += 1;
            }

            const items = p.allocator.alloc(ScopeOrder, count) catch bun.outOfMemory();
            var i: usize = 0;
            for (p.scopes_in_order.items[scope_index..]) |item| {
                items[i] = item orelse continue;
                i += 1;
            }
            break :scope_order_clone items;
        },
    ) catch bun.outOfMemory();

    return p.s(S.Enum{
        .name = name,
        .arg = arg_ref,
        .values = values.items,
        .is_export = opts.is_export,
    }, loc);
}
|
||
|
||
// Generate a TypeScript namespace object for this namespace's scope. If this
// namespace is another block that is to be merged with an existing namespace,
// use that earlier namespace's object instead.
//
// Returns a freshly-allocated `TSNamespaceScope` in both cases; when merging,
// only the member map is shared with the earlier namespace.
pub fn getOrCreateExportedNamespaceMembers(p: *P, name: []const u8, is_export: bool, is_enum_scope: bool) *js_ast.TSNamespaceScope {
    // Look for an existing member map to merge into; null means this is a
    // brand-new namespace.
    const map = brk: {

        // Merge with a sibling namespace from the same scope
        if (p.current_scope.members.get(name)) |existing_member| {
            if (p.ref_to_ts_namespace_member.get(existing_member.ref)) |member_data| {
                if (member_data == .namespace)
                    break :brk member_data.namespace;
            }
        }

        // Merge with a sibling namespace from a different scope
        if (is_export) {
            if (p.current_scope.ts_namespace) |ns| {
                if (ns.exported_members.get(name)) |member| {
                    if (member.data == .namespace)
                        break :brk member.data.namespace;
                }
            }
        }

        break :brk null;
    };

    if (map) |existing| {
        // Merging: new scope object, shared member map.
        return bun.create(p.allocator, js_ast.TSNamespaceScope, .{
            .exported_members = existing,
            .is_enum_scope = is_enum_scope,
            .arg_ref = Ref.None,
        });
    }

    // Otherwise, generate a new namespace object
    // Batch the allocation of the namespace object and the map into a single allocation.
    const Pair = struct {
        map: js_ast.TSNamespaceMemberMap,
        scope: js_ast.TSNamespaceScope,
    };

    // The map must outlive the returned scope pointer; both live in the
    // same heap allocation, so `&pair.map` stays valid.
    var pair = p.allocator.create(Pair) catch bun.outOfMemory();
    pair.map = .{};
    pair.scope = .{
        .exported_members = &pair.map,
        .is_enum_scope = is_enum_scope,
        .arg_ref = Ref.None,
    };

    return &pair.scope;
}
|
||
|
||
/// Parse the braced clause of an "export { ... }" statement, including the
/// TypeScript "export { type x }" forms. Type-only specifiers are consumed
/// but not appended to the clause list; `had_type_only_exports` records
/// that they existed so an otherwise-empty export isn't dropped.
fn parseExportClause(p: *P) !ExportClauseResult {
    var items = ListManaged(js_ast.ClauseItem).initCapacity(p.allocator, 1) catch unreachable;
    try p.lexer.expect(.t_open_brace);
    var is_single_line = !p.lexer.has_newline_before;
    // Records the first keyword-used-as-name; 0 means "none seen yet".
    var first_non_identifier_loc = logger.Loc{ .start = 0 };
    var had_type_only_exports = false;

    while (p.lexer.token != .t_close_brace) {
        var alias = try p.parseClauseAlias("export");
        var alias_loc = p.lexer.loc();

        const name = LocRef{
            .loc = alias_loc,
            .ref = p.storeNameInRef(alias) catch unreachable,
        };
        const original_name = alias;

        // The name can actually be a keyword if we're really an "export from"
        // statement. However, we won't know until later. Allow keywords as
        // identifiers for now and throw an error later if there's no "from".
        //
        // // This is fine
        // export { default } from 'path'
        //
        // // This is a syntax error
        // export { default }
        //
        if (p.lexer.token != .t_identifier and first_non_identifier_loc.start == 0) {
            first_non_identifier_loc = p.lexer.loc();
        }
        try p.lexer.next();

        if (comptime is_typescript_enabled) {
            // "type" may be a type-only export marker or a plain name;
            // disambiguate by looking at what follows it.
            if (strings.eqlComptime(alias, "type") and p.lexer.token != .t_comma and p.lexer.token != .t_close_brace) {
                if (p.lexer.isContextualKeyword("as")) {
                    try p.lexer.next();

                    if (p.lexer.isContextualKeyword("as")) {
                        alias = try p.parseClauseAlias("export");
                        alias_loc = p.lexer.loc();
                        try p.lexer.next();

                        if (p.lexer.token != .t_comma and p.lexer.token != .t_close_brace) {
                            // "export { type as as as }"
                            // "export { type as as foo }"
                            // "export { type as as 'foo' }"
                            _ = p.parseClauseAlias("export") catch "";
                            had_type_only_exports = true;
                            try p.lexer.next();
                        } else {
                            // "export { type as as }"
                            items.append(js_ast.ClauseItem{
                                .alias = alias,
                                .alias_loc = alias_loc,
                                .name = name,
                                .original_name = original_name,
                            }) catch unreachable;
                        }
                    } else if (p.lexer.token != .t_comma and p.lexer.token != .t_close_brace) {
                        // "export { type as xxx }"
                        // "export { type as 'xxx' }"
                        alias = try p.parseClauseAlias("export");
                        alias_loc = p.lexer.loc();
                        try p.lexer.next();

                        items.append(js_ast.ClauseItem{
                            .alias = alias,
                            .alias_loc = alias_loc,
                            .name = name,
                            .original_name = original_name,
                        }) catch unreachable;
                    } else {
                        // "export { type as }" — type-only, nothing to emit.
                        had_type_only_exports = true;
                    }
                } else {
                    // The name can actually be a keyword if we're really an "export from"
                    // statement. However, we won't know until later. Allow keywords as
                    // identifiers for now and throw an error later if there's no "from".
                    //
                    // // This is fine
                    // export { default } from 'path'
                    //
                    // // This is a syntax error
                    // export { default }
                    //
                    if (p.lexer.token != .t_identifier and first_non_identifier_loc.start == 0) {
                        first_non_identifier_loc = p.lexer.loc();
                    }

                    // "export { type xx }"
                    // "export { type xx as yy }"
                    // "export { type xx as if }"
                    // "export { type default } from 'path'"
                    // "export { type default as if } from 'path'"
                    // "export { type xx as 'yy' }"
                    // "export { type 'xx' } from 'mod'"
                    _ = p.parseClauseAlias("export") catch "";
                    try p.lexer.next();

                    if (p.lexer.isContextualKeyword("as")) {
                        try p.lexer.next();
                        _ = p.parseClauseAlias("export") catch "";
                        try p.lexer.next();
                    }

                    had_type_only_exports = true;
                }
            } else {
                // Ordinary specifier, optionally renamed with "as".
                if (p.lexer.isContextualKeyword("as")) {
                    try p.lexer.next();
                    alias = try p.parseClauseAlias("export");
                    alias_loc = p.lexer.loc();

                    try p.lexer.next();
                }

                items.append(js_ast.ClauseItem{
                    .alias = alias,
                    .alias_loc = alias_loc,
                    .name = name,
                    .original_name = original_name,
                }) catch unreachable;
            }
        } else {
            // Non-TypeScript path: ordinary specifier, optionally renamed.
            if (p.lexer.isContextualKeyword("as")) {
                try p.lexer.next();
                alias = try p.parseClauseAlias("export");
                alias_loc = p.lexer.loc();

                try p.lexer.next();
            }

            items.append(js_ast.ClauseItem{
                .alias = alias,
                .alias_loc = alias_loc,
                .name = name,
                .original_name = original_name,
            }) catch unreachable;
        }

        // we're done if there's no comma
        if (p.lexer.token != .t_comma) {
            break;
        }

        if (p.lexer.has_newline_before) {
            is_single_line = false;
        }
        try p.lexer.next();
        if (p.lexer.has_newline_before) {
            is_single_line = false;
        }
    }

    if (p.lexer.has_newline_before) {
        is_single_line = false;
    }
    try p.lexer.expect(.t_close_brace);

    // Throw an error here if we found a keyword earlier and this isn't an
    // "export from" statement after all
    if (first_non_identifier_loc.start != 0 and !p.lexer.isContextualKeyword("from")) {
        const r = js_lexer.rangeOfIdentifier(p.source, first_non_identifier_loc);
        try p.lexer.addRangeError(r, "Expected identifier but found \"{s}\"", .{p.source.textForRange(r)}, true);
        return error.SyntaxError;
    }

    return ExportClauseResult{
        .clauses = items.items,
        .is_single_line = is_single_line,
        .had_type_only_exports = had_type_only_exports,
    };
}
|
||
|
||
/// Parse an import/export module specifier string plus any trailing import
/// attributes ("with { ... }" or the deprecated "assert { ... }"), mapping
/// the Bun-specific attributes (type/embed/bunBakeGraph) onto the returned
/// `ParsedPath.import_tag`.
pub fn parsePath(p: *P) !ParsedPath {
    const path_text = try p.lexer.toUTF8EString();
    var path = ParsedPath{
        .loc = p.lexer.loc(),
        .text = path_text.slice8(),
        .is_macro = false,
        .import_tag = .none,
    };

    // A template literal with no substitutions is accepted as a specifier
    // in some positions; everything else must be a plain string literal.
    if (p.lexer.token == .t_no_substitution_template_literal) {
        try p.lexer.next();
    } else {
        try p.lexer.expect(.t_string_literal);
    }

    if (!p.lexer.has_newline_before and (
        // Import Assertions are deprecated.
        // Import Attributes are the new way to do this.
        // But some code may still use "assert"
        // We support both and treat them identically.
        // Once Prettier & TypeScript support import attributes, we will add runtime support
        p.lexer.isContextualKeyword("assert") or p.lexer.token == .t_with))
    {
        try p.lexer.next();
        try p.lexer.expect(.t_open_brace);

        // The attribute keys Bun understands; unknown keys are parsed and
        // ignored.
        const SupportedAttribute = enum {
            type,
            embed,
            bunBakeGraph,
        };

        var has_seen_embed_true = false;

        while (p.lexer.token != .t_close_brace) {
            const supported_attribute: ?SupportedAttribute = brk: {
                // Parse the key
                if (p.lexer.isIdentifierOrKeyword()) {
                    inline for (comptime std.enums.values(SupportedAttribute)) |t| {
                        if (strings.eqlComptime(p.lexer.identifier, @tagName(t))) {
                            break :brk t;
                        }
                    }
                } else if (p.lexer.token == .t_string_literal) {
                    // Keys may also be quoted: with { "type": "json" }.
                    const string_literal_text = (try p.lexer.toUTF8EString()).slice8();
                    inline for (comptime std.enums.values(SupportedAttribute)) |t| {
                        if (strings.eqlComptime(string_literal_text, @tagName(t))) {
                            break :brk t;
                        }
                    }
                } else {
                    try p.lexer.expect(.t_identifier);
                }

                break :brk null;
            };

            try p.lexer.next();
            try p.lexer.expect(.t_colon);

            try p.lexer.expect(.t_string_literal);
            const string_literal_text = (try p.lexer.toUTF8EString()).slice8();
            if (supported_attribute) |attr| {
                switch (attr) {
                    .type => {
                        const type_attr = string_literal_text;
                        if (strings.eqlComptime(type_attr, "macro")) {
                            path.is_macro = true;
                        } else if (strings.eqlComptime(type_attr, "sqlite")) {
                            path.import_tag = .with_type_sqlite;
                            // "embed: 'true'" may appear before or after
                            // "type: 'sqlite'"; both orders are handled.
                            if (has_seen_embed_true) {
                                path.import_tag = .with_type_sqlite_embedded;
                            }
                        } else if (strings.eqlComptime(type_attr, "json")) {
                            path.import_tag = .with_type_json;
                        } else if (strings.eqlComptime(type_attr, "toml")) {
                            path.import_tag = .with_type_toml;
                        } else if (strings.eqlComptime(type_attr, "text")) {
                            path.import_tag = .with_type_text;
                        } else if (strings.eqlComptime(type_attr, "file")) {
                            path.import_tag = .with_type_file;
                        }
                    },
                    .embed => {
                        if (strings.eqlComptime(string_literal_text, "true")) {
                            has_seen_embed_true = true;
                            if (path.import_tag == .with_type_sqlite) {
                                path.import_tag = .with_type_sqlite_embedded;
                            }
                        }
                    },
                    .bunBakeGraph => {
                        if (strings.eqlComptime(string_literal_text, "ssr")) {
                            path.import_tag = .bake_resolve_to_ssr_graph;
                        } else {
                            try p.lexer.addRangeError(p.lexer.range(), "'bunBakeGraph' can only be set to 'ssr'", .{}, true);
                        }
                    },
                }
            }

            if (p.lexer.token != .t_comma) {
                break;
            }

            try p.lexer.next();
        }

        try p.lexer.expect(.t_close_brace);
    }

    return path;
}
|
||
|
||
// TODO: implement non-BMP code point validation; currently a no-op stub
// (see the commented-out call site in declareSymbolMaybeGenerated).
pub fn checkForNonBMPCodePoint(_: *P, _: logger.Loc, _: string) void {}
|
||
|
||
/// Parse statements until token `eend` (e.g. "}" or end-of-file) is reached,
/// handling the directive prologue ("use strict"/"use asm"), preserved
/// comments, TypeScript type-only statements, and the ASI-after-return
/// warning. The end token itself is not consumed.
fn parseStmtsUpTo(p: *P, eend: js_lexer.T, _opts: *ParseStatementOptions) ![]Stmt {
    // Copy the options so per-statement mutation can't leak to the caller.
    var opts = _opts.*;
    var stmts = StmtList.init(p.allocator);

    // Start location of a bare "return" with no semicolon; -1 means none.
    var returnWithoutSemicolonStart: i32 = -1;
    opts.lexical_decl = .allow_all;
    var isDirectivePrologue = true;

    while (true) {
        // Flush any comments the lexer was asked to preserve before this
        // statement.
        for (p.lexer.comments_to_preserve_before.items) |comment| {
            try stmts.append(p.s(S.Comment{
                .text = comment.text,
            }, p.lexer.loc()));
        }
        p.lexer.comments_to_preserve_before.clearRetainingCapacity();

        if (p.lexer.token == eend) {
            break;
        }

        var current_opts = opts;
        var stmt = try p.parseStmt(&current_opts);

        // Skip TypeScript types entirely
        if (is_typescript_enabled) {
            switch (stmt.data) {
                .s_type_script => {
                    continue;
                },
                else => {},
            }
        }

        var skip = stmt.data == .s_empty;
        // Parse one or more directives at the beginning
        if (isDirectivePrologue) {
            isDirectivePrologue = false;
            switch (stmt.data) {
                .s_expr => |expr| {
                    switch (expr.value.data) {
                        .e_string => |str| {
                            // Template literals are never directives.
                            if (!str.prefer_template) {
                                // Still in the prologue; keep scanning.
                                isDirectivePrologue = true;

                                if (str.eqlComptime("use strict")) {
                                    skip = true;
                                    // Track "use strict" directives
                                    p.current_scope.strict_mode = .explicit_strict_mode;
                                    if (p.current_scope == p.module_scope)
                                        p.module_scope_directive_loc = stmt.loc;
                                } else if (str.eqlComptime("use asm")) {
                                    // "use asm" is dropped entirely.
                                    skip = true;
                                    stmt.data = Prefill.Data.SEmpty;
                                } else {
                                    // Any other directive string is kept
                                    // verbatim as an S.Directive.
                                    stmt = Stmt.alloc(S.Directive, S.Directive{
                                        .value = str.slice(p.allocator),
                                    }, stmt.loc);
                                }
                            }
                        },
                        else => {},
                    }
                },
                else => {},
            }
        }

        if (!skip)
            try stmts.append(stmt);

        // Warn about ASI and return statements. Here's an example of code with
        // this problem: https://github.com/rollup/rollup/issues/3729
        if (!p.options.suppress_warnings_about_weird_code) {
            var needsCheck = true;
            switch (stmt.data) {
                .s_return => |ret| {
                    if (ret.value == null and !p.latest_return_had_semicolon) {
                        returnWithoutSemicolonStart = stmt.loc.start;
                        needsCheck = false;
                    }
                },
                else => {},
            }

            if (needsCheck and returnWithoutSemicolonStart != -1) {
                switch (stmt.data) {
                    .s_expr => {
                        try p.log.addWarning(
                            p.source,
                            // +6 skips past the "return" keyword itself.
                            logger.Loc{ .start = returnWithoutSemicolonStart + 6 },
                            "The following expression is not returned because of an automatically-inserted semicolon",
                        );
                    },
                    else => {},
                }

                returnWithoutSemicolonStart = -1;
            }
        }
    }

    return stmts.items;
}
|
||
|
||
/// Report the use of a construct that is forbidden in strict mode. If the
/// current scope is strict, emits an error explaining *why* the file is
/// strict (import/export/TLA/class keyword); otherwise, if the output
/// format forces strict mode (bundled ESM) and the feature can't be
/// transformed away, emits a format-related error instead.
fn markStrictModeFeature(p: *P, feature: StrictModeFeature, r: logger.Range, detail: string) anyerror!void {
    // for-in var initializers can be lowered, so they are tolerated when
    // strictness only comes from the output format.
    const can_be_transformed = feature == StrictModeFeature.for_in_var_init;
    const text = switch (feature) {
        .with_statement => "With statements",
        .delete_bare_name => "\"delete\" of a bare identifier",
        .for_in_var_init => "Variable initializers within for-in loops",
        .eval_or_arguments => try std.fmt.allocPrint(p.allocator, "Declarations with the name \"{s}\"", .{detail}),
        .reserved_word => try std.fmt.allocPrint(p.allocator, "\"{s}\" is a reserved word and", .{detail}),
        .legacy_octal_literal => "Legacy octal literals",
        .legacy_octal_escape => "Legacy octal escape sequences",
        .if_else_function_stmt => "Function declarations inside if statements",
        // else => {
        //     text = "This feature";
        // },
    };

    const scope = p.current_scope;
    if (p.isStrictMode()) {
        // Build an attached note pointing at whatever made this scope strict.
        var why: string = "";
        var where: logger.Range = logger.Range.None;
        switch (scope.strict_mode) {
            .implicit_strict_mode_import => {
                where = p.esm_import_keyword;
            },
            .implicit_strict_mode_export => {
                where = p.esm_export_keyword;
            },
            .implicit_strict_mode_top_level_await => {
                where = p.top_level_await_keyword;
            },
            .implicit_strict_mode_class => {
                why = "All code inside a class is implicitly in strict mode";
                where = p.enclosing_class_keyword;
            },
            else => {},
        }
        if (why.len == 0) {
            why = try std.fmt.allocPrint(p.allocator, "This file is implicitly in strict mode because of the \"{s}\" keyword here", .{p.source.textForRange(where)});
        }
        var notes = try p.allocator.alloc(logger.Data, 1);
        notes[0] = logger.rangeData(p.source, where, why);
        try p.log.addRangeErrorWithNotes(p.source, r, try std.fmt.allocPrint(p.allocator, "{s} cannot be used in strict mode", .{text}), notes);
    } else if (!can_be_transformed and p.isStrictModeOutputFormat()) {
        try p.log.addRangeError(p.source, r, try std.fmt.allocPrint(p.allocator, "{s} cannot be used with the ESM output format due to strict mode", .{text}));
    }
}
|
||
|
||
/// Whether the current scope is in strict mode (explicitly or implicitly).
pub inline fn isStrictMode(p: *P) bool {
    return switch (p.current_scope.strict_mode) {
        .sloppy_mode => false,
        else => true,
    };
}
|
||
|
||
/// Whether the output format itself imposes strict mode: true only when
/// bundling to an ESM output format.
pub inline fn isStrictModeOutputFormat(p: *P) bool {
    if (!p.options.bundle) return false;
    return p.options.output_format.isESM();
}
|
||
|
||
/// Declare a CommonJS wrapper symbol ("exports", "module", ...) in the
/// module scope, merging with a user-written "var name" when that is safe.
/// Returns the ref to use for the symbol.
pub fn declareCommonJSSymbol(p: *P, comptime kind: Symbol.Kind, comptime name: string) !Ref {
    const name_hash = comptime Scope.getMemberHash(name);
    const member = p.module_scope.getMemberWithHash(name, name_hash);

    // If the code declared this symbol using "var name", then this is actually
    // not a collision. For example, node will let you do this:
    //
    //   var exports;
    //   module.exports.foo = 123;
    //   console.log(exports.foo);
    //
    // This works because node's implementation of CommonJS wraps the entire
    // source file like this:
    //
    //   (function(require, exports, module, __filename, __dirname) {
    //     var exports;
    //     module.exports.foo = 123;
    //     console.log(exports.foo);
    //   })
    //
    // Both the "exports" argument and "var exports" are hoisted variables, so
    // they don't collide.
    if (member) |_member| {
        if (p.symbols.items[_member.ref.innerIndex()].kind == .hoisted and kind == .hoisted and !p.has_es_module_syntax) {
            return _member.ref;
        }
    }

    // Create a new symbol if we didn't merge with an existing one above
    const ref = try p.newSymbol(kind, name);

    if (member == null) {
        try p.module_scope.members.put(p.allocator, name, Scope.Member{ .ref = ref, .loc = logger.Loc.Empty });
        return ref;
    }

    // If the variable was declared, then it shadows this symbol. The code in
    // this module will be unable to reference this symbol. However, we must
    // still add the symbol to the scope so it gets minified (automatically-
    // generated code may still reference the symbol).
    try p.module_scope.generated.push(p.allocator, ref);
    return ref;
}
|
||
|
||
/// Declare a compiler-generated symbol with no source location.
/// The bundler runs the renamer, so it is ok to not append a hash in that
/// case; otherwise the name is made unique via `generatedSymbolName`.
fn declareGeneratedSymbol(p: *P, kind: Symbol.Kind, comptime name: string) !Ref {
    const final_name = if (p.options.bundle) name else generatedSymbolName(name);
    return try declareSymbolMaybeGenerated(p, kind, logger.Loc.Empty, final_name, true);
}
|
||
|
||
/// Declare a user-written symbol in the current scope. Thin wrapper over
/// `declareSymbolMaybeGenerated` with `is_generated = false`, forced inline.
fn declareSymbol(p: *P, kind: Symbol.Kind, loc: logger.Loc, name: string) !Ref {
    return try @call(bun.callmod_inline, declareSymbolMaybeGenerated, .{ p, kind, loc, name, false });
}
|
||
|
||
/// Declare `name` in the current scope, resolving collisions with any
/// existing member per `Scope.canMergeSymbols`. Generated symbols skip the
/// strict-mode reserved-word check and the merge logic, instead linking the
/// new symbol to the existing one. Returns the ref the caller should use.
fn declareSymbolMaybeGenerated(p: *P, kind: Symbol.Kind, loc: logger.Loc, name: string, comptime is_generated: bool) !Ref {
    // p.checkForNonBMPCodePoint(loc, name)
    if (comptime !is_generated) {
        // Forbid declaring a symbol with a reserved word in strict mode
        // (the pointer compare against arguments_str skips the lookup for
        // the common interned "arguments" string).
        if (p.isStrictMode() and name.ptr != arguments_str.ptr and js_lexer.StrictModeReservedWords.has(name)) {
            try p.markStrictModeFeature(.reserved_word, js_lexer.rangeOfIdentifier(p.source, loc), name);
        }
    }

    // Allocate a new symbol
    var ref = try p.newSymbol(kind, name);

    const scope = p.current_scope;
    const entry = try scope.members.getOrPut(p.allocator, name);
    if (entry.found_existing) {
        const existing = entry.value_ptr.*;
        var symbol: *Symbol = &p.symbols.items[existing.ref.innerIndex()];

        if (comptime !is_generated) {
            switch (scope.canMergeSymbols(symbol.kind, kind, is_typescript_enabled)) {
                .forbidden => {
                    try p.log.addSymbolAlreadyDeclaredError(p.allocator, p.source, symbol.original_name, loc, existing.loc);
                    return existing.ref;
                },

                .keep_existing => {
                    ref = existing.ref;
                },

                .replace_with_new => {
                    // Forward references to the old symbol onto the new one.
                    symbol.link = ref;

                    // If these are both functions, remove the overwritten declaration
                    if (kind.isFunction() and symbol.kind.isFunction()) {
                        symbol.remove_overwritten_function_declaration = true;
                    }
                },

                .become_private_get_set_pair => {
                    ref = existing.ref;
                    symbol.kind = .private_get_set_pair;
                },

                .become_private_static_get_set_pair => {
                    ref = existing.ref;
                    symbol.kind = .private_static_get_set_pair;
                },

                .overwrite_with_new => {},
            }
        } else {
            // Generated symbols never conflict: alias the new ref to the
            // existing symbol instead.
            p.symbols.items[ref.innerIndex()].link = existing.ref;
        }
    }
    // Record (or overwrite) the scope member entry with the winning ref.
    entry.key_ptr.* = name;
    entry.value_ptr.* = js_ast.Scope.Member{ .ref = ref, .loc = loc };
    if (comptime is_generated) {
        try p.module_scope.generated.push(p.allocator, ref);
    }
    return ref;
}
|
||
|
||
/// Validate the (optional) name of a parsed function against contextual
/// keyword restrictions: an async function may not be named "await", and a
/// generator function *expression* (not declaration) may not be named "yield".
/// Errors are reported to the log; parsing continues.
fn validateFunctionName(p: *P, func: G.Fn, kind: FunctionKind) void {
    if (func.name) |name| {
        const original_name = p.symbols.items[name.ref.?.innerIndex()].original_name;

        if (func.flags.contains(.is_async) and strings.eqlComptime(original_name, "await")) {
            p.log.addRangeError(
                p.source,
                js_lexer.rangeOfIdentifier(p.source, name.loc),
                "An async function cannot be named \"await\"",
            ) catch unreachable;
        } else if (kind == .expr and func.flags.contains(.is_generator) and strings.eqlComptime(original_name, "yield")) {
            p.log.addRangeError(
                p.source,
                js_lexer.rangeOfIdentifier(p.source, name.loc),
                // Fixed grammar: previously read "An generator function ...".
                "A generator function expression cannot be named \"yield\"",
            ) catch unreachable;
        }
    }
}
|
||
|
||
/// Parse a function expression ("function ...", optionally preceded by
/// "async", which the caller has already consumed). The current token is the
/// `function` keyword on entry. Pushes/pops a `.function_args` scope around
/// the optional name, type parameters, and `parseFn`.
fn parseFnExpr(p: *P, loc: logger.Loc, is_async: bool, async_range: logger.Range) !Expr {
    try p.lexer.next();
    const is_generator = p.lexer.token == T.t_asterisk;
    if (is_generator) {
        // p.markSyntaxFeature()
        try p.lexer.next();
    } else if (is_async) {
        // p.markLoweredSyntaxFeature(compat.AsyncAwait, asyncRange, compat.Generator)
    }

    var name: ?js_ast.LocRef = null;

    _ = p.pushScopeForParsePass(.function_args, loc) catch unreachable;

    // The name is optional
    if (p.lexer.token == .t_identifier) {
        const text = p.lexer.identifier;

        // Don't declare the name "arguments" since it's shadowed and inaccessible
        name = js_ast.LocRef{
            .loc = p.lexer.loc(),
            .ref = if (text.len > 0 and !strings.eqlComptime(text, "arguments"))
                try p.declareSymbol(.hoisted_function, p.lexer.loc(), text)
            else
                try p.newSymbol(.hoisted_function, text),
        };

        try p.lexer.next();
    }

    // Even anonymous functions can have TypeScript type parameters
    if (comptime is_typescript_enabled) {
        _ = try p.skipTypeScriptTypeParameters(.{ .allow_const_modifier = true });
    }

    const func = try p.parseFn(name, FnOrArrowDataParse{
        .needs_async_loc = loc,
        .async_range = async_range,
        // Inside an async fn, "await" is an expression keyword; otherwise it
        // is a plain identifier. Same for "yield" in generators.
        .allow_await = if (is_async) .allow_expr else .allow_ident,
        .allow_yield = if (is_generator) .allow_expr else .allow_ident,
    });
    p.fn_or_arrow_data_parse.has_argument_decorators = false;

    p.validateFunctionName(func, .expr);
    p.popScope();

    return p.newExpr(js_ast.E.Function{
        .func = func,
    }, loc);
}
|
||
|
||
/// Parse a braced function body ("{ ...statements... }") inside a fresh
/// `.function_body` scope. Saves and restores `fn_or_arrow_data_parse` and
/// `allow_in` around the body ("in" is always allowed inside a function body
/// even when the caller is parsing a for-loop header).
fn parseFnBody(p: *P, data: *FnOrArrowDataParse) !G.FnBody {
    const oldFnOrArrowData = p.fn_or_arrow_data_parse;
    const oldAllowIn = p.allow_in;
    p.fn_or_arrow_data_parse = data.*;
    p.allow_in = true;

    const loc = p.lexer.loc();
    _ = try p.pushScopeForParsePass(Scope.Kind.function_body, p.lexer.loc());
    defer p.popScope();

    try p.lexer.expect(.t_open_brace);
    var opts = ParseStatementOptions{};
    const stmts = try p.parseStmtsUpTo(.t_close_brace, &opts);
    try p.lexer.next();

    // Restore the caller's parsing context.
    p.allow_in = oldAllowIn;
    p.fn_or_arrow_data_parse = oldFnOrArrowData;
    return G.FnBody{ .loc = loc, .stmts = stmts };
}
|
||
|
||
/// Parse the "=> body" part of an arrow function whose argument list `args`
/// has already been parsed. The body is either a braced statement block or a
/// single expression (recorded with `prefer_expr = true` and wrapped in an
/// implicit return).
fn parseArrowBody(p: *P, args: []js_ast.G.Arg, data: *FnOrArrowDataParse) !E.Arrow {
    const arrow_loc = p.lexer.loc();

    // Newlines are not allowed before "=>"
    if (p.lexer.has_newline_before) {
        try p.log.addRangeError(p.source, p.lexer.range(), "Unexpected newline before \"=>\"");
        return error.SyntaxError;
    }

    try p.lexer.expect(T.t_equals_greater_than);

    // Bind the argument names in the enclosing function-args scope.
    for (args) |*arg| {
        var opts = ParseStatementOptions{};
        try p.declareBinding(Symbol.Kind.hoisted, &arg.binding, &opts);
    }

    // The ability to use "this" and "super()" is inherited by arrow functions
    data.allow_super_call = p.fn_or_arrow_data_parse.allow_super_call;
    data.allow_super_property = p.fn_or_arrow_data_parse.allow_super_property;
    data.is_this_disallowed = p.fn_or_arrow_data_parse.is_this_disallowed;

    // Block body: "() => { ... }"
    if (p.lexer.token == .t_open_brace) {
        const body = try p.parseFnBody(data);
        p.after_arrow_body_loc = p.lexer.loc();
        return E.Arrow{ .args = args, .body = body };
    }

    // Expression body: "() => expr"
    _ = try p.pushScopeForParsePass(Scope.Kind.function_body, arrow_loc);
    defer p.popScope();

    // Snapshot the parse state by value; restored after the expression.
    // (Uses toBytes/bytesToValue to force a raw copy of the struct.)
    var old_fn_or_arrow_data = std.mem.toBytes(p.fn_or_arrow_data_parse);

    p.fn_or_arrow_data_parse = data.*;
    const expr = try p.parseExpr(Level.comma);
    p.fn_or_arrow_data_parse = std.mem.bytesToValue(@TypeOf(p.fn_or_arrow_data_parse), &old_fn_or_arrow_data);

    // Wrap the expression in an implicit "return expr;" statement.
    var stmts = try p.allocator.alloc(Stmt, 1);
    stmts[0] = p.s(S.Return{ .value = expr }, expr.loc);
    return E.Arrow{ .args = args, .prefer_expr = true, .body = G.FnBody{ .loc = arrow_loc, .stmts = stmts } };
}
|
||
|
||
/// Recursively declare every identifier bound by `binding` (a plain
/// identifier, array destructuring pattern, or object destructuring pattern)
/// with symbol kind `kind`. TypeScript `declare` statements do not introduce
/// runtime bindings unless they are exported from inside a namespace scope.
fn declareBinding(p: *P, kind: Symbol.Kind, binding: *BindingNodeIndex, opts: *ParseStatementOptions) anyerror!void {
    switch (binding.data) {
        .b_missing => {},
        .b_identifier => |bind| {
            if (!opts.is_typescript_declare or (opts.is_namespace_scope and opts.is_export)) {
                bind.ref = try p.declareSymbol(kind, binding.loc, p.loadNameFromRef(bind.ref));
            }
        },
        .b_array => |bind| {
            for (bind.items) |*item| {
                // Propagate failures (e.g. allocation failure or a
                // redeclaration error path) instead of panicking via
                // `catch unreachable`; this function already returns
                // `anyerror!void`, so callers are prepared for errors.
                try p.declareBinding(kind, &item.binding, opts);
            }
        },
        .b_object => |bind| {
            for (bind.properties) |*prop| {
                try p.declareBinding(kind, &prop.value, opts);
            }
        },
    }
}
|
||
|
||
/// Encode a name as a Ref without allocating a symbol.
/// If `name` points into the source text, the Ref stores the byte offset and
/// length directly (`.source_contents_slice` tag) so the string can be
/// recovered later by slicing `p.source.contents`. Otherwise the name is
/// appended to `p.allocated_names` and the Ref stores its index.
/// `loadNameFromRef` is the inverse.
pub fn storeNameInRef(p: *P, name: string) !Ref {
    if (comptime track_symbol_usage_during_parse_pass) {
        // First-pass bookkeeping: mark this name as used if it is tracked.
        if (p.parse_pass_symbol_uses.getPtr(name)) |res| {
            res.used = true;
        }
    }

    // Does `name` lie entirely within the source buffer?
    if (@intFromPtr(p.source.contents.ptr) <= @intFromPtr(name.ptr) and (@intFromPtr(name.ptr) + name.len) <= (@intFromPtr(p.source.contents.ptr) + p.source.contents.len)) {
        return Ref.initSourceEnd(.{
            // Byte offset of the name within the source text.
            .source_index = @intCast(@intFromPtr(name.ptr) - @intFromPtr(p.source.contents.ptr)),
            // Length of the name, stored in the inner-index field.
            .inner_index = @intCast(name.len),
            .tag = .source_contents_slice,
        });
    } else {
        const inner_index: u31 = @intCast(p.allocated_names.items.len);
        try p.allocated_names.append(p.allocator, name);
        return Ref.init(
            inner_index,
            p.source.index.get(),
            false,
        );
    }
}
|
||
|
||
/// Recover the string a Ref refers to, based on its tag:
/// - `.symbol`: the symbol table's original name
/// - `.source_contents_slice`: a slice of the source text
///   (offset = sourceIndex, length = innerIndex — see `storeNameInRef`)
/// - `.allocated_name`: an entry in `p.allocated_names`
/// Any other tag indicates a parser bug and panics.
pub fn loadNameFromRef(p: *P, ref: Ref) string {
    return switch (ref.tag) {
        .symbol => p.symbols.items[ref.innerIndex()].original_name,
        .source_contents_slice => p.source.contents[ref.sourceIndex() .. ref.sourceIndex() + ref.innerIndex()],
        .allocated_name => p.allocated_names.items[ref.innerIndex()],
        else => @panic("Internal error: JS parser tried to load an invalid name from a Ref"),
    };
}
|
||
|
||
/// Parse an expression that begins with the contextual keyword "async".
/// Assumes "async" has already been consumed and the lexer is on the next
/// token. Handles: "async function", "async => ...", "async x => ...",
/// "async(...)", "async<T>(...)" (TypeScript), and falls back to treating
/// "async" as a plain identifier.
pub fn parseAsyncPrefixExpr(p: *P, async_range: logger.Range, level: Level) !Expr {
    // "async function() {}" — a newline after "async" breaks the association
    // (ASI), in which case "async" is just an identifier.
    if (!p.lexer.has_newline_before and p.lexer.token == T.t_function) {
        return try p.parseFnExpr(async_range.loc, true, async_range);
    }

    // Check the precedence level to avoid parsing an arrow function in
    // "new async () => {}". This also avoids parsing "new async()" as
    // "new (async())()" instead.
    if (!p.lexer.has_newline_before and level.lt(.member)) {
        switch (p.lexer.token) {
            // "async => {}" — here "async" is the sole argument name.
            .t_equals_greater_than => {
                if (level.lte(.assign)) {
                    var args = try p.allocator.alloc(G.Arg, 1);
                    args[0] = G.Arg{ .binding = p.b(
                        B.Identifier{
                            .ref = try p.storeNameInRef("async"),
                        },
                        async_range.loc,
                    ) };
                    _ = p.pushScopeForParsePass(.function_args, async_range.loc) catch unreachable;
                    var data = FnOrArrowDataParse{
                        .needs_async_loc = async_range.loc,
                    };
                    const arrow_body = try p.parseArrowBody(args, &data);
                    p.popScope();
                    return p.newExpr(arrow_body, async_range.loc);
                }
            },
            // "async x => {}" — an async arrow with one unparenthesized arg.
            .t_identifier => {
                if (level.lte(.assign)) {
                    // p.markLoweredSyntaxFeature();

                    const ref = try p.storeNameInRef(p.lexer.identifier);
                    var args = try p.allocator.alloc(G.Arg, 1);
                    args[0] = G.Arg{ .binding = p.b(
                        B.Identifier{
                            .ref = ref,
                        },
                        p.lexer.loc(),
                    ) };
                    try p.lexer.next();

                    _ = try p.pushScopeForParsePass(.function_args, async_range.loc);
                    defer p.popScope();

                    var data = FnOrArrowDataParse{
                        .allow_await = .allow_expr,
                        .needs_async_loc = args[0].binding.loc,
                    };
                    var arrowBody = try p.parseArrowBody(args, &data);
                    arrowBody.is_async = true;
                    return p.newExpr(arrowBody, async_range.loc);
                }
            },

            // "async()"
            // "async () => {}"
            .t_open_paren => {
                try p.lexer.next();
                // parseParenExpr disambiguates call vs. arrow args.
                return p.parseParenExpr(async_range.loc, level, ParenExprOpts{ .is_async = true, .async_range = async_range });
            },

            // "async<T>()"
            // "async <T>() => {}"
            .t_less_than => {
                // Only in TypeScript; in TSX a leading "<" might instead be a
                // JSX element, so consult isTSArrowFnJSX first.
                if (is_typescript_enabled and (!is_jsx_enabled or try TypeScript.isTSArrowFnJSX(p))) {
                    switch (p.trySkipTypeScriptTypeParametersThenOpenParenWithBacktracking()) {
                        .did_not_skip_anything => {},
                        else => |result| {
                            try p.lexer.next();
                            return p.parseParenExpr(async_range.loc, level, ParenExprOpts{
                                .is_async = true,
                                .async_range = async_range,
                                .force_arrow_fn = result == .definitely_type_parameters,
                            });
                        },
                    }
                }
            },

            else => {},
        }
    }

    // "async"
    // "async + 1"
    return p.newExpr(
        E.Identifier{ .ref = try p.storeNameInRef("async") },
        async_range.loc,
    );
}
|
||
|
||
/// Speculative-parsing helpers for TypeScript-only ambiguities.
/// Each `…WithBacktracking` function parses optimistically and returns
/// `error.Backtrack` when the lookahead disproves the guess; the
/// `lexerBacktracker*` drivers snapshot the lexer, run the function with
/// lexer logging disabled, and restore the snapshot on backtrack.
pub const Backtracking = struct {
    /// Run `func(p)` speculatively. On `error.Backtrack` (or a lexer panic)
    /// the lexer is restored to its pre-call state. `ReturnType` is the
    /// caller-facing type; for `bool` it reports whether the skip succeeded.
    pub inline fn lexerBacktracker(p: *P, func: anytype, comptime ReturnType: type) ReturnType {
        p.markTypeScriptOnly();
        // Snapshot lexer state so we can rewind on failure.
        const old_lexer = p.lexer;
        const old_log_disabled = p.lexer.is_log_disabled;
        // Suppress diagnostics while speculating; restored on exit.
        p.lexer.is_log_disabled = true;
        defer p.lexer.is_log_disabled = old_log_disabled;
        var backtrack = false;
        const FnReturnType = bun.meta.ReturnOf(func);
        const result = func(p) catch |err| brk: {
            switch (err) {
                error.Backtrack => {
                    backtrack = true;
                },
                else => {
                    // A lexer panic during speculation also means "rewind".
                    if (p.lexer.did_panic) {
                        backtrack = true;
                    }
                },
            }
            if (comptime FnReturnType == anyerror!bool or FnReturnType == anyerror!void)
                // we are not using the value
                break :brk undefined;

            break :brk SkipTypeParameterResult.did_not_skip_anything;
        };

        if (backtrack) {
            p.lexer.restore(&old_lexer);

            if (comptime FnReturnType == anyerror!bool) {
                return false;
            }
        }

        if (comptime FnReturnType == anyerror!bool) {
            return true;
        }

        if (comptime ReturnType == void or ReturnType == bool)
            // If we did not backtrack, then we skipped successfully.
            return !backtrack;

        return result;
    }

    /// Same as `lexerBacktracker` but forwards an argument tuple to `func`.
    /// NOTE(review): on the `ReturnType == bool` fallthrough this returns
    /// `backtrack` while `lexerBacktracker` returns `!backtrack` — the line
    /// is unreachable for current callers (FnReturnType is `anyerror!bool`,
    /// handled above), but confirm before relying on it.
    pub inline fn lexerBacktrackerWithArgs(p: *P, func: anytype, args: anytype, comptime ReturnType: type) ReturnType {
        p.markTypeScriptOnly();
        const old_lexer = p.lexer;
        const old_log_disabled = p.lexer.is_log_disabled;
        p.lexer.is_log_disabled = true;

        defer p.lexer.is_log_disabled = old_log_disabled;
        var backtrack = false;
        const FnReturnType = bun.meta.ReturnOf(func);
        const result = @call(.auto, func, args) catch |err| brk: {
            switch (err) {
                error.Backtrack => {
                    backtrack = true;
                },
                else => {},
            }
            if (comptime FnReturnType == anyerror!bool or FnReturnType == anyerror!void)
                // we are not using the value
                break :brk undefined;
            break :brk SkipTypeParameterResult.did_not_skip_anything;
        };

        if (backtrack) {
            p.lexer.restore(&old_lexer);
            if (comptime FnReturnType == anyerror!bool) {
                return false;
            }
        }

        if (comptime FnReturnType == anyerror!bool) {
            return true;
        }

        if (comptime ReturnType == void or ReturnType == bool) return backtrack;
        return result;
    }

    /// Skip "<T, ...>" type parameters; backtrack unless "(" follows.
    pub fn skipTypeScriptTypeParametersThenOpenParenWithBacktracking(p: *P) anyerror!SkipTypeParameterResult {
        const result = try p.skipTypeScriptTypeParameters(.{ .allow_const_modifier = true });
        if (p.lexer.token != .t_open_paren) {
            return error.Backtrack;
        }

        return result;
    }

    /// Skip "extends T" after "infer X"; backtrack if a "?" follows in a
    /// position where it would make this a conditional type instead.
    pub fn skipTypeScriptConstraintOfInferTypeWithBacktracking(p: *P, flags: TypeScript.SkipTypeOptions.Bitset) anyerror!bool {
        try p.lexer.expect(.t_extends);
        try p.skipTypeScriptTypeWithOpts(.prefix, TypeScript.SkipTypeOptions.Bitset.initOne(.disallow_conditional_types), false, {});

        if (!flags.contains(.disallow_conditional_types) and p.lexer.token == .t_question) {
            return error.Backtrack;
        }

        return true;
    }

    /// Skip a parenthesized arrow argument list; backtrack unless "=>" follows.
    pub fn skipTypeScriptArrowArgsWithBacktracking(p: *P) anyerror!bool {
        try p.skipTypescriptFnArgs();
        p.lexer.expect(.t_equals_greater_than) catch
            return error.Backtrack;

        return true;
    }

    /// Skip "<T, ...>" type arguments; backtrack if the following token could
    /// not legally follow type arguments in an expression.
    pub fn skipTypeScriptTypeArgumentsWithBacktracking(p: *P) anyerror!bool {
        if (try p.skipTypeScriptTypeArguments(false)) {
            // Check the token after this and backtrack if it's the wrong one
            if (!TypeScript.canFollowTypeArgumentsInExpression(p)) {
                return error.Backtrack;
            }
        }

        return true;
    }

    /// Skip ": ReturnType" on a candidate arrow; backtrack unless "=>" follows.
    pub fn skipTypeScriptArrowReturnTypeWithBacktracking(p: *P) anyerror!void {
        try p.lexer.expect(.t_colon);

        try p.skipTypescriptReturnType();
        // Check the token after this and backtrack if it's the wrong one
        if (p.lexer.token != .t_equals_greater_than) {
            return error.Backtrack;
        }
    }
};
|
||
|
||
/// Speculatively skip "<T, ...>(" ; rewinds the lexer and reports
/// `.did_not_skip_anything` on failure.
pub fn trySkipTypeScriptTypeParametersThenOpenParenWithBacktracking(p: *P) SkipTypeParameterResult {
    return Backtracking.lexerBacktracker(p, Backtracking.skipTypeScriptTypeParametersThenOpenParenWithBacktracking, SkipTypeParameterResult);
}
|
||
|
||
/// Speculatively skip "<T, ...>" type arguments; rewinds and returns false on failure.
pub fn trySkipTypeScriptTypeArgumentsWithBacktracking(p: *P) bool {
    return Backtracking.lexerBacktracker(p, Backtracking.skipTypeScriptTypeArgumentsWithBacktracking, bool);
}
|
||
|
||
/// Speculatively skip ": ReturnType" before "=>"; rewinds and returns false on failure.
pub fn trySkipTypeScriptArrowReturnTypeWithBacktracking(p: *P) bool {
    return Backtracking.lexerBacktracker(p, Backtracking.skipTypeScriptArrowReturnTypeWithBacktracking, bool);
}
|
||
|
||
/// Speculatively skip an arrow argument list; rewinds and returns false on failure.
pub fn trySkipTypeScriptArrowArgsWithBacktracking(p: *P) bool {
    return Backtracking.lexerBacktracker(p, Backtracking.skipTypeScriptArrowArgsWithBacktracking, bool);
}
|
||
|
||
/// Speculatively skip the "extends ..." constraint of an "infer" type;
/// rewinds on failure. Uses the args-forwarding backtracker to pass `flags`.
pub fn trySkipTypeScriptConstraintOfInferTypeWithBacktracking(p: *P, flags: TypeScript.SkipTypeOptions.Bitset) bool {
    return Backtracking.lexerBacktrackerWithArgs(p, Backtracking.skipTypeScriptConstraintOfInferTypeWithBacktracking, .{ p, flags }, bool);
}
|
||
|
||
/// Parse an expression that might later be reinterpreted as a binding pattern
/// (e.g. a parenthesized arrow-function argument list). Ambiguity-dependent
/// syntax errors are deferred into `errors` instead of being reported eagerly.
pub inline fn parseExprOrBindings(p: *P, level: Level, errors: ?*DeferredErrors) anyerror!Expr {
    return try p.parseExprCommon(level, errors, Expr.EFlags.none);
}
|
||
|
||
/// Parse an expression at the given operator-precedence level with no
/// deferred errors and no special flags.
pub inline fn parseExpr(p: *P, level: Level) anyerror!Expr {
    return try p.parseExprCommon(level, null, Expr.EFlags.none);
}
|
||
|
||
/// Parse an expression at the given precedence level with extra parse flags
/// (e.g. marking a call-target position).
pub inline fn parseExprWithFlags(p: *P, level: Level, flags: Expr.EFlags) anyerror!Expr {
    return try p.parseExprCommon(level, null, flags);
}
|
||
|
||
/// Core expression parser: parses a prefix expression, then its suffixes
/// (binary operators, calls, member accesses) up to `level`.
/// Also applies a preceding "/* @__PURE__ */" annotation to the nearest call
/// or new expression so dead-code elimination can drop it if unused.
pub fn parseExprCommon(p: *P, level: Level, errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr {
    const had_pure_comment_before = p.lexer.has_pure_comment_before and !p.options.ignore_dce_annotations;
    var expr = try p.parsePrefix(level, errors, flags);

    // There is no formal spec for "__PURE__" comments but from reverse-
    // engineering, it looks like they apply to the next CallExpression or
    // NewExpression. So in "/* @__PURE__ */ a().b() + c()" the comment applies
    // to the expression "a().b()".
    if (had_pure_comment_before and level.lt(.call)) {
        // Parse suffixes at one level below .call so the annotated call chain
        // itself — but nothing of higher precedence — is captured here.
        expr = try p.parseSuffix(expr, @as(Level, @enumFromInt(@intFromEnum(Level.call) - 1)), errors, flags);
        switch (expr.data) {
            .e_call => |ex| {
                ex.can_be_unwrapped_if_unused = true;
            },
            .e_new => |ex| {
                ex.can_be_unwrapped_if_unused = true;
            },
            else => {},
        }
    }

    return try p.parseSuffix(expr, level, errors, flags);
}
|
||
|
||
/// Add an import record for the string literal at `loc`; the record's source
/// range covers that string. Returns the record's index.
pub inline fn addImportRecord(p: *P, kind: ImportKind, loc: logger.Loc, name: string) u32 {
    return p.addImportRecordByRange(kind, p.source.rangeOfString(loc), name);
}
|
||
|
||
/// Add an import record for `name` (converted to an fs.Path) covering the
/// given source range. Returns the record's index.
pub fn addImportRecordByRange(p: *P, kind: ImportKind, range: logger.Range, name: string) u32 {
    return p.addImportRecordByRangeAndPath(kind, range, fs.Path.init(name));
}
|
||
|
||
/// Append a new ImportRecord for `path` covering `range` and return its index
/// in `p.import_records`.
pub fn addImportRecordByRangeAndPath(p: *P, kind: ImportKind, range: logger.Range, path: fs.Path) u32 {
    // The new record's index is the current length of the list.
    const record_index: u32 = @intCast(p.import_records.items.len);
    p.import_records.append(ImportRecord{
        .kind = kind,
        .range = range,
        .path = path,
    }) catch unreachable;
    return record_index;
}
|
||
|
||
/// Pop the current scope, making its parent current.
/// If the scope contains a direct eval() call, every symbol declared in it is
/// pinned (must_not_be_renamed) first, since eval can reference any name in
/// the enclosing scopes by its original spelling.
/// Panics if called on the topmost (module) scope.
pub fn popScope(p: *P) void {
    const current_scope = p.current_scope;
    // We cannot rename anything inside a scope containing a direct eval() call
    if (current_scope.contains_direct_eval) {
        var iter = current_scope.members.iterator();
        while (iter.next()) |member| {

            // Using direct eval when bundling is not a good idea in general because
            // esbuild must assume that it can potentially reach anything in any of
            // the containing scopes. We try to make it work but this isn't possible
            // in some cases.
            //
            // For example, symbols imported using an ESM import are a live binding
            // to the underlying symbol in another file. Scope hoisting erases the
            // import and references the underlying symbol directly, which may have
            // a different name — breaking "eval('importedAlias')":
            //
            //   // Before bundling
            //   import { foo as bar } from './foo.js'
            //   console.log(eval('bar'))
            //
            //   // After bundling
            //   let foo = 123 // The contents of "foo.js"
            //   console.log(eval('bar'))
            //
            // There is no general fix: renaming "foo" to "bar" could conflict with
            // a third file that imports the same symbol under another alias, and
            // there is no way to express a live binding to "foo" in a variable
            // named "bar". A "with" statement could technically work but is
            // unavailable in strict mode, and all ESM code is strict mode.
            //
            // So while we still try to obey the requirement that all symbol names
            // are pinned when direct eval is present, we make an exception for
            // top-level symbols in an ESM file when bundling is enabled. We make
            // no guarantee that "eval" will be able to reach these symbols and we
            // allow them to be renamed or removed by tree shaking.
            // if (p.currentScope.parent == null and p.has_es_module_syntax) {
            //     continue;
            // }

            p.symbols.items[member.value_ptr.ref.innerIndex()].must_not_be_renamed = true;
        }
    }

    p.current_scope = current_scope.parent orelse p.panic("Internal error: attempted to call popScope() on the topmost scope", .{});
}
|
||
|
||
/// Record that `expr` was written inside parentheses. Only array and object
/// literals carry this flag (it matters for later destructuring-assignment
/// reinterpretation); every other expression kind is left untouched.
pub fn markExprAsParenthesized(_: *P, expr: *Expr) void {
    switch (expr.data) {
        .e_array => |array| array.is_parenthesized = true,
        .e_object => |object| object.is_parenthesized = true,
        else => {},
    }
}
|
||
|
||
/// Parse a "yield" expression (the "yield" keyword itself was already
/// consumed; `loc` is its location). Handles "yield*", the operand-less form
/// (before a closing token or after a newline), and "yield expr".
pub fn parseYieldExpr(p: *P, loc: logger.Loc) !ExprNodeIndex {
    // Parse a yield-from expression, which yields from an iterator
    const isStar = p.lexer.token == T.t_asterisk;

    if (isStar) {
        // "yield" and "*" must be on the same line ("yield \n *" is invalid).
        if (p.lexer.has_newline_before) {
            try p.lexer.unexpected();
            return error.SyntaxError;
        }
        try p.lexer.next();
    }

    var value: ?ExprNodeIndex = null;
    switch (p.lexer.token) {
        // Tokens that terminate an operand-less "yield".
        .t_close_brace, .t_close_paren, .t_close_bracket, .t_colon, .t_comma, .t_semicolon => {},
        else => {
            // ASI: a newline after plain "yield" ends the expression, but
            // "yield*" requires an operand regardless.
            if (isStar or !p.lexer.has_newline_before) {
                value = try p.parseExpr(.yield);
            }
        },
    }

    return p.newExpr(E.Yield{
        .value = value,
        .is_star = isStar,
    }, loc);
}
|
||
|
||
pub fn parseProperty(p: *P, kind: Property.Kind, opts: *PropertyOpts, errors: ?*DeferredErrors) anyerror!?G.Property {
|
||
var key: Expr = Expr{ .loc = logger.Loc.Empty, .data = .{ .e_missing = E.Missing{} } };
|
||
const key_range = p.lexer.range();
|
||
var is_computed = false;
|
||
|
||
switch (p.lexer.token) {
|
||
.t_numeric_literal => {
|
||
key = p.newExpr(E.Number{
|
||
.value = p.lexer.number,
|
||
}, p.lexer.loc());
|
||
// p.checkForLegacyOctalLiteral()
|
||
try p.lexer.next();
|
||
},
|
||
.t_string_literal => {
|
||
key = try p.parseStringLiteral();
|
||
},
|
||
.t_big_integer_literal => {
|
||
key = p.newExpr(E.BigInt{ .value = p.lexer.identifier }, p.lexer.loc());
|
||
// markSyntaxFeature
|
||
try p.lexer.next();
|
||
},
|
||
.t_private_identifier => {
|
||
if (!opts.is_class or opts.ts_decorators.len > 0) {
|
||
try p.lexer.expected(.t_identifier);
|
||
}
|
||
|
||
key = p.newExpr(E.PrivateIdentifier{ .ref = p.storeNameInRef(p.lexer.identifier) catch unreachable }, p.lexer.loc());
|
||
try p.lexer.next();
|
||
},
|
||
.t_open_bracket => {
|
||
is_computed = true;
|
||
// p.markSyntaxFeature(compat.objectExtensions, p.lexer.range())
|
||
try p.lexer.next();
|
||
const wasIdentifier = p.lexer.token == .t_identifier;
|
||
const expr = try p.parseExpr(.comma);
|
||
|
||
if (comptime is_typescript_enabled) {
|
||
|
||
// Handle index signatures
|
||
if (p.lexer.token == .t_colon and wasIdentifier and opts.is_class) {
|
||
switch (expr.data) {
|
||
.e_identifier => {
|
||
try p.lexer.next();
|
||
try p.skipTypeScriptType(.lowest);
|
||
try p.lexer.expect(.t_close_bracket);
|
||
try p.lexer.expect(.t_colon);
|
||
try p.skipTypeScriptType(.lowest);
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
|
||
// Skip this property entirely
|
||
return null;
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
}
|
||
|
||
try p.lexer.expect(.t_close_bracket);
|
||
key = expr;
|
||
},
|
||
.t_asterisk => {
|
||
if (kind != .normal or opts.is_generator) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
opts.is_generator = true;
|
||
return try p.parseProperty(.normal, opts, errors);
|
||
},
|
||
|
||
else => {
|
||
const name = p.lexer.identifier;
|
||
const raw = p.lexer.raw();
|
||
const name_range = p.lexer.range();
|
||
|
||
if (!p.lexer.isIdentifierOrKeyword()) {
|
||
try p.lexer.expect(.t_identifier);
|
||
}
|
||
|
||
try p.lexer.next();
|
||
|
||
// Support contextual keywords
|
||
if (kind == .normal and !opts.is_generator) {
|
||
// Does the following token look like a key?
|
||
const couldBeModifierKeyword = p.lexer.isIdentifierOrKeyword() or switch (p.lexer.token) {
|
||
.t_open_bracket, .t_numeric_literal, .t_string_literal, .t_asterisk, .t_private_identifier => true,
|
||
else => false,
|
||
};
|
||
|
||
// If so, check for a modifier keyword
|
||
if (couldBeModifierKeyword) {
|
||
// TODO: micro-optimization, use a smaller list for non-typescript files.
|
||
if (js_lexer.PropertyModifierKeyword.List.get(name)) |keyword| {
|
||
switch (keyword) {
|
||
.p_get => {
|
||
if (!opts.is_async and (js_lexer.PropertyModifierKeyword.List.get(raw) orelse .p_static) == .p_get) {
|
||
// p.markSyntaxFeature(ObjectAccessors, name_range)
|
||
return try p.parseProperty(.get, opts, null);
|
||
}
|
||
},
|
||
|
||
.p_set => {
|
||
if (!opts.is_async and (js_lexer.PropertyModifierKeyword.List.get(raw) orelse .p_static) == .p_set) {
|
||
// p.markSyntaxFeature(ObjectAccessors, name_range)
|
||
return try p.parseProperty(.set, opts, null);
|
||
}
|
||
},
|
||
.p_async => {
|
||
if (!opts.is_async and (js_lexer.PropertyModifierKeyword.List.get(raw) orelse .p_static) == .p_async and !p.lexer.has_newline_before) {
|
||
opts.is_async = true;
|
||
opts.async_range = name_range;
|
||
|
||
// p.markSyntaxFeature(ObjectAccessors, name_range)
|
||
return try p.parseProperty(kind, opts, null);
|
||
}
|
||
},
|
||
.p_static => {
|
||
if (!opts.is_static and !opts.is_async and opts.is_class and (js_lexer.PropertyModifierKeyword.List.get(raw) orelse .p_get) == .p_static) {
|
||
opts.is_static = true;
|
||
return try p.parseProperty(kind, opts, null);
|
||
}
|
||
},
|
||
.p_declare => {
|
||
// skip declare keyword entirely
|
||
// https://github.com/oven-sh/bun/issues/1907
|
||
if (opts.is_class and is_typescript_enabled and strings.eqlComptime(raw, "declare")) {
|
||
const scope_index = p.scopes_in_order.items.len;
|
||
if (try p.parseProperty(kind, opts, null)) |_prop| {
|
||
var prop = _prop;
|
||
if (prop.kind == .normal and prop.value == null and opts.ts_decorators.len > 0) {
|
||
prop.kind = .declare;
|
||
return prop;
|
||
}
|
||
}
|
||
|
||
p.discardScopesUpTo(scope_index);
|
||
return null;
|
||
}
|
||
},
|
||
.p_abstract => {
|
||
if (opts.is_class and is_typescript_enabled and !opts.is_ts_abstract and strings.eqlComptime(raw, "abstract")) {
|
||
opts.is_ts_abstract = true;
|
||
const scope_index = p.scopes_in_order.items.len;
|
||
if (try p.parseProperty(kind, opts, null)) |_prop| {
|
||
var prop = _prop;
|
||
if (prop.kind == .normal and prop.value == null and opts.ts_decorators.len > 0) {
|
||
prop.kind = .abstract;
|
||
return prop;
|
||
}
|
||
}
|
||
p.discardScopesUpTo(scope_index);
|
||
return null;
|
||
}
|
||
},
|
||
.p_private, .p_protected, .p_public, .p_readonly, .p_override => {
|
||
// Skip over TypeScript keywords
|
||
if (opts.is_class and is_typescript_enabled and (js_lexer.PropertyModifierKeyword.List.get(raw) orelse .p_static) == keyword) {
|
||
return try p.parseProperty(kind, opts, null);
|
||
}
|
||
},
|
||
}
|
||
}
|
||
} else if (p.lexer.token == .t_open_brace and strings.eqlComptime(name, "static")) {
|
||
const loc = p.lexer.loc();
|
||
try p.lexer.next();
|
||
|
||
const old_fn_or_arrow_data_parse = p.fn_or_arrow_data_parse;
|
||
p.fn_or_arrow_data_parse = .{
|
||
.is_return_disallowed = true,
|
||
.allow_super_property = true,
|
||
.allow_await = .forbid_all,
|
||
};
|
||
|
||
_ = try p.pushScopeForParsePass(.class_static_init, loc);
|
||
var _parse_opts = ParseStatementOptions{};
|
||
const stmts = try p.parseStmtsUpTo(.t_close_brace, &_parse_opts);
|
||
|
||
p.popScope();
|
||
|
||
p.fn_or_arrow_data_parse = old_fn_or_arrow_data_parse;
|
||
try p.lexer.expect(.t_close_brace);
|
||
|
||
const block = p.allocator.create(
|
||
G.ClassStaticBlock,
|
||
) catch unreachable;
|
||
|
||
block.* = G.ClassStaticBlock{
|
||
.stmts = js_ast.BabyList(Stmt).init(stmts),
|
||
.loc = loc,
|
||
};
|
||
|
||
return G.Property{
|
||
.kind = .class_static_block,
|
||
.class_static_block = block,
|
||
};
|
||
}
|
||
}
|
||
|
||
// Handle invalid identifiers in property names
|
||
// https://github.com/oven-sh/bun/issues/12039
|
||
if (p.lexer.token == .t_syntax_error) {
|
||
p.log.addRangeErrorFmt(p.source, name_range, p.allocator, "Unexpected {}", .{bun.fmt.quote(name)}) catch bun.outOfMemory();
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
key = p.newExpr(E.String{ .data = name }, name_range.loc);
|
||
|
||
// Parse a shorthand property
|
||
const isShorthandProperty = !opts.is_class and
|
||
kind == .normal and
|
||
p.lexer.token != .t_colon and
|
||
p.lexer.token != .t_open_paren and
|
||
p.lexer.token != .t_less_than and
|
||
!opts.is_generator and
|
||
!opts.is_async and
|
||
!js_lexer.Keywords.has(name);
|
||
|
||
if (isShorthandProperty) {
|
||
if ((p.fn_or_arrow_data_parse.allow_await != .allow_ident and
|
||
strings.eqlComptime(name, "await")) or
|
||
(p.fn_or_arrow_data_parse.allow_yield != .allow_ident and
|
||
strings.eqlComptime(name, "yield")))
|
||
{
|
||
if (strings.eqlComptime(name, "await")) {
|
||
p.log.addRangeError(p.source, name_range, "Cannot use \"await\" here") catch unreachable;
|
||
} else {
|
||
p.log.addRangeError(p.source, name_range, "Cannot use \"yield\" here") catch unreachable;
|
||
}
|
||
}
|
||
|
||
const ref = p.storeNameInRef(name) catch unreachable;
|
||
const value = p.newExpr(E.Identifier{ .ref = ref }, key.loc);
|
||
|
||
// Destructuring patterns have an optional default value
|
||
var initializer: ?Expr = null;
|
||
if (errors != null and p.lexer.token == .t_equals) {
|
||
errors.?.invalid_expr_default_value = p.lexer.range();
|
||
try p.lexer.next();
|
||
initializer = try p.parseExpr(.comma);
|
||
}
|
||
|
||
return G.Property{
|
||
.kind = kind,
|
||
.key = key,
|
||
.value = value,
|
||
.initializer = initializer,
|
||
.flags = Flags.Property.init(.{
|
||
.was_shorthand = true,
|
||
}),
|
||
};
|
||
}
|
||
},
|
||
}
|
||
|
||
var has_type_parameters = false;
|
||
var has_definite_assignment_assertion_operator = false;
|
||
|
||
if (comptime is_typescript_enabled) {
|
||
if (opts.is_class) {
|
||
if (p.lexer.token == .t_question) {
|
||
// "class X { foo?: number }"
|
||
// "class X { foo!: number }"
|
||
try p.lexer.next();
|
||
} else if (p.lexer.token == .t_exclamation and
|
||
!p.lexer.has_newline_before and
|
||
kind == .normal and
|
||
!opts.is_async and
|
||
!opts.is_generator)
|
||
{
|
||
// "class X { foo!: number }"
|
||
try p.lexer.next();
|
||
has_definite_assignment_assertion_operator = true;
|
||
}
|
||
}
|
||
|
||
// "class X { foo?<T>(): T }"
|
||
// "const x = { foo<T>(): T {} }"
|
||
if (!has_definite_assignment_assertion_operator) {
|
||
has_type_parameters = try p.skipTypeScriptTypeParameters(.{ .allow_const_modifier = true }) != .did_not_skip_anything;
|
||
}
|
||
}
|
||
|
||
// Parse a class field with an optional initial value
|
||
if (opts.is_class and
|
||
kind == .normal and !opts.is_async and
|
||
!opts.is_generator and
|
||
p.lexer.token != .t_open_paren and
|
||
!has_type_parameters and
|
||
(p.lexer.token != .t_open_paren or has_definite_assignment_assertion_operator))
|
||
{
|
||
var initializer: ?Expr = null;
|
||
var ts_metadata = TypeScript.Metadata.default;
|
||
|
||
// Forbid the names "constructor" and "prototype" in some cases
|
||
if (!is_computed) {
|
||
switch (key.data) {
|
||
.e_string => |str| {
|
||
if (str.eqlComptime("constructor") or (opts.is_static and str.eqlComptime("prototype"))) {
|
||
// TODO: fmt error message to include string value.
|
||
p.log.addRangeError(p.source, key_range, "Invalid field name") catch unreachable;
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
|
||
if (comptime is_typescript_enabled) {
|
||
// Skip over types
|
||
if (p.lexer.token == .t_colon) {
|
||
try p.lexer.next();
|
||
if (p.options.features.emit_decorator_metadata and opts.is_class and opts.ts_decorators.len > 0) {
|
||
ts_metadata = try p.skipTypeScriptTypeWithMetadata(.lowest);
|
||
} else {
|
||
try p.skipTypeScriptType(.lowest);
|
||
}
|
||
}
|
||
}
|
||
|
||
if (p.lexer.token == .t_equals) {
|
||
if (comptime is_typescript_enabled) {
|
||
if (!opts.declare_range.isEmpty()) {
|
||
try p.log.addRangeError(p.source, p.lexer.range(), "Class fields that use \"declare\" cannot be initialized");
|
||
}
|
||
}
|
||
|
||
try p.lexer.next();
|
||
|
||
// "this" and "super" property access is allowed in field initializers
|
||
const old_is_this_disallowed = p.fn_or_arrow_data_parse.is_this_disallowed;
|
||
const old_allow_super_property = p.fn_or_arrow_data_parse.allow_super_property;
|
||
p.fn_or_arrow_data_parse.is_this_disallowed = false;
|
||
p.fn_or_arrow_data_parse.allow_super_property = true;
|
||
|
||
initializer = try p.parseExpr(.comma);
|
||
|
||
p.fn_or_arrow_data_parse.is_this_disallowed = old_is_this_disallowed;
|
||
p.fn_or_arrow_data_parse.allow_super_property = old_allow_super_property;
|
||
}
|
||
|
||
// Special-case private identifiers
|
||
switch (key.data) {
|
||
.e_private_identifier => |*private| {
|
||
const name = p.loadNameFromRef(private.ref);
|
||
if (strings.eqlComptime(name, "#constructor")) {
|
||
p.log.addRangeError(p.source, key_range, "Invalid field name \"#constructor\"") catch unreachable;
|
||
}
|
||
|
||
const declare: js_ast.Symbol.Kind = if (opts.is_static)
|
||
.private_static_field
|
||
else
|
||
.private_field;
|
||
|
||
private.ref = p.declareSymbol(declare, key.loc, name) catch unreachable;
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
try p.lexer.expectOrInsertSemicolon();
|
||
|
||
return G.Property{
|
||
.ts_decorators = ExprNodeList.init(opts.ts_decorators),
|
||
.kind = kind,
|
||
.flags = Flags.Property.init(.{
|
||
.is_computed = is_computed,
|
||
.is_static = opts.is_static,
|
||
}),
|
||
.key = key,
|
||
.initializer = initializer,
|
||
.ts_metadata = ts_metadata,
|
||
};
|
||
}
|
||
|
||
// Parse a method expression
|
||
if (p.lexer.token == .t_open_paren or kind != .normal or opts.is_class or opts.is_async or opts.is_generator) {
|
||
if (p.lexer.token == .t_open_paren and kind != .get and kind != .set) {
|
||
// markSyntaxFeature object extensions
|
||
}
|
||
|
||
const loc = p.lexer.loc();
|
||
const scope_index = p.pushScopeForParsePass(.function_args, loc) catch unreachable;
|
||
var is_constructor = false;
|
||
|
||
// Forbid the names "constructor" and "prototype" in some cases
|
||
if (opts.is_class and !is_computed) {
|
||
switch (key.data) {
|
||
.e_string => |str| {
|
||
if (!opts.is_static and str.eqlComptime("constructor")) {
|
||
if (kind == .get) {
|
||
p.log.addRangeError(p.source, key_range, "Class constructor cannot be a getter") catch unreachable;
|
||
} else if (kind == .set) {
|
||
p.log.addRangeError(p.source, key_range, "Class constructor cannot be a setter") catch unreachable;
|
||
} else if (opts.is_async) {
|
||
p.log.addRangeError(p.source, key_range, "Class constructor cannot be an async function") catch unreachable;
|
||
} else if (opts.is_generator) {
|
||
p.log.addRangeError(p.source, key_range, "Class constructor cannot be a generator function") catch unreachable;
|
||
} else {
|
||
is_constructor = true;
|
||
}
|
||
} else if (opts.is_static and str.eqlComptime("prototype")) {
|
||
p.log.addRangeError(p.source, key_range, "Invalid static method name \"prototype\"") catch unreachable;
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
|
||
var func = try p.parseFn(null, FnOrArrowDataParse{
|
||
.async_range = opts.async_range,
|
||
.needs_async_loc = key.loc,
|
||
.has_async_range = !opts.async_range.isEmpty(),
|
||
.allow_await = if (opts.is_async) AwaitOrYield.allow_expr else AwaitOrYield.allow_ident,
|
||
.allow_yield = if (opts.is_generator) AwaitOrYield.allow_expr else AwaitOrYield.allow_ident,
|
||
.allow_super_call = opts.class_has_extends and is_constructor,
|
||
.allow_super_property = true,
|
||
.allow_ts_decorators = opts.allow_ts_decorators,
|
||
.is_constructor = is_constructor,
|
||
.has_decorators = opts.ts_decorators.len > 0 or (opts.has_class_decorators and is_constructor),
|
||
|
||
// Only allow omitting the body if we're parsing TypeScript class
|
||
.allow_missing_body_for_type_script = is_typescript_enabled and opts.is_class,
|
||
});
|
||
|
||
opts.has_argument_decorators = opts.has_argument_decorators or p.fn_or_arrow_data_parse.has_argument_decorators;
|
||
p.fn_or_arrow_data_parse.has_argument_decorators = false;
|
||
|
||
// "class Foo { foo(): void; foo(): void {} }"
|
||
if (func.flags.contains(.is_forward_declaration)) {
|
||
// Skip this property entirely
|
||
p.popAndDiscardScope(scope_index);
|
||
return null;
|
||
}
|
||
|
||
p.popScope();
|
||
func.flags.insert(.is_unique_formal_parameters);
|
||
const value = p.newExpr(E.Function{ .func = func }, loc);
|
||
|
||
// Enforce argument rules for accessors
|
||
switch (kind) {
|
||
.get => {
|
||
if (func.args.len > 0) {
|
||
const r = js_lexer.rangeOfIdentifier(p.source, func.args[0].binding.loc);
|
||
p.log.addRangeErrorFmt(p.source, r, p.allocator, "Getter {s} must have zero arguments", .{p.keyNameForError(key)}) catch unreachable;
|
||
}
|
||
},
|
||
.set => {
|
||
if (func.args.len != 1) {
|
||
var r = js_lexer.rangeOfIdentifier(p.source, if (func.args.len > 0) func.args[0].binding.loc else loc);
|
||
if (func.args.len > 1) {
|
||
r = js_lexer.rangeOfIdentifier(p.source, func.args[1].binding.loc);
|
||
}
|
||
p.log.addRangeErrorFmt(p.source, r, p.allocator, "Setter {s} must have exactly 1 argument (there are {d})", .{ p.keyNameForError(key), func.args.len }) catch unreachable;
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
// Special-case private identifiers
|
||
switch (key.data) {
|
||
.e_private_identifier => |*private| {
|
||
const declare: Symbol.Kind = switch (kind) {
|
||
.get => if (opts.is_static)
|
||
.private_static_get
|
||
else
|
||
.private_get,
|
||
|
||
.set => if (opts.is_static)
|
||
.private_static_set
|
||
else
|
||
.private_set,
|
||
else => if (opts.is_static)
|
||
.private_static_method
|
||
else
|
||
.private_method,
|
||
};
|
||
|
||
const name = p.loadNameFromRef(private.ref);
|
||
if (strings.eqlComptime(name, "#constructor")) {
|
||
p.log.addRangeError(p.source, key_range, "Invalid method name \"#constructor\"") catch unreachable;
|
||
}
|
||
private.ref = p.declareSymbol(declare, key.loc, name) catch unreachable;
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
return G.Property{
|
||
.ts_decorators = ExprNodeList.init(opts.ts_decorators),
|
||
.kind = kind,
|
||
.flags = Flags.Property.init(.{
|
||
.is_computed = is_computed,
|
||
.is_method = true,
|
||
.is_static = opts.is_static,
|
||
}),
|
||
.key = key,
|
||
.value = value,
|
||
.ts_metadata = .m_function,
|
||
};
|
||
}
|
||
|
||
// Parse an object key/value pair
|
||
try p.lexer.expect(.t_colon);
|
||
const value = try p.parseExprOrBindings(.comma, errors);
|
||
|
||
return G.Property{
|
||
.kind = kind,
|
||
.flags = Flags.Property.init(.{
|
||
.is_computed = is_computed,
|
||
}),
|
||
.key = key,
|
||
.value = value,
|
||
};
|
||
}
|
||
|
||
/// Parse the remainder of a class declaration or expression. By the time we
/// get here the caller has already consumed the class identifier and any type
/// parameters, so parsing picks up at the optional "extends" clause and runs
/// through the closing "}".
pub fn parseClass(p: *P, class_keyword: logger.Range, name: ?js_ast.LocRef, class_opts: ParseClassOptions) !G.Class {
    var extends: ?Expr = null;
    var has_decorators: bool = false;

    if (p.lexer.token == .t_extends) {
        try p.lexer.next();
        extends = try p.parseExpr(.new);

        // TypeScript's type argument parser inside expressions backtracks if the
        // first token after the end of the type parameter list is "{", so the
        // parsed expression above will have backtracked if there are any type
        // arguments. This means we have to re-parse for any type arguments here.
        // This seems kind of wasteful to me but it's what the official compiler
        // does and it probably doesn't have that high of a performance overhead
        // because "extends" clauses aren't that frequent, so it should be ok.
        if (comptime is_typescript_enabled) {
            _ = try p.skipTypeScriptTypeArguments(false); // isInsideJSXElement
        }
    }

    if (comptime is_typescript_enabled) {
        if (p.lexer.isContextualKeyword("implements")) {
            try p.lexer.next();

            // Skip a comma-separated list of implemented interface types.
            try p.skipTypeScriptType(.lowest);
            while (p.lexer.token == .t_comma) {
                try p.lexer.next();
                try p.skipTypeScriptType(.lowest);
            }
        }
    }

    const body_loc = p.lexer.loc();
    try p.lexer.expect(T.t_open_brace);
    var property_list = ListManaged(G.Property).init(p.allocator);

    // Allow "in" and private fields inside class bodies
    const saved_allow_in = p.allow_in;
    const saved_allow_private_identifiers = p.allow_private_identifiers;
    p.allow_in = true;
    p.allow_private_identifiers = true;

    // A scope is needed for private identifiers
    const class_body_scope = p.pushScopeForParsePass(.class_body, body_loc) catch unreachable;

    while (!p.lexer.token.isCloseBraceOrEOF()) {
        // Stray semicolons between members are allowed and meaningless.
        if (p.lexer.token == .t_semicolon) {
            try p.lexer.next();
            continue;
        }

        var opts = PropertyOpts{
            .is_class = true,
            .allow_ts_decorators = class_opts.allow_ts_decorators,
            .class_has_extends = extends != null,
            .has_argument_decorators = false,
        };

        // Parse decorators for this property
        const first_decorator_loc = p.lexer.loc();
        if (opts.allow_ts_decorators) {
            opts.ts_decorators = try p.parseTypeScriptDecorators();
            opts.has_class_decorators = class_opts.ts_decorators.len > 0;
            has_decorators = has_decorators or opts.ts_decorators.len > 0;
        } else {
            opts.ts_decorators = &[_]Expr{};
        }

        // This property may turn out to be a type in TypeScript, which should be ignored
        if (try p.parseProperty(.normal, &opts, null)) |property| {
            property_list.append(property) catch unreachable;

            // Forbid decorators on class constructors
            if (opts.ts_decorators.len > 0) {
                switch ((property.key orelse p.panic("Internal error: Expected property {any} to have a key.", .{property})).data) {
                    .e_string => |str| {
                        if (str.eqlComptime("constructor")) {
                            p.log.addError(p.source, first_decorator_loc, "TypeScript does not allow decorators on class constructors") catch unreachable;
                        }
                    },
                    else => {},
                }
            }

            has_decorators = has_decorators or opts.has_argument_decorators;
        }
    }

    // "declare class" bodies in TypeScript are type-only, so the scope (and
    // any symbols declared inside it) is discarded instead of kept.
    if (class_opts.is_type_script_declare) {
        p.popAndDiscardScope(class_body_scope);
    } else {
        p.popScope();
    }

    p.allow_in = saved_allow_in;
    p.allow_private_identifiers = saved_allow_private_identifiers;
    const close_brace_loc = p.lexer.loc();
    try p.lexer.expect(.t_close_brace);

    return G.Class{
        .class_name = name,
        .extends = extends,
        .close_brace_loc = close_brace_loc,
        .ts_decorators = ExprNodeList.init(class_opts.ts_decorators),
        .class_keyword = class_keyword,
        .body_loc = body_loc,
        .properties = property_list.items,
        .has_decorators = has_decorators or class_opts.ts_decorators.len > 0,
    };
}
|
||
|
||
/// Skip over a TypeScript type argument list like "<T, U>" if one begins at
/// the current token. Returns true when a list was consumed and false when the
/// current token cannot start one (in which case nothing is consumed).
pub fn skipTypeScriptTypeArguments(p: *P, comptime isInsideJSXElement: bool) anyerror!bool {
    p.markTypeScriptOnly();

    // Only tokens beginning with "<" can start a type argument list; the
    // compound tokens are included because the lexer may have glued several
    // "<" / "=" characters together.
    const can_start_list = switch (p.lexer.token) {
        .t_less_than, .t_less_than_equals, .t_less_than_less_than, .t_less_than_less_than_equals => true,
        else => false,
    };
    if (!can_start_list) {
        return false;
    }

    try p.lexer.expectLessThan(false);

    // Skip a comma-separated list of types.
    try p.skipTypeScriptType(.lowest);
    while (p.lexer.token == .t_comma) {
        try p.lexer.next();
        try p.skipTypeScriptType(.lowest);
    }

    // This type argument list must end with a ">"
    try p.lexer.expectGreaterThan(isInsideJSXElement);
    return true;
}
|
||
|
||
/// Parse the parts of a template literal after the head: each "${expr}"
/// substitution plus the string chunk following it, up to and including the
/// template tail. When `include_raw` is true the raw source text of each chunk
/// is kept (used for tagged templates); otherwise the cooked value is used.
pub fn parseTemplateParts(p: *P, include_raw: bool) ![]E.TemplatePart {
    var part_list = ListManaged(E.TemplatePart).initCapacity(p.allocator, 1) catch unreachable;

    // Allow "in" inside template literals
    const saved_allow_in = p.allow_in;
    p.allow_in = true;

    while (true) {
        try p.lexer.next();
        const value = try p.parseExpr(.lowest);
        const tail_loc = p.lexer.loc();
        try p.lexer.rescanCloseBraceAsTemplateToken();

        const tail: E.Template.Contents = if (include_raw)
            .{ .raw = p.lexer.rawTemplateContents() }
        else
            .{ .cooked = try p.lexer.toEString() };

        part_list.append(E.TemplatePart{
            .value = value,
            .tail_loc = tail_loc,
            .tail = tail,
        }) catch unreachable;

        // The tail token ends the template literal.
        if (p.lexer.token == .t_template_tail) {
            try p.lexer.next();
            break;
        }

        if (comptime Environment.allow_assert)
            assert(p.lexer.token != .t_end_of_file);
    }

    p.allow_in = saved_allow_in;

    return part_list.items;
}
|
||
|
||
// This assumes the caller has already checked for TStringLiteral or TNoSubstitutionTemplateLiteral
/// Build an E.String expression from the current string-like token and advance
/// the lexer past it.
pub fn parseStringLiteral(p: *P) anyerror!Expr {
    const literal_loc = p.lexer.loc();
    var str = try p.lexer.toEString();

    // A backtick-quoted string with no substitutions prefers to be printed
    // back out as a template literal.
    const is_backtick = p.lexer.token == .t_no_substitution_template_literal;
    str.prefer_template = is_backtick;

    const result = p.newExpr(str, literal_loc);
    try p.lexer.next();
    return result;
}
|
||
|
||
/// Parse a parenthesized argument list for a call expression, including
/// spread arguments ("...x"). Returns the argument expressions together with
/// the location of the closing ")".
pub fn parseCallArgs(p: *P) anyerror!ExprListLoc {
    // Allow "in" inside call arguments
    const saved_allow_in = p.allow_in;
    p.allow_in = true;
    defer p.allow_in = saved_allow_in;

    var call_args = ListManaged(Expr).init(p.allocator);
    try p.lexer.expect(.t_open_paren);

    while (p.lexer.token != .t_close_paren) {
        const arg_loc = p.lexer.loc();
        const has_spread = p.lexer.token == .t_dot_dot_dot;
        if (has_spread) {
            // p.mark_syntax_feature(compat.rest_argument, p.lexer.range());
            try p.lexer.next();
        }

        const parsed = try p.parseExpr(.comma);
        const arg = if (has_spread)
            p.newExpr(E.Spread{ .value = parsed }, arg_loc)
        else
            parsed;
        call_args.append(arg) catch unreachable;

        // Arguments are separated by commas; anything else ends the list.
        if (p.lexer.token != .t_comma) {
            break;
        }
        try p.lexer.next();
    }

    const close_paren_loc = p.lexer.loc();
    try p.lexer.expect(.t_close_paren);
    return ExprListLoc{ .list = ExprNodeList.fromList(call_args), .loc = close_paren_loc };
}
|
||
|
||
pub fn parseSuffix(p: *P, _left: Expr, level: Level, errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr {
|
||
var left = _left;
|
||
var optional_chain: ?js_ast.OptionalChain = null;
|
||
while (true) {
|
||
if (p.lexer.loc().start == p.after_arrow_body_loc.start) {
|
||
while (true) {
|
||
switch (p.lexer.token) {
|
||
.t_comma => {
|
||
if (level.gte(.comma)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{
|
||
.op = .bin_comma,
|
||
.left = left,
|
||
.right = try p.parseExpr(.comma),
|
||
}, left.loc);
|
||
},
|
||
else => {
|
||
return left;
|
||
},
|
||
}
|
||
}
|
||
}
|
||
|
||
if (comptime is_typescript_enabled) {
|
||
// Stop now if this token is forbidden to follow a TypeScript "as" cast
|
||
if (p.forbid_suffix_after_as_loc.start > -1 and p.lexer.loc().start == p.forbid_suffix_after_as_loc.start) {
|
||
return left;
|
||
}
|
||
}
|
||
|
||
// Reset the optional chain flag by default. That way we won't accidentally
|
||
// treat "c.d" as OptionalChainContinue in "a?.b + c.d".
|
||
const old_optional_chain = optional_chain;
|
||
optional_chain = null;
|
||
switch (p.lexer.token) {
|
||
.t_dot => {
|
||
try p.lexer.next();
|
||
if (p.lexer.token == .t_private_identifier and p.allow_private_identifiers) {
|
||
// "a.#b"
|
||
// "a?.b.#c"
|
||
switch (left.data) {
|
||
.e_super => {
|
||
try p.lexer.expected(.t_identifier);
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
const name = p.lexer.identifier;
|
||
const name_loc = p.lexer.loc();
|
||
try p.lexer.next();
|
||
const ref = p.storeNameInRef(name) catch unreachable;
|
||
left = p.newExpr(E.Index{
|
||
.target = left,
|
||
.index = p.newExpr(
|
||
E.PrivateIdentifier{
|
||
.ref = ref,
|
||
},
|
||
name_loc,
|
||
),
|
||
.optional_chain = old_optional_chain,
|
||
}, left.loc);
|
||
} else {
|
||
// "a.b"
|
||
// "a?.b.c"
|
||
if (!p.lexer.isIdentifierOrKeyword()) {
|
||
try p.lexer.expect(.t_identifier);
|
||
}
|
||
|
||
const name = p.lexer.identifier;
|
||
const name_loc = p.lexer.loc();
|
||
try p.lexer.next();
|
||
|
||
left = p.newExpr(E.Dot{ .target = left, .name = name, .name_loc = name_loc, .optional_chain = old_optional_chain }, left.loc);
|
||
}
|
||
|
||
optional_chain = old_optional_chain;
|
||
},
|
||
.t_question_dot => {
|
||
try p.lexer.next();
|
||
var optional_start: ?js_ast.OptionalChain = js_ast.OptionalChain.start;
|
||
|
||
// Remove unnecessary optional chains
|
||
if (p.options.features.minify_syntax) {
|
||
const result = SideEffects.toNullOrUndefined(p, left.data);
|
||
if (result.ok and !result.value) {
|
||
optional_start = null;
|
||
}
|
||
}
|
||
|
||
switch (p.lexer.token) {
|
||
.t_open_bracket => {
|
||
// "a?.[b]"
|
||
try p.lexer.next();
|
||
|
||
// allow "in" inside the brackets;
|
||
const old_allow_in = p.allow_in;
|
||
p.allow_in = true;
|
||
|
||
const index = try p.parseExpr(.lowest);
|
||
|
||
p.allow_in = old_allow_in;
|
||
|
||
try p.lexer.expect(.t_close_bracket);
|
||
left = p.newExpr(
|
||
E.Index{ .target = left, .index = index, .optional_chain = optional_start },
|
||
left.loc,
|
||
);
|
||
},
|
||
|
||
.t_open_paren => {
|
||
// "a?.()"
|
||
if (level.gte(.call)) {
|
||
return left;
|
||
}
|
||
|
||
const list_loc = try p.parseCallArgs();
|
||
left = p.newExpr(E.Call{
|
||
.target = left,
|
||
.args = list_loc.list,
|
||
.close_paren_loc = list_loc.loc,
|
||
.optional_chain = optional_start,
|
||
}, left.loc);
|
||
},
|
||
.t_less_than, .t_less_than_less_than => {
|
||
// "a?.<T>()"
|
||
if (comptime !is_typescript_enabled) {
|
||
try p.lexer.expected(.t_identifier);
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
_ = try p.skipTypeScriptTypeArguments(false);
|
||
if (p.lexer.token != .t_open_paren) {
|
||
try p.lexer.expected(.t_open_paren);
|
||
}
|
||
|
||
if (level.gte(.call)) {
|
||
return left;
|
||
}
|
||
|
||
const list_loc = try p.parseCallArgs();
|
||
left = p.newExpr(E.Call{
|
||
.target = left,
|
||
.args = list_loc.list,
|
||
.close_paren_loc = list_loc.loc,
|
||
.optional_chain = optional_start,
|
||
}, left.loc);
|
||
},
|
||
else => {
|
||
if (p.lexer.token == .t_private_identifier and p.allow_private_identifiers) {
|
||
// "a?.#b"
|
||
const name = p.lexer.identifier;
|
||
const name_loc = p.lexer.loc();
|
||
try p.lexer.next();
|
||
const ref = p.storeNameInRef(name) catch unreachable;
|
||
left = p.newExpr(E.Index{
|
||
.target = left,
|
||
.index = p.newExpr(
|
||
E.PrivateIdentifier{
|
||
.ref = ref,
|
||
},
|
||
name_loc,
|
||
),
|
||
.optional_chain = optional_start,
|
||
}, left.loc);
|
||
} else {
|
||
// "a?.b"
|
||
if (!p.lexer.isIdentifierOrKeyword()) {
|
||
try p.lexer.expect(.t_identifier);
|
||
}
|
||
const name = p.lexer.identifier;
|
||
const name_loc = p.lexer.loc();
|
||
try p.lexer.next();
|
||
|
||
left = p.newExpr(E.Dot{
|
||
.target = left,
|
||
.name = name,
|
||
.name_loc = name_loc,
|
||
.optional_chain = optional_start,
|
||
}, left.loc);
|
||
}
|
||
},
|
||
}
|
||
|
||
// Only continue if we have started
|
||
if ((optional_start orelse .continuation) == .start) {
|
||
optional_chain = .continuation;
|
||
}
|
||
},
|
||
.t_no_substitution_template_literal => {
|
||
if (old_optional_chain != null) {
|
||
p.log.addRangeError(p.source, p.lexer.range(), "Template literals cannot have an optional chain as a tag") catch unreachable;
|
||
}
|
||
// p.markSyntaxFeature(compat.TemplateLiteral, p.lexer.Range());
|
||
const head = p.lexer.rawTemplateContents();
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Template{
|
||
.tag = left,
|
||
.head = .{ .raw = head },
|
||
}, left.loc);
|
||
},
|
||
.t_template_head => {
|
||
if (old_optional_chain != null) {
|
||
p.log.addRangeError(p.source, p.lexer.range(), "Template literals cannot have an optional chain as a tag") catch unreachable;
|
||
}
|
||
// p.markSyntaxFeature(compat.TemplateLiteral, p.lexer.Range());
|
||
const head = p.lexer.rawTemplateContents();
|
||
const partsGroup = try p.parseTemplateParts(true);
|
||
const tag = left;
|
||
left = p.newExpr(E.Template{
|
||
.tag = tag,
|
||
.head = .{ .raw = head },
|
||
.parts = partsGroup,
|
||
}, left.loc);
|
||
},
|
||
.t_open_bracket => {
|
||
// When parsing a decorator, ignore EIndex expressions since they may be
|
||
// part of a computed property:
|
||
//
|
||
// class Foo {
|
||
// @foo ['computed']() {}
|
||
// }
|
||
//
|
||
// This matches the behavior of the TypeScript compiler.
|
||
if (flags == .ts_decorator) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
|
||
// Allow "in" inside the brackets
|
||
const old_allow_in = p.allow_in;
|
||
p.allow_in = true;
|
||
|
||
const index = try p.parseExpr(.lowest);
|
||
|
||
p.allow_in = old_allow_in;
|
||
|
||
try p.lexer.expect(.t_close_bracket);
|
||
|
||
left = p.newExpr(E.Index{
|
||
.target = left,
|
||
.index = index,
|
||
.optional_chain = old_optional_chain,
|
||
}, left.loc);
|
||
optional_chain = old_optional_chain;
|
||
},
|
||
.t_open_paren => {
|
||
if (level.gte(.call)) {
|
||
return left;
|
||
}
|
||
|
||
const list_loc = try p.parseCallArgs();
|
||
left = p.newExpr(
|
||
E.Call{
|
||
.target = left,
|
||
.args = list_loc.list,
|
||
.close_paren_loc = list_loc.loc,
|
||
.optional_chain = old_optional_chain,
|
||
},
|
||
left.loc,
|
||
);
|
||
optional_chain = old_optional_chain;
|
||
},
|
||
.t_question => {
|
||
if (level.gte(.conditional)) {
|
||
return left;
|
||
}
|
||
try p.lexer.next();
|
||
|
||
// Stop now if we're parsing one of these:
|
||
// "(a?) => {}"
|
||
// "(a?: b) => {}"
|
||
// "(a?, b?) => {}"
|
||
if (is_typescript_enabled and left.loc.start == p.latest_arrow_arg_loc.start and (p.lexer.token == .t_colon or
|
||
p.lexer.token == .t_close_paren or p.lexer.token == .t_comma))
|
||
{
|
||
if (errors == null) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
errors.?.invalid_expr_after_question = p.lexer.range();
|
||
return left;
|
||
}
|
||
|
||
// Allow "in" in between "?" and ":"
|
||
const old_allow_in = p.allow_in;
|
||
p.allow_in = true;
|
||
|
||
const yes = try p.parseExpr(.comma);
|
||
|
||
p.allow_in = old_allow_in;
|
||
|
||
try p.lexer.expect(.t_colon);
|
||
const no = try p.parseExpr(.comma);
|
||
|
||
left = p.newExpr(E.If{
|
||
.test_ = left,
|
||
.yes = yes,
|
||
.no = no,
|
||
}, left.loc);
|
||
},
|
||
.t_exclamation => {
|
||
// Skip over TypeScript non-null assertions
|
||
if (p.lexer.has_newline_before) {
|
||
return left;
|
||
}
|
||
|
||
if (!is_typescript_enabled) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
if (level.gte(.postfix)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
optional_chain = old_optional_chain;
|
||
},
|
||
.t_minus_minus => {
|
||
if (p.lexer.has_newline_before or level.gte(.postfix)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Unary{ .op = .un_post_dec, .value = left }, left.loc);
|
||
},
|
||
.t_plus_plus => {
|
||
if (p.lexer.has_newline_before or level.gte(.postfix)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Unary{ .op = .un_post_inc, .value = left }, left.loc);
|
||
},
|
||
.t_comma => {
|
||
if (level.gte(.comma)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_comma, .left = left, .right = try p.parseExpr(.comma) }, left.loc);
|
||
},
|
||
.t_plus => {
|
||
if (level.gte(.add)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_add, .left = left, .right = try p.parseExpr(.add) }, left.loc);
|
||
},
|
||
.t_plus_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_add_assign, .left = left, .right = try p.parseExpr(@as(Op.Level, @enumFromInt(@intFromEnum(Op.Level.assign) - 1))) }, left.loc);
|
||
},
|
||
.t_minus => {
|
||
if (level.gte(.add)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_sub, .left = left, .right = try p.parseExpr(.add) }, left.loc);
|
||
},
|
||
.t_minus_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_sub_assign, .left = left, .right = try p.parseExpr(Op.Level.sub(Op.Level.assign, 1)) }, left.loc);
|
||
},
|
||
.t_asterisk => {
|
||
if (level.gte(.multiply)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_mul, .left = left, .right = try p.parseExpr(.multiply) }, left.loc);
|
||
},
|
||
.t_asterisk_asterisk => {
|
||
if (level.gte(.exponentiation)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_pow, .left = left, .right = try p.parseExpr(Op.Level.exponentiation.sub(1)) }, left.loc);
|
||
},
|
||
.t_asterisk_asterisk_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_pow_assign, .left = left, .right = try p.parseExpr(Op.Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_asterisk_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_mul_assign, .left = left, .right = try p.parseExpr(Op.Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_percent => {
|
||
if (level.gte(.multiply)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_rem, .left = left, .right = try p.parseExpr(Op.Level.multiply) }, left.loc);
|
||
},
|
||
.t_percent_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_rem_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_slash => {
|
||
if (level.gte(.multiply)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_div, .left = left, .right = try p.parseExpr(Level.multiply) }, left.loc);
|
||
},
|
||
.t_slash_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_div_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_equals_equals => {
|
||
if (level.gte(.equals)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_loose_eq, .left = left, .right = try p.parseExpr(Level.equals) }, left.loc);
|
||
},
|
||
.t_exclamation_equals => {
|
||
if (level.gte(.equals)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_loose_ne, .left = left, .right = try p.parseExpr(Level.equals) }, left.loc);
|
||
},
|
||
.t_equals_equals_equals => {
|
||
if (level.gte(.equals)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_strict_eq, .left = left, .right = try p.parseExpr(Level.equals) }, left.loc);
|
||
},
|
||
.t_exclamation_equals_equals => {
|
||
if (level.gte(.equals)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_strict_ne, .left = left, .right = try p.parseExpr(Level.equals) }, left.loc);
|
||
},
|
||
.t_less_than => {
|
||
// TypeScript allows type arguments to be specified with angle brackets
|
||
// inside an expression. Unlike in other languages, this unfortunately
|
||
// appears to require backtracking to parse.
|
||
if (is_typescript_enabled and p.trySkipTypeScriptTypeArgumentsWithBacktracking()) {
|
||
optional_chain = old_optional_chain;
|
||
continue;
|
||
}
|
||
|
||
if (level.gte(.compare)) {
|
||
return left;
|
||
}
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_lt, .left = left, .right = try p.parseExpr(.compare) }, left.loc);
|
||
},
|
||
.t_less_than_equals => {
|
||
if (level.gte(.compare)) {
|
||
return left;
|
||
}
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_le, .left = left, .right = try p.parseExpr(.compare) }, left.loc);
|
||
},
|
||
.t_greater_than => {
|
||
if (level.gte(.compare)) {
|
||
return left;
|
||
}
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_gt, .left = left, .right = try p.parseExpr(.compare) }, left.loc);
|
||
},
|
||
.t_greater_than_equals => {
|
||
if (level.gte(.compare)) {
|
||
return left;
|
||
}
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_ge, .left = left, .right = try p.parseExpr(.compare) }, left.loc);
|
||
},
|
||
.t_less_than_less_than => {
|
||
// TypeScript allows type arguments to be specified with angle brackets
|
||
// inside an expression. Unlike in other languages, this unfortunately
|
||
// appears to require backtracking to parse.
|
||
if (is_typescript_enabled and p.trySkipTypeScriptTypeArgumentsWithBacktracking()) {
|
||
optional_chain = old_optional_chain;
|
||
continue;
|
||
}
|
||
|
||
if (level.gte(.shift)) {
|
||
return left;
|
||
}
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_shl, .left = left, .right = try p.parseExpr(.shift) }, left.loc);
|
||
},
|
||
.t_less_than_less_than_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_shl_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_greater_than_greater_than => {
|
||
if (level.gte(.shift)) {
|
||
return left;
|
||
}
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_shr, .left = left, .right = try p.parseExpr(.shift) }, left.loc);
|
||
},
|
||
.t_greater_than_greater_than_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_shr_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_greater_than_greater_than_greater_than => {
|
||
if (level.gte(.shift)) {
|
||
return left;
|
||
}
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_u_shr, .left = left, .right = try p.parseExpr(.shift) }, left.loc);
|
||
},
|
||
.t_greater_than_greater_than_greater_than_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_u_shr_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_question_question => {
|
||
if (level.gte(.nullish_coalescing)) {
|
||
return left;
|
||
}
|
||
try p.lexer.next();
|
||
const prev = left;
|
||
left = p.newExpr(E.Binary{ .op = .bin_nullish_coalescing, .left = prev, .right = try p.parseExpr(.nullish_coalescing) }, left.loc);
|
||
},
|
||
.t_question_question_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_nullish_coalescing_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_bar_bar => {
|
||
if (level.gte(.logical_or)) {
|
||
return left;
|
||
}
|
||
|
||
// Prevent "||" inside "??" from the right
|
||
if (level.eql(.nullish_coalescing)) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
const right = try p.parseExpr(.logical_or);
|
||
left = p.newExpr(E.Binary{ .op = Op.Code.bin_logical_or, .left = left, .right = right }, left.loc);
|
||
|
||
if (level.lt(.nullish_coalescing)) {
|
||
left = try p.parseSuffix(left, Level.nullish_coalescing.addF(1), null, flags);
|
||
|
||
if (p.lexer.token == .t_question_question) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
}
|
||
},
|
||
.t_bar_bar_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_logical_or_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_ampersand_ampersand => {
|
||
if (level.gte(.logical_and)) {
|
||
return left;
|
||
}
|
||
|
||
// Prevent "&&" inside "??" from the right
|
||
if (level.eql(.nullish_coalescing)) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_logical_and, .left = left, .right = try p.parseExpr(.logical_and) }, left.loc);
|
||
|
||
// Prevent "&&" inside "??" from the left
|
||
if (level.lt(.nullish_coalescing)) {
|
||
left = try p.parseSuffix(left, Level.nullish_coalescing.addF(1), null, flags);
|
||
|
||
if (p.lexer.token == .t_question_question) {
|
||
try p.lexer.unexpected();
|
||
return error.SyntaxError;
|
||
}
|
||
}
|
||
},
|
||
.t_ampersand_ampersand_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_logical_and_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_bar => {
|
||
if (level.gte(.bitwise_or)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_bitwise_or, .left = left, .right = try p.parseExpr(.bitwise_or) }, left.loc);
|
||
},
|
||
.t_bar_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_bitwise_or_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_ampersand => {
|
||
if (level.gte(.bitwise_and)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_bitwise_and, .left = left, .right = try p.parseExpr(.bitwise_and) }, left.loc);
|
||
},
|
||
.t_ampersand_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_bitwise_and_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_caret => {
|
||
if (level.gte(.bitwise_xor)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_bitwise_xor, .left = left, .right = try p.parseExpr(.bitwise_xor) }, left.loc);
|
||
},
|
||
.t_caret_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_bitwise_xor_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_equals => {
|
||
if (level.gte(.assign)) {
|
||
return left;
|
||
}
|
||
|
||
try p.lexer.next();
|
||
|
||
left = p.newExpr(E.Binary{ .op = .bin_assign, .left = left, .right = try p.parseExpr(Level.assign.sub(1)) }, left.loc);
|
||
},
|
||
.t_in => {
|
||
if (level.gte(.compare) or !p.allow_in) {
|
||
return left;
|
||
}
|
||
|
||
// Warn about "!a in b" instead of "!(a in b)"
|
||
switch (left.data) {
|
||
.e_unary => |unary| {
|
||
if (unary.op == .un_not) {
|
||
// TODO:
|
||
// p.log.addRangeWarning(source: ?Source, r: Range, text: string)
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_in, .left = left, .right = try p.parseExpr(.compare) }, left.loc);
|
||
},
|
||
.t_instanceof => {
|
||
if (level.gte(.compare)) {
|
||
return left;
|
||
}
|
||
|
||
// Warn about "!a instanceof b" instead of "!(a instanceof b)". Here's an
|
||
// example of code with this problem: https://github.com/mrdoob/three.js/pull/11182.
|
||
if (!p.options.suppress_warnings_about_weird_code) {
|
||
switch (left.data) {
|
||
.e_unary => |unary| {
|
||
if (unary.op == .un_not) {
|
||
// TODO:
|
||
// p.log.addRangeWarning(source: ?Source, r: Range, text: string)
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
try p.lexer.next();
|
||
left = p.newExpr(E.Binary{ .op = .bin_instanceof, .left = left, .right = try p.parseExpr(.compare) }, left.loc);
|
||
},
|
||
else => {
|
||
// Handle the TypeScript "as" operator
|
||
// Handle the TypeScript "satisfies" operator
|
||
if (is_typescript_enabled and level.lt(.compare) and !p.lexer.has_newline_before and (p.lexer.isContextualKeyword("as") or p.lexer.isContextualKeyword("satisfies"))) {
|
||
try p.lexer.next();
|
||
try p.skipTypeScriptType(.lowest);
|
||
|
||
// These tokens are not allowed to follow a cast expression. This isn't
|
||
// an outright error because it may be on a new line, in which case it's
|
||
// the start of a new expression when it's after a cast:
|
||
//
|
||
// x = y as z
|
||
// (something);
|
||
//
|
||
switch (p.lexer.token) {
|
||
.t_plus_plus,
|
||
.t_minus_minus,
|
||
.t_no_substitution_template_literal,
|
||
.t_template_head,
|
||
.t_open_paren,
|
||
.t_open_bracket,
|
||
.t_question_dot,
|
||
=> {
|
||
p.forbid_suffix_after_as_loc = p.lexer.loc();
|
||
return left;
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
if (p.lexer.token.isAssign()) {
|
||
p.forbid_suffix_after_as_loc = p.lexer.loc();
|
||
return left;
|
||
}
|
||
continue;
|
||
}
|
||
|
||
return left;
|
||
},
|
||
}
|
||
}
|
||
}
|
||
|
||
/// Visitor used while expanding Bun macros: records imports that a macro
/// injected into the module so they participate in symbol resolution and
/// are prepended to the file's statements.
pub const MacroVisitor = struct {
    // The parser this visitor feeds into.
    p: *P,

    // Location attributed to every injected import (the macro call site).
    loc: logger.Loc,

    /// Registers a macro-injected import: creates an import record flagged
    /// as macro-injected, declares a symbol per import clause, and queues
    /// the import statement to be prepended to the module.
    pub fn visitImport(this: MacroVisitor, import_data: js_ast.Macro.JSNode.ImportData) void {
        var p = this.p;

        const record_id = p.addImportRecord(.stmt, this.loc, import_data.path);
        var record: *ImportRecord = &p.import_records.items[record_id];
        // Mark so later passes know this record did not come from user code.
        record.was_injected_by_macro = true;
        p.macro.imports.ensureUnusedCapacity(import_data.import.items.len) catch unreachable;
        var import = import_data.import;
        import.import_record_index = record_id;

        // Reserve up front so putAssumeCapacity below cannot fail mid-loop.
        p.is_import_item.ensureUnusedCapacity(
            p.allocator,
            @as(u32, @intCast(p.is_import_item.count() + import.items.len)),
        ) catch unreachable;

        for (import.items) |*clause| {
            // Hash key must use the name as written by the macro, before any
            // renaming of default imports below.
            const import_hash_name = clause.original_name;

            if (strings.eqlComptime(clause.alias, "default")) {
                // Give the default import a stable, path-derived local name
                // (e.g. "foo_default") instead of the literal "default".
                const non_unique_name = record.path.name.nonUniqueNameString(p.allocator) catch unreachable;
                clause.original_name = std.fmt.allocPrint(p.allocator, "{s}_default", .{non_unique_name}) catch unreachable;
                record.contains_default_alias = true;
            }
            const name_ref = p.declareSymbol(.import, this.loc, clause.original_name) catch unreachable;
            clause.name = LocRef{ .loc = this.loc, .ref = name_ref };

            p.is_import_item.putAssumeCapacity(name_ref, {});

            // Map (original name, path) -> symbol so macro expansion can
            // reference the injected import by its hash.
            p.macro.imports.putAssumeCapacity(js_ast.Macro.JSNode.SymbolMap.generateImportHash(import_hash_name, import_data.path), name_ref);

            // Ensure we don't accidentally think this is an export from
        }

        // Injected imports are prepended so they precede any code that uses them.
        p.macro.prepend_stmts.append(p.s(import, this.loc)) catch unreachable;
    }
};
|
||
|
||
/// Reports a fatal parser error at the lexer's current location and aborts
/// the process. Thin wrapper around `panicLoc` with no explicit location.
pub fn panic(p: *P, comptime fmt: string, args: anytype) noreturn {
    // Mark this function as a cold path BEFORE diverging. The original
    // placed @setCold(true) after the call to panicLoc, which is noreturn,
    // so the hint was unreachable and could never take effect.
    @setCold(true);
    p.panicLoc(fmt, args, null);
}
|
||
|
||
/// Reports a fatal parser error and aborts the process.
/// Renders the accumulated log (with a "panic here" marker at `loc`, or at
/// the lexer's current location when `loc` is null) into a fixed buffer,
/// then panics with the formatted message followed by that log text.
pub fn panicLoc(p: *P, comptime fmt: string, args: anytype, loc: ?logger.Loc) noreturn {
    // 32 KiB scratch buffer for rendering the log; never freed since we abort.
    var panic_buffer = p.allocator.alloc(u8, 32 * 1024) catch unreachable;
    var panic_stream = std.io.fixedBufferStream(panic_buffer);

    // panic during visit pass leaves the lexer at the end, which
    // would make this location absolutely useless.
    const location = loc orelse p.lexer.loc();
    // Only add the marker when the location is inside the source and non-empty.
    if (location.start < p.lexer.source.contents.len and !location.isEmpty()) {
        p.log.addRangeErrorFmt(
            p.source,
            .{ .loc = location },
            p.allocator,
            "panic here",
            .{},
        ) catch bun.outOfMemory();
    }

    // Force verbose so every accumulated message is printed before aborting.
    p.log.level = .verbose;
    p.log.print(panic_stream.writer()) catch unreachable;

    // Append the rendered log (only the bytes actually written) to the message.
    Output.panic(fmt ++ "\n{s}", args ++ .{panic_buffer[0..panic_stream.pos]});
}
|
||
|
||
/// Parses a prefix (primary/unary) expression starting at the current token.
/// This is the leaf dispatcher of the Pratt parser: literals, identifiers
/// (including `async` / `await` / `yield` handling), unary operators, `new`,
/// class/function expressions, array/object literals, JSX elements, and
/// TypeScript-only syntax such as old-style casts and generic arrows.
///
/// `level` is the binding power of the surrounding context; `errors`, when
/// non-null, collects deferred errors for constructs that may later turn out
/// to be binding patterns instead of expressions; `flags` carries extra
/// context from the caller (e.g. decorator targets).
pub fn parsePrefix(p: *P, level: Level, errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr {
    const loc = p.lexer.loc();
    const l = @intFromEnum(level);
    // Output.print("Parse Prefix {s}:{s} @{s} ", .{ p.lexer.token, p.lexer.raw(), @tagName(level) });

    switch (p.lexer.token) {
        .t_super => {
            const superRange = p.lexer.range();
            try p.lexer.next();

            switch (p.lexer.token) {
                .t_open_paren => {
                    // "super()" is only valid where the enclosing function allows
                    // super calls (constructors) and we're not already mid-call.
                    if (l < @intFromEnum(Level.call) and p.fn_or_arrow_data_parse.allow_super_call) {
                        return p.newExpr(E.Super{}, loc);
                    }
                },
                .t_dot, .t_open_bracket => {
                    // "super.x" / "super[x]" require a method context.
                    if (p.fn_or_arrow_data_parse.allow_super_property) {
                        return p.newExpr(E.Super{}, loc);
                    }
                },
                else => {},
            }

            // Invalid use of "super": report but still produce a node so
            // parsing can continue.
            p.log.addRangeError(p.source, superRange, "Unexpected \"super\"") catch unreachable;
            return p.newExpr(E.Super{}, loc);
        },
        .t_open_paren => {
            try p.lexer.next();

            // Arrow functions aren't allowed in the middle of expressions
            if (level.gt(.assign)) {
                // Allow "in" inside parentheses
                const oldAllowIn = p.allow_in;
                p.allow_in = true;

                var value = try p.parseExpr(Level.lowest);
                p.markExprAsParenthesized(&value);
                try p.lexer.expect(.t_close_paren);

                p.allow_in = oldAllowIn;
                return value;
            }

            // Could still be a parenthesized expression OR an arrow-function
            // parameter list; parseParenExpr disambiguates.
            return p.parseParenExpr(loc, level, ParenExprOpts{});
        },
        .t_false => {
            try p.lexer.next();
            return p.newExpr(E.Boolean{ .value = false }, loc);
        },
        .t_true => {
            try p.lexer.next();
            return p.newExpr(E.Boolean{ .value = true }, loc);
        },
        .t_null => {
            try p.lexer.next();
            return p.newExpr(E.Null{}, loc);
        },
        .t_this => {
            if (p.fn_or_arrow_data_parse.is_this_disallowed) {
                p.log.addRangeError(p.source, p.lexer.range(), "Cannot use \"this\" here") catch unreachable;
            }
            try p.lexer.next();
            return Expr{ .data = Prefill.Data.This, .loc = loc };
        },
        .t_private_identifier => {
            // A bare private name is only valid as the left side of "#x in obj".
            if (!p.allow_private_identifiers or !p.allow_in or level.gte(.compare)) {
                try p.lexer.unexpected();
                return error.SyntaxError;
            }

            const name = p.lexer.identifier;
            try p.lexer.next();

            // Check for "#foo in bar"
            if (p.lexer.token != .t_in) {
                try p.lexer.expected(.t_in);
            }

            return p.newExpr(E.PrivateIdentifier{ .ref = try p.storeNameInRef(name) }, loc);
        },
        .t_identifier => {
            const name = p.lexer.identifier;
            const name_range = p.lexer.range();
            const raw = p.lexer.raw();

            try p.lexer.next();

            // Handle async and await expressions
            switch (AsyncPrefixExpression.find(name)) {
                .is_async => {
                    // Only treat as "async" when the raw text is not an escaped
                    // form (e.g. "\u0061sync" is not a keyword).
                    if ((raw.ptr == name.ptr and raw.len == name.len) or AsyncPrefixExpression.find(raw) == .is_async) {
                        return try p.parseAsyncPrefixExpr(name_range, level);
                    }
                },

                .is_await => {
                    switch (p.fn_or_arrow_data_parse.allow_await) {
                        .forbid_all => {
                            p.log.addRangeError(p.source, name_range, "The keyword \"await\" cannot be used here") catch unreachable;
                        },
                        .allow_expr => {
                            if (AsyncPrefixExpression.find(raw) != .is_await) {
                                p.log.addRangeError(p.source, name_range, "The keyword \"await\" cannot be escaped") catch unreachable;
                            } else {
                                if (p.fn_or_arrow_data_parse.is_top_level) {
                                    // Remember the range for top-level-await validation.
                                    p.top_level_await_keyword = name_range;
                                }

                                if (p.fn_or_arrow_data_parse.track_arrow_arg_errors) {
                                    p.fn_or_arrow_data_parse.arrow_arg_errors.invalid_expr_await = name_range;
                                }

                                const value = try p.parseExpr(.prefix);
                                // "await x ** y" is a syntax error per spec.
                                if (p.lexer.token == T.t_asterisk_asterisk) {
                                    try p.lexer.unexpected();
                                    return error.SyntaxError;
                                }

                                return p.newExpr(E.Await{ .value = value }, loc);
                            }
                        },
                        .allow_ident => {
                            // "await" is a plain identifier here; note it for the
                            // lexer so a later "=>" can retroactively flag misuse.
                            p.lexer.prev_token_was_await_keyword = true;
                            p.lexer.await_keyword_loc = name_range.loc;
                            p.lexer.fn_or_arrow_start_loc = p.fn_or_arrow_data_parse.needs_async_loc;
                        },
                    }
                },

                .is_yield => {
                    switch (p.fn_or_arrow_data_parse.allow_yield) {
                        .forbid_all => {
                            p.log.addRangeError(p.source, name_range, "The keyword \"yield\" cannot be used here") catch unreachable;
                        },
                        .allow_expr => {
                            if (AsyncPrefixExpression.find(raw) != .is_yield) {
                                p.log.addRangeError(p.source, name_range, "The keyword \"yield\" cannot be escaped") catch unreachable;
                            } else {
                                if (level.gt(.assign)) {
                                    p.log.addRangeError(p.source, name_range, "Cannot use a \"yield\" here without parentheses") catch unreachable;
                                }

                                if (p.fn_or_arrow_data_parse.track_arrow_arg_errors) {
                                    p.fn_or_arrow_data_parse.arrow_arg_errors.invalid_expr_yield = name_range;
                                }

                                return p.parseYieldExpr(loc);
                            }
                        },
                        // .allow_ident => {

                        // },
                        else => {
                            // Try to gracefully recover if "yield" is used in the wrong place
                            if (!p.lexer.has_newline_before) {
                                switch (p.lexer.token) {
                                    .t_null, .t_identifier, .t_false, .t_true, .t_numeric_literal, .t_big_integer_literal, .t_string_literal => {
                                        p.log.addRangeError(p.source, name_range, "Cannot use \"yield\" outside a generator function") catch unreachable;
                                    },
                                    else => {},
                                }
                            }
                        },
                    }
                },
                .none => {},
            }

            // Handle the start of an arrow expression: "ident => ..."
            if (p.lexer.token == .t_equals_greater_than and level.lte(.assign)) {
                const ref = p.storeNameInRef(name) catch unreachable;
                var args = p.allocator.alloc(Arg, 1) catch unreachable;
                args[0] = Arg{ .binding = p.b(B.Identifier{
                    .ref = ref,
                }, loc) };

                _ = p.pushScopeForParsePass(.function_args, loc) catch unreachable;
                defer p.popScope();

                var fn_or_arrow_data = FnOrArrowDataParse{
                    .needs_async_loc = loc,
                };
                return p.newExpr(try p.parseArrowBody(args, &fn_or_arrow_data), loc);
            }

            // Plain identifier reference.
            const ref = p.storeNameInRef(name) catch unreachable;

            return Expr.initIdentifier(ref, loc);
        },
        .t_string_literal, .t_no_substitution_template_literal => {
            return try p.parseStringLiteral();
        },
        .t_template_head => {
            const head = try p.lexer.toEString();

            const parts = try p.parseTemplateParts(false);

            // Check if TemplateLiteral is unsupported. We don't care for this product.
            // if ()

            return p.newExpr(E.Template{
                .head = .{ .cooked = head },
                .parts = parts,
            }, loc);
        },
        .t_numeric_literal => {
            const value = p.newExpr(E.Number{ .value = p.lexer.number }, loc);
            // p.checkForLegacyOctalLiteral()
            try p.lexer.next();
            return value;
        },
        .t_big_integer_literal => {
            const value = p.lexer.identifier;
            // markSyntaxFeature bigInt
            try p.lexer.next();
            return p.newExpr(E.BigInt{ .value = value }, loc);
        },
        .t_slash, .t_slash_equals => {
            // "/" in prefix position starts a regex literal; re-scan it as one.
            try p.lexer.scanRegExp();
            // always set regex_flags_start to null to make sure we don't accidentally use the wrong value later
            defer p.lexer.regex_flags_start = null;
            const value = p.lexer.raw();
            try p.lexer.next();

            return p.newExpr(E.RegExp{ .value = value, .flags_offset = p.lexer.regex_flags_start }, loc);
        },
        .t_void => {
            try p.lexer.next();
            const value = try p.parseExpr(.prefix);
            // "void x ** y" is a syntax error: "**" cannot follow a unary
            // operand without parentheses. Same check for the other unary
            // operators below.
            if (p.lexer.token == .t_asterisk_asterisk) {
                try p.lexer.unexpected();
                return error.SyntaxError;
            }

            return p.newExpr(E.Unary{
                .op = .un_void,
                .value = value,
            }, loc);
        },
        .t_typeof => {
            try p.lexer.next();
            const value = try p.parseExpr(.prefix);
            if (p.lexer.token == .t_asterisk_asterisk) {
                try p.lexer.unexpected();
                return error.SyntaxError;
            }

            return p.newExpr(E.Unary{ .op = .un_typeof, .value = value }, loc);
        },
        .t_delete => {
            try p.lexer.next();
            const value = try p.parseExpr(.prefix);
            if (p.lexer.token == .t_asterisk_asterisk) {
                try p.lexer.unexpected();
                return error.SyntaxError;
            }
            // "delete obj.#private" is forbidden by the spec.
            if (value.data == .e_index) {
                if (value.data.e_index.index.data == .e_private_identifier) {
                    const private = value.data.e_index.index.data.e_private_identifier;
                    const name = p.loadNameFromRef(private.ref);
                    const range = logger.Range{ .loc = value.loc, .len = @as(i32, @intCast(name.len)) };
                    p.log.addRangeErrorFmt(p.source, range, p.allocator, "Deleting the private name \"{s}\" is forbidden", .{name}) catch unreachable;
                }
            }

            return p.newExpr(E.Unary{ .op = .un_delete, .value = value }, loc);
        },
        .t_plus => {
            try p.lexer.next();
            const value = try p.parseExpr(.prefix);
            if (p.lexer.token == .t_asterisk_asterisk) {
                try p.lexer.unexpected();
                return error.SyntaxError;
            }

            return p.newExpr(E.Unary{ .op = .un_pos, .value = value }, loc);
        },
        .t_minus => {
            try p.lexer.next();
            const value = try p.parseExpr(.prefix);
            if (p.lexer.token == .t_asterisk_asterisk) {
                try p.lexer.unexpected();
                return error.SyntaxError;
            }

            return p.newExpr(E.Unary{ .op = .un_neg, .value = value }, loc);
        },
        .t_tilde => {
            try p.lexer.next();
            const value = try p.parseExpr(.prefix);
            if (p.lexer.token == .t_asterisk_asterisk) {
                try p.lexer.unexpected();
                return error.SyntaxError;
            }

            return p.newExpr(E.Unary{ .op = .un_cpl, .value = value }, loc);
        },
        .t_exclamation => {
            try p.lexer.next();
            const value = try p.parseExpr(.prefix);
            if (p.lexer.token == .t_asterisk_asterisk) {
                try p.lexer.unexpected();
                return error.SyntaxError;
            }

            return p.newExpr(E.Unary{ .op = .un_not, .value = value }, loc);
        },
        .t_minus_minus => {
            try p.lexer.next();
            return p.newExpr(E.Unary{ .op = .un_pre_dec, .value = try p.parseExpr(.prefix) }, loc);
        },
        .t_plus_plus => {
            try p.lexer.next();
            return p.newExpr(E.Unary{ .op = .un_pre_inc, .value = try p.parseExpr(.prefix) }, loc);
        },
        .t_function => {
            return try p.parseFnExpr(loc, false, logger.Range.None);
        },
        .t_class => {
            const classKeyword = p.lexer.range();
            // markSyntaxFeature class
            try p.lexer.next();
            var name: ?js_ast.LocRef = null;

            _ = p.pushScopeForParsePass(.class_name, loc) catch unreachable;

            // Parse an optional class name
            if (p.lexer.token == .t_identifier) {
                const name_text = p.lexer.identifier;
                // In TypeScript "class implements ..." means an anonymous class.
                if (!is_typescript_enabled or !strings.eqlComptime(name_text, "implements")) {
                    if (p.fn_or_arrow_data_parse.allow_await != .allow_ident and strings.eqlComptime(name_text, "await")) {
                        p.log.addRangeError(p.source, p.lexer.range(), "Cannot use \"await\" as an identifier here") catch unreachable;
                    }

                    name = js_ast.LocRef{
                        .loc = p.lexer.loc(),
                        .ref = p.newSymbol(
                            .other,
                            name_text,
                        ) catch unreachable,
                    };
                    try p.lexer.next();
                }
            }

            // Even anonymous classes can have TypeScript type parameters
            if (is_typescript_enabled) {
                _ = try p.skipTypeScriptTypeParameters(.{ .allow_in_out_variance_annotations = true, .allow_const_modifier = true });
            }

            const class = try p.parseClass(classKeyword, name, ParseClassOptions{});
            p.popScope();

            return p.newExpr(class, loc);
        },
        .t_new => {
            try p.lexer.next();

            // Special-case the weird "new.target" expression here
            if (p.lexer.token == .t_dot) {
                try p.lexer.next();

                if (p.lexer.token != .t_identifier or !strings.eqlComptime(p.lexer.raw(), "target")) {
                    try p.lexer.unexpected();
                    return error.SyntaxError;
                }
                const range = logger.Range{ .loc = loc, .len = p.lexer.range().end().start - loc.start };

                try p.lexer.next();
                return p.newExpr(E.NewTarget{ .range = range }, loc);
            }

            // Parse only up to member-expression precedence so call
            // parentheses bind to the "new" we are building.
            const target = try p.parseExprWithFlags(.member, flags);
            var args = ExprNodeList{};

            if (comptime is_typescript_enabled) {
                // Skip over TypeScript non-null assertions
                if (p.lexer.token == .t_exclamation and !p.lexer.has_newline_before) {
                    try p.lexer.next();
                }

                // Skip over TypeScript type arguments here if there are any
                if (p.lexer.token == .t_less_than) {
                    _ = p.trySkipTypeScriptTypeArgumentsWithBacktracking();
                }
            }

            // "new Foo" (no parens) is valid; args stay empty in that case.
            var close_parens_loc = logger.Loc.Empty;
            if (p.lexer.token == .t_open_paren) {
                const call_args = try p.parseCallArgs();
                args = call_args.list;
                close_parens_loc = call_args.loc;
            }

            return p.newExpr(E.New{
                .target = target,
                .args = args,
                .close_parens_loc = close_parens_loc,
            }, loc);
        },
        .t_open_bracket => {
            // Array literal — may later turn out to be a destructuring pattern,
            // so errors are deferred into self_errors until we know which.
            try p.lexer.next();
            var is_single_line = !p.lexer.has_newline_before;
            var items = ListManaged(Expr).init(p.allocator);
            var self_errors = DeferredErrors{};
            var comma_after_spread = logger.Loc{};

            // Allow "in" inside arrays
            const old_allow_in = p.allow_in;
            p.allow_in = true;

            while (p.lexer.token != .t_close_bracket) {
                switch (p.lexer.token) {
                    .t_comma => {
                        // Elision: "[,]" produces a missing element.
                        items.append(Expr{ .data = Prefill.Data.EMissing, .loc = p.lexer.loc() }) catch unreachable;
                    },
                    .t_dot_dot_dot => {
                        if (errors != null)
                            errors.?.array_spread_feature = p.lexer.range();

                        const dots_loc = p.lexer.loc();
                        try p.lexer.next();
                        items.append(
                            p.newExpr(E.Spread{ .value = try p.parseExprOrBindings(.comma, &self_errors) }, dots_loc),
                        ) catch unreachable;

                        // Commas are not allowed here when destructuring
                        if (p.lexer.token == .t_comma) {
                            comma_after_spread = p.lexer.loc();
                        }
                    },
                    else => {
                        items.append(
                            try p.parseExprOrBindings(.comma, &self_errors),
                        ) catch unreachable;
                    },
                }

                if (p.lexer.token != .t_comma) {
                    break;
                }

                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }

                try p.lexer.next();

                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }
            }

            if (p.lexer.has_newline_before) {
                is_single_line = false;
            }

            const close_bracket_loc = p.lexer.loc();
            try p.lexer.expect(.t_close_bracket);
            p.allow_in = old_allow_in;

            // Is this a binding pattern?
            if (p.willNeedBindingPattern()) {
                // noop
            } else if (errors == null) {
                // Is this an expression?
                p.logExprErrors(&self_errors);
            } else {
                // In this case, we can't distinguish between the two yet
                self_errors.mergeInto(errors.?);
            }
            return p.newExpr(E.Array{
                .items = ExprNodeList.fromList(items),
                .comma_after_spread = comma_after_spread.toNullable(),
                .is_single_line = is_single_line,
                .close_bracket_loc = close_bracket_loc,
            }, loc);
        },
        .t_open_brace => {
            // Object literal — like arrays, may be a destructuring pattern.
            try p.lexer.next();
            var is_single_line = !p.lexer.has_newline_before;
            var properties = ListManaged(G.Property).init(p.allocator);
            var self_errors = DeferredErrors{};
            var comma_after_spread: logger.Loc = logger.Loc{};

            // Allow "in" inside object literals
            const old_allow_in = p.allow_in;
            p.allow_in = true;

            while (p.lexer.token != .t_close_brace) {
                if (p.lexer.token == .t_dot_dot_dot) {
                    try p.lexer.next();
                    properties.append(G.Property{ .kind = .spread, .value = try p.parseExpr(.comma) }) catch unreachable;

                    // Commas are not allowed here when destructuring
                    if (p.lexer.token == .t_comma) {
                        comma_after_spread = p.lexer.loc();
                    }
                } else {
                    // This property may turn out to be a type in TypeScript, which should be ignored
                    var propertyOpts = PropertyOpts{};
                    if (try p.parseProperty(.normal, &propertyOpts, &self_errors)) |prop| {
                        if (comptime Environment.allow_assert) {
                            assert(prop.key != null or prop.value != null);
                        }
                        properties.append(prop) catch unreachable;
                    }
                }

                if (p.lexer.token != .t_comma) {
                    break;
                }

                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }

                try p.lexer.next();

                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }
            }

            if (p.lexer.has_newline_before) {
                is_single_line = false;
            }

            const close_brace_loc = p.lexer.loc();
            try p.lexer.expect(.t_close_brace);
            p.allow_in = old_allow_in;

            if (p.willNeedBindingPattern()) {
                // Is this a binding pattern?
            } else if (errors == null) {
                // Is this an expression?
                p.logExprErrors(&self_errors);
            } else {
                // In this case, we can't distinguish between the two yet
                self_errors.mergeInto(errors.?);
            }

            return p.newExpr(E.Object{
                .properties = G.Property.List.fromList(properties),
                .comma_after_spread = if (comma_after_spread.start > 0)
                    comma_after_spread
                else
                    null,
                .is_single_line = is_single_line,
                .close_brace_loc = close_brace_loc,
            }, loc);
        },
        .t_less_than => {
            // This is a very complicated and highly ambiguous area of TypeScript
            // syntax. Many similar-looking things are overloaded.
            //
            // TS:
            //
            // A type cast:
            // <A>(x)
            // <[]>(x)
            // <A[]>(x)
            //
            // An arrow function with type parameters:
            // <A>(x) => {}
            // <A, B>(x) => {}
            // <A = B>(x) => {}
            // <A extends B>(x) => {}
            //
            // TSX:
            //
            // A JSX element:
            // <A>(x) => {}</A>
            // <A extends>(x) => {}</A>
            // <A extends={false}>(x) => {}</A>
            //
            // An arrow function with type parameters:
            // <A, B>(x) => {}
            // <A extends B>(x) => {}
            //
            // A syntax error:
            // <[]>(x)
            // <A[]>(x)
            // <A>(x) => {}
            // <A = B>(x) => {}
            if (comptime is_typescript_enabled and is_jsx_enabled) {
                if (try TypeScript.isTSArrowFnJSX(p)) {
                    _ = try p.skipTypeScriptTypeParameters(TypeParameterFlag{
                        .allow_const_modifier = true,
                    });
                    try p.lexer.expect(.t_open_paren);
                    return try p.parseParenExpr(loc, level, ParenExprOpts{ .force_arrow_fn = true });
                }
            }

            if (is_jsx_enabled) {
                // Use NextInsideJSXElement() instead of Next() so we parse "<<" as "<"
                try p.lexer.nextInsideJSXElement();
                const element = try p.parseJSXElement(loc);

                // The call to parseJSXElement() above doesn't consume the last
                // TGreaterThan because the caller knows what Next() function to call.
                // Use Next() instead of NextInsideJSXElement() here since the next
                // token is an expression.
                try p.lexer.next();
                return element;
            }

            if (is_typescript_enabled) {
                // This is either an old-style type cast or a generic lambda function

                // "<T>(x)"
                // "<T>(x) => {}"
                switch (p.trySkipTypeScriptTypeParametersThenOpenParenWithBacktracking()) {
                    .did_not_skip_anything => {},
                    else => |result| {
                        try p.lexer.expect(.t_open_paren);
                        return p.parseParenExpr(loc, level, ParenExprOpts{
                            .force_arrow_fn = result == .definitely_type_parameters,
                        });
                    },
                }

                // "<T>x"
                try p.lexer.next();
                try p.skipTypeScriptType(.lowest);
                try p.lexer.expectGreaterThan(false);
                return p.parsePrefix(level, errors, flags);
            }

            try p.lexer.unexpected();
            return error.SyntaxError;
        },
        .t_import => {
            try p.lexer.next();
            return p.parseImportExpr(loc, level);
        },
        else => {
            try p.lexer.unexpected();
            return error.SyntaxError;
        },
    }
    return error.SyntaxError;
}
|
||
|
||
/// Builds an expression from a dotted name that was split into segments
/// (e.g. {"React", "createElement"}): resolves the first segment as an
/// identifier in scope, then chains the remaining segments as property
/// accesses via `memberExpression`.
fn jsxStringsToMemberExpression(p: *P, loc: logger.Loc, parts: []const []const u8) !Expr {
    const first = parts[0];

    // Resolve the leading segment against the current scope chain.
    const symbol = try p.findSymbol(loc, first);

    const root = p.handleIdentifier(
        loc,
        E.Identifier{
            .ref = symbol.ref,
            .must_keep_due_to_with_stmt = symbol.is_inside_with_scope,
            .can_be_removed_if_unused = true,
        },
        first,
        .{
            .was_originally_identifier = true,
        },
    );

    // Fold any trailing segments into nested dot accesses; a single-segment
    // name is just the identifier itself.
    return if (parts.len > 1)
        p.memberExpression(loc, root, parts[1..])
    else
        root;
}
|
||
|
||
/// Folds a sequence of property names onto `initial_value`, producing nested
/// dot accesses (a.b.c). Each step first offers the parser a chance to
/// rewrite the access (e.g. define substitution); when no rewrite applies,
/// a plain E.Dot node is constructed.
fn memberExpression(p: *P, loc: logger.Loc, initial_value: Expr, parts: []const []const u8) Expr {
    var current = initial_value;

    for (parts) |prop_name| {
        const maybe_rewritten = p.maybeRewritePropertyAccess(
            loc,
            current,
            prop_name,
            loc,
            .{
                .is_call_target = false,
                .assign_target = .none,
                .is_delete_target = false,
            },
        );

        current = maybe_rewritten orelse p.newExpr(
            E.Dot{
                .target = current,
                .name = prop_name,
                .name_loc = loc,

                .can_be_removed_if_unused = p.options.features.dead_code_elimination,
            },
            loc,
        );
    }

    return current;
}
|
||
|
||
// Note: The caller has already parsed the "import" keyword
/// Parses the remainder of an "import" expression: either "import.meta" or
/// a dynamic "import(specifier[, options])" call. Returns the corresponding
/// E.ImportMeta or E.Import node.
fn parseImportExpr(p: *P, loc: logger.Loc, level: Level) anyerror!Expr {
    // Parse an "import.meta" expression
    if (p.lexer.token == .t_dot) {
        // Seeing "import." marks the file as using ESM syntax.
        p.esm_import_keyword = js_lexer.rangeOfIdentifier(p.source, loc);
        try p.lexer.next();
        if (p.lexer.isContextualKeyword("meta")) {
            try p.lexer.next();
            p.has_import_meta = true;
            return p.newExpr(E.ImportMeta{}, loc);
        } else {
            try p.lexer.expectedString("\"meta\"");
        }
    }

    // "import" may not appear mid-expression without parentheses.
    if (level.gt(.call)) {
        const r = js_lexer.rangeOfIdentifier(p.source, loc);
        p.log.addRangeError(p.source, r, "Cannot use an \"import\" expression here without parentheses") catch unreachable;
    }

    // allow "in" inside call arguments;
    const old_allow_in = p.allow_in;
    p.allow_in = true;

    // Temporarily keep comments before the "(" so they could be re-attached
    // to the import (currently discarded — see clearRetainingCapacity below).
    p.lexer.preserve_all_comments_before = true;
    try p.lexer.expect(.t_open_paren);

    // const comments = try p.lexer.comments_to_preserve_before.toOwnedSlice();
    p.lexer.comments_to_preserve_before.clearRetainingCapacity();

    p.lexer.preserve_all_comments_before = false;

    // The module specifier expression.
    const value = try p.parseExpr(.comma);

    // Optional second argument: import attributes/assertions object.
    var import_options = Expr.empty;
    if (p.lexer.token == .t_comma) {
        // "import('./foo.json', )"
        try p.lexer.next();

        if (p.lexer.token != .t_close_paren) {
            // "import('./foo.json', { assert: { type: 'json' } })"
            import_options = try p.parseExpr(.comma);

            if (p.lexer.token == .t_comma) {
                // "import('./foo.json', { assert: { type: 'json' } }, )"
                try p.lexer.next();
            }
        }
    }

    try p.lexer.expect(.t_close_paren);

    p.allow_in = old_allow_in;

    // In scan-only mode, record the dependency eagerly when the specifier
    // is a plain string so the import graph is complete without visiting.
    if (comptime only_scan_imports_and_do_not_visit) {
        if (value.data == .e_string and value.data.e_string.isUTF8() and value.data.e_string.isPresent()) {
            const import_record_index = p.addImportRecord(.dynamic, value.loc, value.data.e_string.slice(p.allocator));

            return p.newExpr(E.Import{
                .expr = value,
                // .leading_interior_comments = comments,
                .import_record_index = import_record_index,
                .options = import_options,
            }, loc);
        }
    }

    // _ = comments; // TODO: leading_interior comments

    // No record yet: the visit pass resolves the specifier later, so the
    // record index is a sentinel here.
    return p.newExpr(E.Import{
        .expr = value,
        // .leading_interior_comments = comments,
        .import_record_index = std.math.maxInt(u32),
        .options = import_options,
    }, loc);
}
|
||
|
||
/// Parses the value of a JSX attribute after the "=" sign: either a JSX-style
/// string literal ("foo", with XML-like escaping rules) or a braced expression
/// ({expr}). Updates `previous_string_with_backslash_loc` so the caller can
/// produce a targeted error message for backslash-escaped quotes in JSX strings.
fn parseJSXPropValueIdentifier(p: *P, previous_string_with_backslash_loc: *logger.Loc) !Expr {
    // Use NextInsideJSXElement() not Next() so we can parse a JSX-style string literal
    try p.lexer.nextInsideJSXElement();
    if (p.lexer.token == .t_string_literal) {
        // Remember where a backslash-before-quote was last seen so parseJSXElement
        // can report "Invalid JSX escape" with a useful location later.
        previous_string_with_backslash_loc.start = @max(p.lexer.loc().start, p.lexer.previous_backslash_quote_in_jsx.loc.start);
        const expr = p.newExpr(try p.lexer.toEString(), previous_string_with_backslash_loc.*);

        // Advance past the string while staying in JSX-element lexing mode.
        try p.lexer.nextInsideJSXElement();
        return expr;
    } else {
        // Use Expect() not ExpectInsideJSXElement() so we can parse expression tokens
        try p.lexer.expect(.t_open_brace);
        const value = try p.parseExpr(.lowest);

        // The closing "}" must be consumed in JSX-element mode so the following
        // attribute (or ">"/"/") is tokenized correctly.
        try p.lexer.expectInsideJSXElement(.t_close_brace);
        return value;
    }
}
|
||
|
||
/// Parses a complete JSX element (or fragment) starting just after the opening "<".
/// Handles attributes (including the special "key" prop, spreads, and the
/// non-standard `{foo}` / `{"foo"}` prop shorthands), self-closing elements,
/// and children (text, expression containers, and nested elements).
/// Returns an E.JSXElement expression; the trailing ">" of the element is NOT
/// consumed — the caller decides which lexer-mode `next*()` function to use.
fn parseJSXElement(p: *P, loc: logger.Loc) anyerror!Expr {
    if (only_scan_imports_and_do_not_visit) {
        // Scan-only mode still needs to know the file uses JSX so the JSX
        // import can be recorded.
        p.needs_jsx_import = true;
    }

    const tag = try JSXTag.parse(P, p);

    // The tag may have TypeScript type arguments: "<Foo<T>/>"
    if (is_typescript_enabled) {
        // Pass a flag to the type argument skipper because we need to call
        _ = try p.skipTypeScriptTypeArguments(true);
    }

    var previous_string_with_backslash_loc = logger.Loc{};
    var properties = G.Property.List{};
    // Index of the "key" prop within the parsed props list, or -1 if absent.
    var key_prop_i: i32 = -1;
    var flags = Flags.JSXElement.Bitset{};
    var start_tag: ?ExprNodeIndex = null;

    // Fragments don't have props
    // Fragments of the form "React.Fragment" are not parsed as fragments.
    if (@as(JSXTag.TagType, tag.data) == .tag) {
        start_tag = tag.data.tag;

        var spread_loc: logger.Loc = logger.Loc.Empty;
        var props = ListManaged(G.Property).init(p.allocator);
        // Index of the first {...spread} prop, or -1; used to detect the
        // deprecated "key after spread" pattern below.
        var first_spread_prop_i: i32 = -1;
        var i: i32 = 0;
        parse_attributes: while (true) {
            switch (p.lexer.token) {
                .t_identifier => {
                    defer i += 1;
                    // Parse the prop name
                    const key_range = p.lexer.range();
                    const prop_name_literal = p.lexer.identifier;
                    const special_prop = E.JSXElement.SpecialProp.Map.get(prop_name_literal) orelse E.JSXElement.SpecialProp.any;
                    try p.lexer.nextInsideJSXElement();

                    if (special_prop == .key) {
                        // <ListItem key>
                        if (p.lexer.token != .t_equals) {
                            // Unlike Babel, we're going to just warn here and move on.
                            try p.log.addWarning(p.source, key_range.loc, "\"key\" prop ignored. Must be a string, number or symbol.");
                            continue;
                        }

                        key_prop_i = i;
                    }

                    const prop_name = p.newExpr(E.String{ .data = prop_name_literal }, key_range.loc);

                    // Parse the value
                    var value: Expr = undefined;
                    if (p.lexer.token != .t_equals) {
                        // Implicitly true value
                        // <button selected>
                        value = p.newExpr(E.Boolean{ .value = true }, logger.Loc{ .start = key_range.loc.start + key_range.len });
                    } else {
                        value = try p.parseJSXPropValueIdentifier(&previous_string_with_backslash_loc);
                    }

                    try props.append(G.Property{ .key = prop_name, .value = value });
                },
                .t_open_brace => {
                    defer i += 1;
                    // Use Next() not ExpectInsideJSXElement() so we can parse "..."
                    try p.lexer.next();

                    switch (p.lexer.token) {
                        .t_dot_dot_dot => {
                            try p.lexer.next();

                            if (first_spread_prop_i == -1) first_spread_prop_i = i;
                            spread_loc = p.lexer.loc();
                            try props.append(G.Property{ .value = try p.parseExpr(.comma), .kind = .spread });
                        },
                        // This implements
                        //  <div {foo} />
                        //  ->
                        //  <div foo={foo} />
                        T.t_identifier => {
                            // we need to figure out what the key they mean is
                            // to do that, we must determine the key name
                            const expr = try p.parseExpr(Level.lowest);

                            const key = brk: {
                                switch (expr.data) {
                                    .e_import_identifier => |ident| {
                                        break :brk p.newExpr(E.String{ .data = p.loadNameFromRef(ident.ref) }, expr.loc);
                                    },
                                    .e_commonjs_export_identifier => |ident| {
                                        break :brk p.newExpr(E.String{ .data = p.loadNameFromRef(ident.ref) }, expr.loc);
                                    },
                                    .e_identifier => |ident| {
                                        break :brk p.newExpr(E.String{ .data = p.loadNameFromRef(ident.ref) }, expr.loc);
                                    },
                                    .e_dot => |dot| {
                                        // <div {foo.bar} /> -> prop name "bar"
                                        break :brk p.newExpr(E.String{ .data = dot.name }, dot.name_loc);
                                    },
                                    .e_index => |index| {
                                        // <div {foo["bar"]} /> -> prop name "bar";
                                        // non-string indices fall through to the error below.
                                        if (index.index.data == .e_string) {
                                            break :brk index.index;
                                        }
                                    },
                                    else => {},
                                }

                                // If we get here, it's invalid
                                try p.log.addError(p.source, expr.loc, "Invalid JSX prop shorthand, must be identifier, dot or string");
                                return error.SyntaxError;
                            };

                            try props.append(G.Property{ .value = expr, .key = key, .kind = .normal });
                        },
                        // This implements
                        //  <div {"foo"} />
                        //  <div {'foo'} />
                        //  ->
                        //  <div foo="foo" />
                        // note: template literals are not supported, operations on strings are not supported either
                        T.t_string_literal => {
                            const key = p.newExpr(try p.lexer.toEString(), p.lexer.loc());
                            try p.lexer.next();
                            try props.append(G.Property{ .value = key, .key = key, .kind = .normal });
                        },

                        else => try p.lexer.unexpected(),
                    }

                    try p.lexer.nextInsideJSXElement();
                },
                else => {
                    break :parse_attributes;
                },
            }
        }

        const is_key_after_spread = key_prop_i > -1 and first_spread_prop_i > -1 and key_prop_i > first_spread_prop_i;
        flags.setPresent(.is_key_after_spread, is_key_after_spread);
        properties = G.Property.List.fromList(props);
        if (is_key_after_spread and p.options.jsx.runtime == .automatic and !p.has_classic_runtime_warned) {
            try p.log.addWarning(p.source, spread_loc, "\"key\" prop after a {...spread} is deprecated in JSX. Falling back to classic runtime.");
            // Only warn once per file.
            p.has_classic_runtime_warned = true;
        }
    }

    // People sometimes try to use the output of "JSON.stringify()" as a JSX
    // attribute when automatically-generating JSX code. Doing so is incorrect
    // because JSX strings work like XML instead of like JS (since JSX is XML-in-
    // JS). Specifically, using a backslash before a quote does not cause it to
    // be escaped:
    //
    //   JSX ends the "content" attribute here and sets "content" to 'some so-called \\'
    //                                          v
    //         <Button content="some so-called \"button text\"" />
    //                                                      ^
    //       There is no "=" after the JSX attribute "text", so we expect a ">"
    //
    // This code special-cases this error to provide a less obscure error message.
    if (p.lexer.token == .t_syntax_error and strings.eqlComptime(p.lexer.raw(), "\\") and previous_string_with_backslash_loc.start > 0) {
        const r = p.lexer.range();
        // Not dealing with this right now.
        try p.log.addRangeError(p.source, r, "Invalid JSX escape - use XML entity codes quotes or pass a JavaScript string instead");
        return error.SyntaxError;
    }

    // A slash here is a self-closing element
    if (p.lexer.token == .t_slash) {
        const close_tag_loc = p.lexer.loc();
        // Use NextInsideJSXElement() not Next() so we can parse ">>" as ">"
        try p.lexer.nextInsideJSXElement();

        if (p.lexer.token != .t_greater_than) {
            try p.lexer.expected(.t_greater_than);
        }

        // Self-closing: no children.
        return p.newExpr(E.JSXElement{
            .tag = start_tag,
            .properties = properties,
            .key_prop_index = key_prop_i,
            .flags = flags,
            .close_tag_loc = close_tag_loc,
        }, loc);
    }

    // Use ExpectJSXElementChild() so we parse child strings
    try p.lexer.expectJSXElementChild(.t_greater_than);
    var children = ListManaged(Expr).init(p.allocator);
    // var last_element_i: usize = 0;

    while (true) {
        switch (p.lexer.token) {
            .t_string_literal => {
                // Text child.
                try children.append(p.newExpr(try p.lexer.toEString(), loc));
                try p.lexer.nextJSXElementChild();
            },
            .t_open_brace => {
                // Use Next() instead of NextJSXElementChild() here since the next token is an expression
                try p.lexer.next();

                const is_spread = p.lexer.token == .t_dot_dot_dot;
                if (is_spread) {
                    try p.lexer.next();
                }

                // The expression is optional, and may be absent
                // (e.g. "{}" or "{/* comment */}" produces no child).
                if (p.lexer.token != .t_close_brace) {
                    var item = try p.parseExpr(.lowest);
                    if (is_spread) {
                        item = p.newExpr(E.Spread{ .value = item }, loc);
                    }
                    try children.append(item);
                }

                // Use ExpectJSXElementChild() so we parse child strings
                try p.lexer.expectJSXElementChild(.t_close_brace);
            },
            .t_less_than => {
                const less_than_loc = p.lexer.loc();
                try p.lexer.nextInsideJSXElement();

                if (p.lexer.token != .t_slash) {
                    // This is a child element
                    children.append(try p.parseJSXElement(less_than_loc)) catch unreachable;

                    // The call to parseJSXElement() above doesn't consume the last
                    // TGreaterThan because the caller knows what Next() function to call.
                    // Use NextJSXElementChild() here since the next token is an element
                    // child.
                    try p.lexer.nextJSXElementChild();
                    continue;
                }

                // This is the closing element
                try p.lexer.nextInsideJSXElement();
                const end_tag = try JSXTag.parse(P, p);

                // Opening and closing tag names must match exactly.
                if (!strings.eql(end_tag.name, tag.name)) {
                    try p.log.addRangeErrorFmtWithNote(
                        p.source,
                        end_tag.range,
                        p.allocator,
                        "Expected closing JSX tag to match opening tag \"\\<{s}\\>\"",
                        .{tag.name},
                        "Opening tag here:",
                        .{},
                        tag.range,
                    );
                    return error.SyntaxError;
                }

                if (p.lexer.token != .t_greater_than) {
                    try p.lexer.expected(.t_greater_than);
                }

                return p.newExpr(E.JSXElement{
                    .tag = end_tag.data.asExpr(),
                    .children = ExprNodeList.fromList(children),
                    .properties = properties,
                    .key_prop_index = key_prop_i,
                    .flags = flags,
                    .close_tag_loc = end_tag.range.loc,
                }, loc);
            },
            else => {
                try p.lexer.unexpected();
                return error.SyntaxError;
            },
        }
    }
}
|
||
|
||
/// Looks at the current token to decide whether what was just parsed as an
/// expression (e.g. "[a]") is actually about to be used as a binding pattern,
/// i.e. it appears on the left of "=", or as the target of "for-in"/"for-of".
fn willNeedBindingPattern(p: *P) bool {
    const token = p.lexer.token;

    // "[a] = b;"
    if (token == .t_equals) return true;

    // "for ([a] in b) {}"
    if (token == .t_in) return !p.allow_in;

    // "for ([a] of b) {}"
    if (token == .t_identifier) return !p.allow_in and p.lexer.isContextualKeyword("of");

    return false;
}
|
||
|
||
/// Visits `stmts` and, if anything survives, appends them to `parts` as a new
/// js_ast.Part, transferring ownership of the per-part accumulators
/// (`symbol_uses`, `declared_symbols`, import records, scopes) into the Part.
/// If visiting left no statements, the accumulators are instead invalidated so
/// usage counts from the dead part don't linger.
fn appendPart(p: *P, parts: *ListManaged(js_ast.Part), stmts: []Stmt) anyerror!void {
    // Reuse the memory if possible
    // This is reusable if the last part turned out to be dead
    p.symbol_uses.clearRetainingCapacity();
    p.declared_symbols.clearRetainingCapacity();
    p.scopes_for_current_part.clearRetainingCapacity();
    p.import_records_for_current_part.clearRetainingCapacity();
    p.import_symbol_property_uses.clearRetainingCapacity();

    p.had_commonjs_named_exports_this_visit = false;

    const allocator = p.allocator;
    var opts = PrependTempRefsOpts{};
    // Takes ownership of `stmts`; visiting may grow/shrink the list in place.
    var partStmts = ListManaged(Stmt).fromOwnedSlice(allocator, stmts);

    try p.visitStmtsAndPrependTempRefs(&partStmts, &opts);

    // Insert any relocated variable statements now
    if (p.relocated_top_level_vars.items.len > 0) {
        var already_declared = RefMap{};
        // Small-map fast path: use a 1KB stack buffer, falling back to the heap
        // only when the map outgrows it.
        var already_declared_allocator_stack = std.heap.stackFallback(1024, allocator);
        const already_declared_allocator = already_declared_allocator_stack.get();
        // Only deinit when the fallback heap allocator was actually used; a
        // purely stack-backed map needs no free.
        defer if (already_declared_allocator_stack.fixed_buffer_allocator.end_index >= 1023) already_declared.deinit(already_declared_allocator);

        for (p.relocated_top_level_vars.items) |*local| {
            // Follow links because "var" declarations may be merged due to hoisting
            while (local.ref != null) {
                var symbol = &p.symbols.items[local.ref.?.innerIndex()];
                if (!symbol.hasLink()) {
                    break;
                }
                local.ref = symbol.link;
            }
            const ref = local.ref orelse continue;
            // Emit at most one declaration per unique ref.
            const declaration_entry = try already_declared.getOrPut(already_declared_allocator, ref);
            if (!declaration_entry.found_existing) {
                const decls = try allocator.alloc(G.Decl, 1);
                decls[0] = Decl{
                    .binding = p.b(B.Identifier{ .ref = ref }, local.loc),
                };
                try partStmts.append(p.s(S.Local{ .decls = G.Decl.List.init(decls) }, local.loc));
            }
        }
        p.relocated_top_level_vars.clearRetainingCapacity();
    }

    if (partStmts.items.len > 0) {
        const final_stmts = partStmts.items;

        try parts.append(js_ast.Part{
            .stmts = final_stmts,
            // Ownership of these accumulators moves into the Part; they are
            // reset to fresh empty values below.
            .symbol_uses = p.symbol_uses,
            .import_symbol_property_uses = p.import_symbol_property_uses,
            .declared_symbols = p.declared_symbols.toOwnedSlice(),
            .import_record_indices = bun.BabyList(u32).init(
                p.import_records_for_current_part.toOwnedSlice(
                    p.allocator,
                ) catch unreachable,
            ),
            .scopes = try p.scopes_for_current_part.toOwnedSlice(p.allocator),
            .can_be_removed_if_unused = p.stmtsCanBeRemovedIfUnused(final_stmts),
            .tag = if (p.had_commonjs_named_exports_this_visit) js_ast.Part.Tag.commonjs_named_export else .none,
        });
        p.symbol_uses = .{};
        p.import_symbol_property_uses = .{};
        p.had_commonjs_named_exports_this_visit = false;
    } else if (p.declared_symbols.len() > 0 or p.symbol_uses.count() > 0) {
        // if the part is dead, invalidate all the usage counts
        p.clearSymbolUsagesFromDeadPart(.{ .stmts = undefined, .declared_symbols = p.declared_symbols, .symbol_uses = p.symbol_uses });
        p.declared_symbols.clearRetainingCapacity();
        p.import_records_for_current_part.clearRetainingCapacity();
    }
}
|
||
|
||
/// Returns true when destructuring this binding has no observable side effects,
/// so an unused declaration using it may be dropped by dead-code elimination.
/// Array and object patterns are checked recursively; any other binding shape
/// is considered removable. Always false when DCE is disabled.
fn bindingCanBeRemovedIfUnused(p: *P, binding: Binding) bool {
    if (!p.options.features.dead_code_elimination) return false;

    switch (binding.data) {
        .b_array => |array| for (array.items) |*item| {
            // Each element pattern and its default initializer must both be
            // side-effect free.
            if (!p.bindingCanBeRemovedIfUnused(item.binding)) return false;
            if (item.default_value) |*default_value| {
                if (!p.exprCanBeRemovedIfUnused(default_value)) return false;
            }
        },
        .b_object => |object| for (object.properties) |*prop| {
            // A computed key can run arbitrary code; spread keys have no key
            // expression to evaluate.
            if (!prop.flags.contains(.is_spread) and !p.exprCanBeRemovedIfUnused(&prop.key)) return false;
            if (!p.bindingCanBeRemovedIfUnused(prop.value)) return false;
            if (prop.default_value) |*default_value| {
                if (!p.exprCanBeRemovedIfUnused(default_value)) return false;
            }
        },
        else => {},
    }

    return true;
}
|
||
|
||
/// Returns true when every statement in `stmts` is free of side effects, so the
/// whole run may be dropped by dead-code elimination if nothing references it.
/// Conservative: any statement kind not explicitly listed is assumed to have
/// side effects. Always false when DCE is disabled.
fn stmtsCanBeRemovedIfUnused(p: *P, stmts: []Stmt) bool {
    if (!p.options.features.dead_code_elimination) return false;
    for (stmts) |stmt| {
        switch (stmt.data) {
            // These never have side effects
            .s_function, .s_empty => {},

            // Let these be removed if they are unused. Note that we also need to
            // check if the imported file is marked as "sideEffects: false" before we
            // can remove a SImport statement. Otherwise the import must be kept for
            // its side effects.
            .s_import => {},

            .s_class => |st| {
                // Class bodies can run code (decorators, computed keys, static
                // initializers), so delegate to the class-specific check.
                if (!p.classCanBeRemovedIfUnused(&st.class)) {
                    return false;
                }
            },

            .s_expr => |st| {
                if (st.does_not_affect_tree_shaking) {
                    // Expressions marked with this are automatically generated and have
                    // no side effects by construction.
                    continue;
                }

                if (!p.exprCanBeRemovedIfUnused(&st.value)) {
                    return false;
                }
            },

            .s_local => |st| {
                // "await" is a side effect because it affects code timing
                if (st.kind == .k_await_using) return false;

                for (st.decls.slice()) |*decl| {
                    // Both the pattern and the initializer must be side-effect free.
                    if (!p.bindingCanBeRemovedIfUnused(decl.binding)) {
                        return false;
                    }

                    if (decl.value) |*decl_value| {
                        if (!p.exprCanBeRemovedIfUnused(decl_value)) {
                            return false;
                        } else if (st.kind == .k_using) {
                            // "using" declarations are only side-effect free if they are initialized to null or undefined
                            if (decl_value.data != .e_null and decl_value.data != .e_undefined) {
                                return false;
                            }
                        }
                    }
                }
            },

            .s_try => |try_| {
                // The try body and the finally block both run; the catch block
                // only runs on throw, so it is not checked here.
                if (!p.stmtsCanBeRemovedIfUnused(try_.body) or (try_.finally != null and !p.stmtsCanBeRemovedIfUnused(try_.finally.?.stmts))) {
                    return false;
                }
            },

            // Exports are tracked separately, so this isn't necessary
            .s_export_clause, .s_export_from => {},

            .s_export_default => |st| {
                switch (st.value) {
                    .stmt => |s2| {
                        switch (s2.data) {
                            .s_expr => |s_expr| {
                                if (!p.exprCanBeRemovedIfUnused(&s_expr.value)) {
                                    return false;
                                }
                            },

                            // These never have side effects
                            .s_function => {},

                            .s_class => {
                                if (!p.classCanBeRemovedIfUnused(&s2.data.s_class.class)) {
                                    return false;
                                }
                            },
                            else => {
                                // Parser invariant: export default only wraps the
                                // statement kinds above.
                                Output.panic("Unexpected type in export default: {any}", .{s2});
                            },
                        }
                    },
                    .expr => |*exp| {
                        if (!p.exprCanBeRemovedIfUnused(exp)) {
                            return false;
                        }
                    },
                }
            },

            else => {
                // Assume that all statements not explicitly special-cased here have side
                // effects, and cannot be removed even if unused
                return false;
            },
        }
    }

    return true;
}
|
||
|
||
/// Resets the per-body temporary-ref state, visits `stmts` in place, and then
/// records a TempRef capturing "this" when the visited function body needed one
/// (p.fn_only_data_visit.this_capture_ref was set during the visit).
/// Compile-time error in scan-only mode: visiting must never run there.
fn visitStmtsAndPrependTempRefs(p: *P, stmts: *ListManaged(Stmt), opts: *PrependTempRefsOpts) anyerror!void {
    if (only_scan_imports_and_do_not_visit) {
        @compileError("only_scan_imports_and_do_not_visit must not run this.");
    }

    // Start with a fresh temp-ref list for this body.
    p.temp_refs_to_declare.deinit(p.allocator);
    p.temp_refs_to_declare = @TypeOf(p.temp_refs_to_declare){};
    p.temp_ref_count = 0;

    try p.visitStmts(stmts, opts.kind);

    // Prepend values for "this" and "arguments"
    if (opts.fn_body_loc) |fn_body_loc| {
        // Capture "this"
        if (p.fn_only_data_visit.this_capture_ref) |ref| {
            try p.temp_refs_to_declare.append(p.allocator, TempRef{
                .ref = ref,
                .value = p.newExpr(E.This{}, fn_body_loc),
            });
        }
    }
}
|
||
|
||
/// Records `ref` as a symbol declared in the current part, noting whether the
/// declaration happened at module scope (top level) or inside a nested scope.
/// Asserts that `ref` actually refers to a symbol.
fn recordDeclaredSymbol(p: *P, ref: Ref) anyerror!void {
    bun.assert(ref.isSymbol());
    const is_top_level = p.current_scope == p.module_scope;
    try p.declared_symbols.append(p.allocator, DeclaredSymbol{ .ref = ref, .is_top_level = is_top_level });
}
|
||
|
||
// public for JSNode.JSXWriter usage
/// Visits an expression with default (empty) in/out context.
/// Thin wrapper over visitExprInOut; compile-time error in scan-only mode.
pub fn visitExpr(p: *P, expr: Expr) Expr {
    if (only_scan_imports_and_do_not_visit) {
        @compileError("only_scan_imports_and_do_not_visit must not run this.");
    }

    // hopefully this gets tailed
    return p.visitExprInOut(expr, .{});
}
|
||
|
||
/// Visits a function declaration/expression: saves and swaps the per-function
/// visit state, checks the function name for strict-mode violations, visits the
/// arguments and body inside freshly pushed argument/body scopes, applies React
/// Fast Refresh post-processing when enabled, then restores the previous state.
/// Returns the function with its visited body. Compile-time error in scan-only mode.
fn visitFunc(p: *P, _func: G.Fn, open_parens_loc: logger.Loc) G.Fn {
    if (only_scan_imports_and_do_not_visit) {
        @compileError("only_scan_imports_and_do_not_visit must not run this.");
    }

    var func = _func;
    // Save the surrounding function context; restored before returning.
    const old_fn_or_arrow_data = p.fn_or_arrow_data_visit;
    const old_fn_only_data = p.fn_only_data_visit;
    p.fn_or_arrow_data_visit = FnOrArrowDataVisit{ .is_async = func.flags.contains(.is_async) };
    p.fn_only_data_visit = FnOnlyDataVisit{ .is_this_nested = true, .arguments_ref = func.arguments_ref };

    if (func.name) |name| {
        if (name.ref) |name_ref| {
            p.recordDeclaredSymbol(name_ref) catch unreachable;
            const symbol_name = p.loadNameFromRef(name_ref);
            // Naming a function "eval" or "arguments" is forbidden in strict mode.
            if (isEvalOrArguments(symbol_name)) {
                p.markStrictModeFeature(.eval_or_arguments, js_lexer.rangeOfIdentifier(p.source, name.loc), symbol_name) catch unreachable;
            }
        }
    }

    const body = func.body;

    // Arguments live in their own scope, enclosing the body scope.
    p.pushScopeForVisitPass(.function_args, open_parens_loc) catch unreachable;
    p.visitArgs(
        func.args,
        VisitArgsOpts{
            .has_rest_arg = func.flags.contains(.has_rest_arg),
            .body = body.stmts,
            .is_unique_formal_parameters = true,
        },
    );

    p.pushScopeForVisitPass(.function_body, body.loc) catch unreachable;
    var stmts = ListManaged(Stmt).fromOwnedSlice(p.allocator, body.stmts);
    var temp_opts = PrependTempRefsOpts{ .kind = StmtsKind.fn_body, .fn_body_loc = body.loc };
    p.visitStmtsAndPrependTempRefs(&stmts, &temp_opts) catch unreachable;

    if (p.options.features.react_fast_refresh) {
        const hook_storage = p.react_refresh.hook_ctx_storage orelse
            unreachable; // caller did not init hook storage. any function can have react hooks!

        if (hook_storage.*) |*hook| {
            p.handleReactRefreshPostVisitFunctionBody(&stmts, hook);
        }
    }

    func.body = G.FnBody{ .stmts = stmts.items, .loc = body.loc };

    // Pop body scope, then args scope (LIFO).
    p.popScope();
    p.popScope();

    p.fn_or_arrow_data_visit = old_fn_or_arrow_data;
    p.fn_only_data_visit = old_fn_only_data;

    return func;
}
|
||
|
||
/// Marks the CommonJS named-export optimization as no longer applicable for
/// this file by setting the deoptimized flag.
/// Kept as a dedicated function (rather than an inline assignment) so a
/// breakpoint can be set on every deoptimization site while debugging.
fn deoptimizeCommonJSNamedExports(p: *P) void {
    // exists for debugging
    p.commonjs_named_exports_deoptimized = true;
}
|
||
|
||
/// Wraps `expr` so it keeps `original_name` as its symbol name, but only when
/// the expression was an anonymous named expression; otherwise returns it as-is.
fn maybeKeepExprSymbolName(p: *P, expr: Expr, original_name: string, was_anonymous_named_expr: bool) Expr {
    if (!was_anonymous_named_expr) return expr;
    return p.keepExprSymbolName(expr, original_name);
}
|
||
|
||
/// Returns a substitute expression for "this" at the given location, or null
/// when "this" should be left alone (i.e. we're nested inside a function).
/// - Inside a static class property initializer: substitutes the class name ref.
/// - At the top level of an ES module: substitutes undefined.
/// - At the top level of a CommonJS module: substitutes the "exports" ref.
fn valueForThis(p: *P, loc: logger.Loc) ?Expr {
    // Substitute "this" if we're inside a static class property initializer
    if (p.fn_only_data_visit.should_replace_this_with_class_name_ref) {
        if (p.fn_only_data_visit.class_name_ref) |ref| {
            p.recordUsage(ref.*);
            return p.newExpr(E.Identifier{ .ref = ref.* }, loc);
        }
    }

    // Only substitute top-level "this"; inside any function, "this" keeps its
    // runtime meaning. (NOTE(review): an earlier revision apparently compared
    // against a "mode passthrough" value here instead — the original comment
    // was garbled.)
    if (!p.fn_only_data_visit.is_this_nested) {
        if (p.has_es_module_syntax and p.commonjs_named_exports.count() == 0) {
            // In an ES6 module, "this" is supposed to be undefined. Instead of
            // doing this at runtime using "fn.call(undefined)", we do it at
            // compile time using expression substitution here.
            return Expr{ .loc = loc, .data = nullValueExpr };
        } else {
            // In a CommonJS module, "this" is supposed to be the same as "exports".
            // Instead of doing this at runtime using "fn.call(module.exports)", we
            // do it at compile time using expression substitution here.
            p.recordUsage(p.exports_ref);
            p.deoptimizeCommonJSNamedExports();
            return p.newExpr(E.Identifier{ .ref = p.exports_ref }, loc);
        }
    }

    return null;
}
|
||
|
||
/// Reports whether `expr` is a syntactically valid assignment target:
/// an identifier (other than "eval"/"arguments"), a non-optional-chain member
/// access, or a non-parenthesized array/object destructuring pattern.
fn isValidAssignmentTarget(p: *P, expr: Expr) bool {
    switch (expr.data) {
        // "eval = x" and "arguments = x" are rejected targets.
        .e_identifier => |id| return !isEvalOrArguments(p.loadNameFromRef(id.ref)),
        // Optional chains ("a?.b = x") can never be assigned to.
        .e_dot => |dot| return dot.optional_chain == null,
        .e_index => |index| return index.optional_chain == null,
        // Parenthesized patterns ("([a]) = x") are invalid targets.
        .e_array => |array| return !array.is_parenthesized,
        .e_object => |object| return !object.is_parenthesized,
        else => return false,
    }
}
|
||
|
||
fn visitExprInOut(p: *P, expr: Expr, in: ExprIn) Expr {
|
||
if (in.assign_target != .none and !p.isValidAssignmentTarget(expr)) {
|
||
p.log.addError(p.source, expr.loc, "Invalid assignment target") catch unreachable;
|
||
}
|
||
|
||
// Output.print("\nVisit: {s} - {d}\n", .{ @tagName(expr.data), expr.loc.start });
|
||
switch (expr.data) {
|
||
.e_null, .e_super, .e_boolean, .e_big_int, .e_reg_exp, .e_undefined => {},
|
||
|
||
.e_new_target => |_| {
|
||
// this error is not necessary and it is causing breakages
|
||
// if (!p.fn_only_data_visit.is_new_target_allowed) {
|
||
// p.log.addRangeError(p.source, target.range, "Cannot use \"new.target\" here") catch unreachable;
|
||
// }
|
||
},
|
||
.e_string => {
|
||
|
||
// If you're using this, you're probably not using 0-prefixed legacy octal notation
|
||
// if e.LegacyOctalLoc.Start > 0 {
|
||
},
|
||
.e_number => {
|
||
|
||
// idc about legacy octal loc
|
||
},
|
||
.e_this => {
|
||
if (p.valueForThis(expr.loc)) |exp| {
|
||
return exp;
|
||
}
|
||
|
||
// // Capture "this" inside arrow functions that will be lowered into normal
|
||
// // function expressions for older language environments
|
||
// if p.fnOrArrowDataVisit.isArrow && p.options.unsupportedJSFeatures.Has(compat.Arrow) && p.fnOnlyDataVisit.isThisNested {
|
||
// return js_ast.Expr{Loc: expr.Loc, Data: &js_ast.EIdentifier{Ref: p.captureThis()}}, exprOut{}
|
||
// }
|
||
},
|
||
.e_import_meta => {
|
||
// TODO: delete import.meta might not work
|
||
const is_delete_target = std.meta.activeTag(p.delete_target) == .e_import_meta;
|
||
|
||
if (p.define.dots.get("meta")) |meta| {
|
||
for (meta) |define| {
|
||
// TODO: clean up how we do define matches
|
||
if (p.isDotDefineMatch(expr, define.parts)) {
|
||
// Substitute user-specified defines
|
||
return p.valueForDefine(expr.loc, in.assign_target, is_delete_target, &define.data);
|
||
}
|
||
}
|
||
}
|
||
|
||
if (!p.import_meta_ref.isNull()) {
|
||
p.recordUsage(p.import_meta_ref);
|
||
return p.newExpr(E.Identifier{ .ref = p.import_meta_ref }, expr.loc);
|
||
}
|
||
},
|
||
.e_spread => |exp| {
|
||
exp.value = p.visitExpr(exp.value);
|
||
},
|
||
.e_identifier => {
|
||
var e_ = expr.data.e_identifier;
|
||
const is_delete_target = @as(Expr.Tag, p.delete_target) == .e_identifier and e_.ref.eql(p.delete_target.e_identifier.ref);
|
||
|
||
const name = p.loadNameFromRef(e_.ref);
|
||
if (p.isStrictMode() and js_lexer.StrictModeReservedWords.has(name)) {
|
||
p.markStrictModeFeature(.reserved_word, js_lexer.rangeOfIdentifier(p.source, expr.loc), name) catch unreachable;
|
||
}
|
||
|
||
const result = p.findSymbol(expr.loc, name) catch unreachable;
|
||
|
||
e_.must_keep_due_to_with_stmt = result.is_inside_with_scope;
|
||
e_.ref = result.ref;
|
||
|
||
// Handle assigning to a constant
|
||
if (in.assign_target != .none) {
|
||
if (p.symbols.items[result.ref.innerIndex()].kind == .constant) { // TODO: silence this for runtime transpiler
|
||
const r = js_lexer.rangeOfIdentifier(p.source, expr.loc);
|
||
var notes = p.allocator.alloc(logger.Data, 1) catch unreachable;
|
||
notes[0] = logger.Data{
|
||
.text = std.fmt.allocPrint(p.allocator, "The symbol \"{s}\" was declared a constant here:", .{name}) catch unreachable,
|
||
.location = logger.Location.initOrNull(p.source, js_lexer.rangeOfIdentifier(p.source, result.declare_loc.?)),
|
||
};
|
||
|
||
const is_error = p.const_values.contains(result.ref) or p.options.bundle;
|
||
switch (is_error) {
|
||
true => p.log.addRangeErrorFmtWithNotes(
|
||
p.source,
|
||
r,
|
||
p.allocator,
|
||
notes,
|
||
"Cannot assign to \"{s}\" because it is a constant",
|
||
.{name},
|
||
) catch unreachable,
|
||
|
||
false => p.log.addRangeErrorFmtWithNotes(
|
||
p.source,
|
||
r,
|
||
p.allocator,
|
||
notes,
|
||
"This assignment will throw because \"{s}\" is a constant",
|
||
.{name},
|
||
) catch unreachable,
|
||
}
|
||
} else if (p.exports_ref.eql(e_.ref)) {
|
||
// Assigning to `exports` in a CommonJS module must be tracked to undo the
|
||
// `module.exports` -> `exports` optimization.
|
||
p.commonjs_module_exports_assigned_deoptimized = true;
|
||
}
|
||
}
|
||
|
||
var original_name: ?string = null;
|
||
|
||
// Substitute user-specified defines for unbound symbols
|
||
if (p.symbols.items[e_.ref.innerIndex()].kind == .unbound and !result.is_inside_with_scope and !is_delete_target) {
|
||
if (p.define.forIdentifier(name)) |def| {
|
||
if (!def.valueless) {
|
||
const newvalue = p.valueForDefine(expr.loc, in.assign_target, is_delete_target, &def);
|
||
|
||
// Don't substitute an identifier for a non-identifier if this is an
|
||
// assignment target, since it'll cause a syntax error
|
||
if (@as(Expr.Tag, newvalue.data) == .e_identifier or in.assign_target == .none) {
|
||
p.ignoreUsage(e_.ref);
|
||
return newvalue;
|
||
}
|
||
|
||
original_name = def.original_name;
|
||
}
|
||
|
||
// Copy the side effect flags over in case this expression is unused
|
||
if (def.can_be_removed_if_unused) {
|
||
e_.can_be_removed_if_unused = true;
|
||
}
|
||
if (def.call_can_be_unwrapped_if_unused and !p.options.ignore_dce_annotations) {
|
||
e_.call_can_be_unwrapped_if_unused = true;
|
||
}
|
||
|
||
// If the user passed --drop=console, drop all property accesses to console.
|
||
if (def.method_call_must_be_replaced_with_undefined and in.property_access_for_method_call_maybe_should_replace_with_undefined and in.assign_target == .none) {
|
||
p.method_call_must_be_replaced_with_undefined = true;
|
||
}
|
||
}
|
||
|
||
// Substitute uncalled "require" for the require target
|
||
if (p.require_ref.eql(e_.ref) and !p.isSourceRuntime()) {
|
||
// mark a reference to __require only if this is not about to be used for a call target
|
||
if (!(p.call_target == .e_identifier and
|
||
expr.data.e_identifier.ref.eql(p.call_target.e_identifier.ref)) and
|
||
p.options.features.allow_runtime)
|
||
{
|
||
p.recordUsageOfRuntimeRequire();
|
||
}
|
||
|
||
return p.valueForRequire(expr.loc);
|
||
}
|
||
}
|
||
|
||
return p.handleIdentifier(expr.loc, e_, original_name, IdentifierOpts{
|
||
.assign_target = in.assign_target,
|
||
.is_delete_target = is_delete_target,
|
||
.is_call_target = @as(Expr.Tag, p.call_target) == .e_identifier and expr.data.e_identifier.ref.eql(p.call_target.e_identifier.ref),
|
||
.was_originally_identifier = true,
|
||
});
|
||
},
|
||
.e_jsx_element => |e_| {
|
||
switch (comptime jsx_transform_type) {
|
||
.react => {
|
||
const tag: Expr = tagger: {
|
||
if (e_.tag) |_tag| {
|
||
break :tagger p.visitExpr(_tag);
|
||
} else {
|
||
if (p.options.jsx.runtime == .classic) {
|
||
break :tagger p.jsxStringsToMemberExpression(expr.loc, p.options.jsx.fragment) catch unreachable;
|
||
}
|
||
|
||
break :tagger p.jsxImport(.Fragment, expr.loc);
|
||
}
|
||
};
|
||
|
||
const all_props: []G.Property = e_.properties.slice();
|
||
for (all_props) |*property| {
|
||
if (property.kind != .spread) {
|
||
property.key = p.visitExpr(property.key.?);
|
||
}
|
||
|
||
if (property.value != null) {
|
||
property.value = p.visitExpr(property.value.?);
|
||
}
|
||
|
||
if (property.initializer != null) {
|
||
property.initializer = p.visitExpr(property.initializer.?);
|
||
}
|
||
}
|
||
|
||
const runtime = if (p.options.jsx.runtime == .automatic) options.JSX.Runtime.automatic else options.JSX.Runtime.classic;
|
||
const is_key_after_spread = e_.flags.contains(.is_key_after_spread);
|
||
const children_count = e_.children.len;
|
||
|
||
// TODO: maybe we should split these into two different AST Nodes
|
||
// That would reduce the amount of allocations a little
|
||
if (runtime == .classic or is_key_after_spread) {
|
||
// Arguments to createElement()
|
||
const args = p.allocator.alloc(Expr, 2 + children_count) catch unreachable;
|
||
// There are at least two args:
|
||
// - name of the tag
|
||
// - props
|
||
var i: usize = 2;
|
||
args[0] = tag;
|
||
|
||
const num_props = e_.properties.len;
|
||
if (num_props > 0) {
|
||
const props = p.allocator.alloc(G.Property, num_props) catch unreachable;
|
||
bun.copy(G.Property, props, e_.properties.slice());
|
||
args[1] = p.newExpr(E.Object{ .properties = G.Property.List.init(props) }, expr.loc);
|
||
} else {
|
||
args[1] = p.newExpr(E.Null{}, expr.loc);
|
||
}
|
||
|
||
const children_elements = e_.children.slice()[0..children_count];
|
||
for (children_elements) |child| {
|
||
args[i] = p.visitExpr(child);
|
||
i += @as(usize, @intCast(@intFromBool(args[i].data != .e_missing)));
|
||
}
|
||
|
||
const target = p.jsxStringsToMemberExpression(expr.loc, p.options.jsx.factory) catch unreachable;
|
||
|
||
// Call createElement()
|
||
return p.newExpr(E.Call{
|
||
.target = if (runtime == .classic) target else p.jsxImport(.createElement, expr.loc),
|
||
.args = ExprNodeList.init(args[0..i]),
|
||
// Enable tree shaking
|
||
.can_be_unwrapped_if_unused = !p.options.ignore_dce_annotations,
|
||
.close_paren_loc = e_.close_tag_loc,
|
||
}, expr.loc);
|
||
}
|
||
// function jsxDEV(type, config, maybeKey, source, self) {
|
||
else if (runtime == .automatic) {
|
||
// --- These must be done in all cases --
|
||
const allocator = p.allocator;
|
||
var props: std.ArrayListUnmanaged(G.Property) = e_.properties.list();
|
||
|
||
const maybe_key_value: ?ExprNodeIndex =
|
||
if (e_.key_prop_index > -1) props.orderedRemove(@intCast(e_.key_prop_index)).value else null;
|
||
|
||
// arguments needs to be like
|
||
// {
|
||
// ...props,
|
||
// children: [el1, el2]
|
||
// }
|
||
|
||
{
|
||
var last_child: u32 = 0;
|
||
const children = e_.children.slice()[0..children_count];
|
||
for (children) |child| {
|
||
e_.children.ptr[last_child] = p.visitExpr(child);
|
||
// if tree-shaking removes the element, we must also remove it here.
|
||
last_child += @as(u32, @intCast(@intFromBool(e_.children.ptr[last_child].data != .e_missing)));
|
||
}
|
||
e_.children.len = last_child;
|
||
}
|
||
|
||
const children_key = Expr{ .data = jsxChildrenKeyData, .loc = expr.loc };
|
||
|
||
// Optimization: if the only non-child prop is a spread object
|
||
// we can just pass the object as the first argument
|
||
// this goes as deep as there are spreads
|
||
// <div {{...{...{...{...foo}}}}} />
|
||
// ->
|
||
// <div {{...foo}} />
|
||
// jsx("div", {...foo})
|
||
while (props.items.len == 1 and props.items[0].kind == .spread and props.items[0].value.?.data == .e_object) {
|
||
props = props.items[0].value.?.data.e_object.properties.list();
|
||
}
|
||
|
||
// Typescript defines static jsx as children.len > 1 or single spread
|
||
// https://github.com/microsoft/TypeScript/blob/d4fbc9b57d9aa7d02faac9b1e9bb7b37c687f6e9/src/compiler/transformers/jsx.ts#L340
|
||
const is_static_jsx = e_.children.len > 1 or (e_.children.len == 1 and e_.children.ptr[0].data == .e_spread);
|
||
|
||
if (is_static_jsx) {
|
||
props.append(allocator, G.Property{
|
||
.key = children_key,
|
||
.value = p.newExpr(E.Array{
|
||
.items = e_.children,
|
||
.is_single_line = e_.children.len < 2,
|
||
}, e_.close_tag_loc),
|
||
}) catch bun.outOfMemory();
|
||
} else if (e_.children.len == 1) {
|
||
props.append(allocator, G.Property{
|
||
.key = children_key,
|
||
.value = e_.children.ptr[0],
|
||
}) catch bun.outOfMemory();
|
||
}
|
||
|
||
// Either:
|
||
// jsxDEV(type, arguments, key, isStaticChildren, source, self)
|
||
// jsx(type, arguments, key)
|
||
const args = p.allocator.alloc(Expr, if (p.options.jsx.development) @as(usize, 6) else @as(usize, 2) + @as(usize, @intFromBool(maybe_key_value != null))) catch unreachable;
|
||
args[0] = tag;
|
||
|
||
args[1] = p.newExpr(E.Object{
|
||
.properties = G.Property.List.fromList(props),
|
||
}, expr.loc);
|
||
|
||
if (maybe_key_value) |key| {
|
||
args[2] = key;
|
||
} else if (p.options.jsx.development) {
|
||
// if (maybeKey !== undefined)
|
||
args[2] = Expr{
|
||
.loc = expr.loc,
|
||
.data = .{
|
||
.e_undefined = E.Undefined{},
|
||
},
|
||
};
|
||
}
|
||
|
||
if (p.options.jsx.development) {
|
||
// is the return type of the first child an array?
|
||
// It's dynamic
|
||
// Else, it's static
|
||
args[3] = Expr{
|
||
.loc = expr.loc,
|
||
.data = .{
|
||
.e_boolean = .{
|
||
.value = is_static_jsx,
|
||
},
|
||
},
|
||
};
|
||
|
||
args[4] = p.newExpr(E.Undefined{}, expr.loc);
|
||
args[5] = Expr{ .data = Prefill.Data.This, .loc = expr.loc };
|
||
}
|
||
|
||
return p.newExpr(E.Call{
|
||
.target = p.jsxImportAutomatic(expr.loc, is_static_jsx),
|
||
.args = ExprNodeList.init(args),
|
||
// Enable tree shaking
|
||
.can_be_unwrapped_if_unused = !p.options.ignore_dce_annotations,
|
||
.was_jsx_element = true,
|
||
.close_paren_loc = e_.close_tag_loc,
|
||
}, expr.loc);
|
||
} else {
|
||
unreachable;
|
||
}
|
||
},
|
||
else => unreachable,
|
||
}
|
||
},
|
||
.e_template => |e_| {
|
||
if (e_.tag) |tag| {
|
||
e_.tag = p.visitExpr(tag);
|
||
|
||
if (comptime allow_macros) {
|
||
if (e_.tag.?.data == .e_import_identifier and !p.options.features.is_macro_runtime) {
|
||
const ref = e_.tag.?.data.e_import_identifier.ref;
|
||
|
||
if (p.macro.refs.get(ref)) |import_record_id| {
|
||
const name = p.symbols.items[ref.innerIndex()].original_name;
|
||
p.ignoreUsage(ref);
|
||
if (p.is_control_flow_dead) {
|
||
return p.newExpr(E.Undefined{}, e_.tag.?.loc);
|
||
}
|
||
|
||
// this ordering incase someone wants to use a macro in a node_module conditionally
|
||
if (p.options.features.no_macros) {
|
||
p.log.addError(p.source, tag.loc, "Macros are disabled") catch unreachable;
|
||
return p.newExpr(E.Undefined{}, e_.tag.?.loc);
|
||
}
|
||
|
||
if (p.source.path.isNodeModule()) {
|
||
p.log.addError(p.source, expr.loc, "For security reasons, macros cannot be run from node_modules.") catch unreachable;
|
||
return p.newExpr(E.Undefined{}, expr.loc);
|
||
}
|
||
|
||
p.macro_call_count += 1;
|
||
const record = &p.import_records.items[import_record_id];
|
||
// We must visit it to convert inline_identifiers and record usage
|
||
const macro_result = (p.options.macro_context.call(
|
||
record.path.text,
|
||
p.source.path.sourceDir(),
|
||
p.log,
|
||
p.source,
|
||
record.range,
|
||
expr,
|
||
name,
|
||
) catch return expr);
|
||
|
||
if (macro_result.data != .e_template) {
|
||
return p.visitExpr(macro_result);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
for (e_.parts) |*part| {
|
||
part.value = p.visitExpr(part.value);
|
||
}
|
||
|
||
// When mangling, inline string values into the template literal. Note that
|
||
// it may no longer be a template literal after this point (it may turn into
|
||
// a plain string literal instead).
|
||
if (p.should_fold_typescript_constant_expressions or p.options.features.inlining) {
|
||
return e_.fold(p.allocator, expr.loc);
|
||
}
|
||
},
|
||
.e_binary => |e_| {
|
||
|
||
// The handling of binary expressions is convoluted because we're using
|
||
// iteration on the heap instead of recursion on the call stack to avoid
|
||
// stack overflow for deeply-nested ASTs.
|
||
var v = BinaryExpressionVisitor{
|
||
.e = e_,
|
||
.loc = expr.loc,
|
||
.in = in,
|
||
.left_in = ExprIn{},
|
||
};
|
||
|
||
// Everything uses a single stack to reduce allocation overhead. This stack
|
||
// should almost always be very small, and almost all visits should reuse
|
||
// existing memory without allocating anything.
|
||
const stack_bottom = p.binary_expression_stack.items.len;
|
||
|
||
var current = Expr{ .data = .{ .e_binary = e_ }, .loc = v.loc };
|
||
|
||
// Iterate down into the AST along the left node of the binary operation.
|
||
// Continue iterating until we encounter something that's not a binary node.
|
||
|
||
while (true) {
|
||
if (v.checkAndPrepare(p)) |out| {
|
||
current = out;
|
||
break;
|
||
}
|
||
|
||
// Grab the arguments to our nested "visitExprInOut" call for the left
|
||
// node. We only care about deeply-nested left nodes because most binary
|
||
// operators in JavaScript are left-associative and the problematic edge
|
||
// cases we're trying to avoid crashing on have lots of left-associative
|
||
// binary operators chained together without parentheses (e.g. "1+2+...").
|
||
const left = v.e.left;
|
||
const left_in = v.left_in;
|
||
|
||
const left_binary: ?*E.Binary = if (left.data == .e_binary) left.data.e_binary else null;
|
||
|
||
// Stop iterating if iteration doesn't apply to the left node. This checks
|
||
// the assignment target because "visitExprInOut" has additional behavior
|
||
// in that case that we don't want to miss (before the top-level "switch"
|
||
// statement).
|
||
if (left_binary == null or left_in.assign_target != .none) {
|
||
v.e.left = p.visitExprInOut(left, left_in);
|
||
current = v.visitRightAndFinish(p);
|
||
break;
|
||
}
|
||
|
||
// Note that we only append to the stack (and therefore allocate memory
|
||
// on the heap) when there are nested binary expressions. A single binary
|
||
// expression doesn't add anything to the stack.
|
||
p.binary_expression_stack.append(v) catch bun.outOfMemory();
|
||
v = BinaryExpressionVisitor{
|
||
.e = left_binary.?,
|
||
.loc = left.loc,
|
||
.in = left_in,
|
||
.left_in = .{},
|
||
};
|
||
}
|
||
|
||
// Process all binary operations from the deepest-visited node back toward
|
||
// our original top-level binary operation.
|
||
while (p.binary_expression_stack.items.len > stack_bottom) {
|
||
v = p.binary_expression_stack.pop();
|
||
v.e.left = current;
|
||
current = v.visitRightAndFinish(p);
|
||
}
|
||
|
||
return current;
|
||
},
|
||
.e_index => |e_| {
|
||
const is_call_target = p.call_target == .e_index and expr.data.e_index == p.call_target.e_index;
|
||
const is_delete_target = p.delete_target == .e_index and expr.data.e_index == p.delete_target.e_index;
|
||
|
||
// "a['b']" => "a.b"
|
||
if (p.options.features.minify_syntax and
|
||
e_.index.data == .e_string and
|
||
e_.index.data.e_string.isUTF8() and
|
||
e_.index.data.e_string.isIdentifier(p.allocator))
|
||
{
|
||
const dot = p.newExpr(
|
||
E.Dot{
|
||
.name = e_.index.data.e_string.slice(p.allocator),
|
||
.name_loc = e_.index.loc,
|
||
.target = e_.target,
|
||
.optional_chain = e_.optional_chain,
|
||
},
|
||
expr.loc,
|
||
);
|
||
|
||
if (is_call_target) {
|
||
p.call_target = dot.data;
|
||
}
|
||
|
||
if (is_delete_target) {
|
||
p.delete_target = dot.data;
|
||
}
|
||
|
||
return p.visitExprInOut(dot, in);
|
||
}
|
||
|
||
const target_visited = p.visitExprInOut(e_.target, ExprIn{
|
||
.has_chain_parent = e_.optional_chain == .continuation,
|
||
});
|
||
e_.target = target_visited;
|
||
|
||
switch (e_.index.data) {
|
||
.e_private_identifier => |_private| {
|
||
var private = _private;
|
||
const name = p.loadNameFromRef(private.ref);
|
||
const result = p.findSymbol(e_.index.loc, name) catch unreachable;
|
||
private.ref = result.ref;
|
||
|
||
// Unlike regular identifiers, there are no unbound private identifiers
|
||
const kind: Symbol.Kind = p.symbols.items[result.ref.innerIndex()].kind;
|
||
var r: logger.Range = undefined;
|
||
if (!Symbol.isKindPrivate(kind)) {
|
||
r = logger.Range{ .loc = e_.index.loc, .len = @as(i32, @intCast(name.len)) };
|
||
p.log.addRangeErrorFmt(p.source, r, p.allocator, "Private name \"{s}\" must be declared in an enclosing class", .{name}) catch unreachable;
|
||
} else {
|
||
if (in.assign_target != .none and (kind == .private_method or kind == .private_static_method)) {
|
||
r = logger.Range{ .loc = e_.index.loc, .len = @as(i32, @intCast(name.len)) };
|
||
p.log.addRangeWarningFmt(p.source, r, p.allocator, "Writing to read-only method \"{s}\" will throw", .{name}) catch unreachable;
|
||
} else if (in.assign_target != .none and (kind == .private_get or kind == .private_static_get)) {
|
||
r = logger.Range{ .loc = e_.index.loc, .len = @as(i32, @intCast(name.len)) };
|
||
p.log.addRangeWarningFmt(p.source, r, p.allocator, "Writing to getter-only property \"{s}\" will throw", .{name}) catch unreachable;
|
||
} else if (in.assign_target != .replace and (kind == .private_set or kind == .private_static_set)) {
|
||
r = logger.Range{ .loc = e_.index.loc, .len = @as(i32, @intCast(name.len)) };
|
||
p.log.addRangeWarningFmt(p.source, r, p.allocator, "Reading from setter-only property \"{s}\" will throw", .{name}) catch unreachable;
|
||
}
|
||
}
|
||
|
||
e_.index = .{ .data = .{ .e_private_identifier = private }, .loc = e_.index.loc };
|
||
},
|
||
else => {
|
||
const index = p.visitExpr(e_.index);
|
||
e_.index = index;
|
||
|
||
const unwrapped = e_.index.unwrapInlined();
|
||
if (unwrapped.data == .e_string and
|
||
unwrapped.data.e_string.isUTF8())
|
||
{
|
||
// "a['b' + '']" => "a.b"
|
||
// "enum A { B = 'b' }; a[A.B]" => "a.b"
|
||
if (p.options.features.minify_syntax and
|
||
unwrapped.data.e_string.isIdentifier(p.allocator))
|
||
{
|
||
const dot = p.newExpr(
|
||
E.Dot{
|
||
.name = unwrapped.data.e_string.slice(p.allocator),
|
||
.name_loc = unwrapped.loc,
|
||
.target = e_.target,
|
||
.optional_chain = e_.optional_chain,
|
||
},
|
||
expr.loc,
|
||
);
|
||
|
||
if (is_call_target) {
|
||
p.call_target = dot.data;
|
||
}
|
||
|
||
if (is_delete_target) {
|
||
p.delete_target = dot.data;
|
||
}
|
||
|
||
return p.visitExprInOut(dot, in);
|
||
}
|
||
|
||
// Handle property rewrites to ensure things
|
||
// like .e_import_identifier tracking works
|
||
// Reminder that this can only be done after
|
||
// `target` is visited.
|
||
if (p.maybeRewritePropertyAccess(
|
||
expr.loc,
|
||
e_.target,
|
||
unwrapped.data.e_string.data,
|
||
unwrapped.loc,
|
||
.{
|
||
.is_call_target = is_call_target,
|
||
// .is_template_tag = is_template_tag,
|
||
.is_delete_target = is_delete_target,
|
||
.assign_target = in.assign_target,
|
||
},
|
||
)) |rewrite| {
|
||
return rewrite;
|
||
}
|
||
}
|
||
},
|
||
}
|
||
|
||
const target = e_.target.unwrapInlined();
|
||
const index = e_.index.unwrapInlined();
|
||
|
||
if (p.options.features.minify_syntax) {
|
||
if (index.data.as(.e_number)) |number| {
|
||
if (number.value >= 0 and
|
||
number.value < std.math.maxInt(usize) and
|
||
@mod(number.value, 1) == 0)
|
||
{
|
||
// "foo"[2] -> "o"
|
||
if (target.data.as(.e_string)) |str| {
|
||
if (str.isUTF8()) {
|
||
const literal = str.slice(p.allocator);
|
||
const num: usize = index.data.e_number.toUsize();
|
||
if (Environment.allow_assert) {
|
||
bun.assert(bun.strings.isAllASCII(literal));
|
||
}
|
||
if (num < literal.len) {
|
||
return p.newExpr(E.String{ .data = literal[num .. num + 1] }, expr.loc);
|
||
}
|
||
}
|
||
} else if (target.data.as(.e_array)) |array| {
|
||
// [x][0] -> x
|
||
if (array.items.len == 1 and number.value == 0) {
|
||
const inlined = target.data.e_array.items.at(0).*;
|
||
if (inlined.canBeInlinedFromPropertyAccess())
|
||
return inlined;
|
||
}
|
||
|
||
// ['a', 'b', 'c'][1] -> 'b'
|
||
const int: usize = @intFromFloat(number.value);
|
||
if (int < array.items.len and p.exprCanBeRemovedIfUnused(&target)) {
|
||
const inlined = target.data.e_array.items.at(int).*;
|
||
// ['a', , 'c'][1] -> undefined
|
||
if (inlined.data == .e_missing) return p.newExpr(E.Undefined{}, inlined.loc);
|
||
if (Environment.allow_assert) assert(inlined.canBeInlinedFromPropertyAccess());
|
||
return inlined;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
// Create an error for assigning to an import namespace when bundling. Even
|
||
// though this is a run-time error, we make it a compile-time error when
|
||
// bundling because scope hoisting means these will no longer be run-time
|
||
// errors.
|
||
if ((in.assign_target != .none or is_delete_target) and
|
||
@as(Expr.Tag, e_.target.data) == .e_identifier and
|
||
p.symbols.items[e_.target.data.e_identifier.ref.innerIndex()].kind == .import)
|
||
{
|
||
const r = js_lexer.rangeOfIdentifier(p.source, e_.target.loc);
|
||
p.log.addRangeErrorFmt(
|
||
p.source,
|
||
r,
|
||
p.allocator,
|
||
"Cannot assign to property on import \"{s}\"",
|
||
.{p.symbols.items[e_.target.data.e_identifier.ref.innerIndex()].original_name},
|
||
) catch unreachable;
|
||
}
|
||
|
||
return p.newExpr(e_, expr.loc);
|
||
},
|
||
.e_unary => |e_| {
|
||
switch (e_.op) {
|
||
.un_typeof => {
|
||
const id_before = e_.value.data == .e_identifier;
|
||
e_.value = p.visitExprInOut(e_.value, ExprIn{ .assign_target = e_.op.unaryAssignTarget() });
|
||
const id_after = e_.value.data == .e_identifier;
|
||
|
||
// The expression "typeof (0, x)" must not become "typeof x" if "x"
|
||
// is unbound because that could suppress a ReferenceError from "x"
|
||
if (!id_before and id_after and p.symbols.items[e_.value.data.e_identifier.ref.innerIndex()].kind == .unbound) {
|
||
e_.value = Expr.joinWithComma(
|
||
Expr{ .loc = e_.value.loc, .data = Prefill.Data.Zero },
|
||
e_.value,
|
||
p.allocator,
|
||
);
|
||
}
|
||
|
||
if (e_.value.data == .e_require_call_target) {
|
||
p.ignoreUsageOfRuntimeRequire();
|
||
return p.newExpr(E.String{ .data = "function" }, expr.loc);
|
||
}
|
||
|
||
if (SideEffects.typeof(e_.value.data)) |typeof| {
|
||
return p.newExpr(E.String{ .data = typeof }, expr.loc);
|
||
}
|
||
},
|
||
.un_delete => {
|
||
e_.value = p.visitExprInOut(e_.value, ExprIn{ .has_chain_parent = true });
|
||
},
|
||
else => {
|
||
e_.value = p.visitExprInOut(e_.value, ExprIn{ .assign_target = e_.op.unaryAssignTarget() });
|
||
|
||
// Post-process the unary expression
|
||
switch (e_.op) {
|
||
.un_not => {
|
||
if (p.options.features.minify_syntax)
|
||
e_.value = SideEffects.simplifyBoolean(p, e_.value);
|
||
|
||
const side_effects = SideEffects.toBoolean(p, e_.value.data);
|
||
if (side_effects.ok) {
|
||
return p.newExpr(E.Boolean{ .value = !side_effects.value }, expr.loc);
|
||
}
|
||
|
||
if (p.options.features.minify_syntax) {
|
||
if (e_.value.maybeSimplifyNot(p.allocator)) |exp| {
|
||
return exp;
|
||
}
|
||
if (e_.value.data == .e_import_meta_main) {
|
||
e_.value.data.e_import_meta_main.inverted = !e_.value.data.e_import_meta_main.inverted;
|
||
return e_.value;
|
||
}
|
||
}
|
||
},
|
||
.un_cpl => {
|
||
if (p.should_fold_typescript_constant_expressions) {
|
||
if (SideEffects.toNumber(e_.value.data)) |value| {
|
||
return p.newExpr(E.Number{
|
||
.value = @floatFromInt(~floatToInt32(value)),
|
||
}, expr.loc);
|
||
}
|
||
}
|
||
},
|
||
.un_void => {
|
||
if (p.exprCanBeRemovedIfUnused(&e_.value)) {
|
||
return p.newExpr(E.Undefined{}, e_.value.loc);
|
||
}
|
||
},
|
||
.un_pos => {
|
||
if (SideEffects.toNumber(e_.value.data)) |num| {
|
||
return p.newExpr(E.Number{ .value = num }, expr.loc);
|
||
}
|
||
},
|
||
.un_neg => {
|
||
if (SideEffects.toNumber(e_.value.data)) |num| {
|
||
return p.newExpr(E.Number{ .value = -num }, expr.loc);
|
||
}
|
||
},
|
||
|
||
////////////////////////////////////////////////////////////////////////////////
|
||
|
||
.un_pre_dec => {
|
||
// TODO: private fields
|
||
},
|
||
.un_pre_inc => {
|
||
// TODO: private fields
|
||
},
|
||
.un_post_dec => {
|
||
// TODO: private fields
|
||
},
|
||
.un_post_inc => {
|
||
// TODO: private fields
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
if (p.options.features.minify_syntax) {
|
||
// "-(a, b)" => "a, -b"
|
||
if (switch (e_.op) {
|
||
.un_delete, .un_typeof => false,
|
||
else => true,
|
||
}) {
|
||
switch (e_.value.data) {
|
||
.e_binary => |comma| {
|
||
if (comma.op == .bin_comma) {
|
||
return Expr.joinWithComma(
|
||
comma.left,
|
||
p.newExpr(
|
||
E.Unary{
|
||
.op = e_.op,
|
||
.value = comma.right,
|
||
},
|
||
comma.right.loc,
|
||
),
|
||
p.allocator,
|
||
);
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
}
|
||
},
|
||
}
|
||
},
|
||
.e_dot => |e_| {
|
||
const is_delete_target = @as(Expr.Tag, p.delete_target) == .e_dot and expr.data.e_dot == p.delete_target.e_dot;
|
||
const is_call_target = @as(Expr.Tag, p.call_target) == .e_dot and expr.data.e_dot == p.call_target.e_dot;
|
||
|
||
if (p.define.dots.get(e_.name)) |parts| {
|
||
for (parts) |define| {
|
||
if (p.isDotDefineMatch(expr, define.parts)) {
|
||
if (in.assign_target == .none) {
|
||
// Substitute user-specified defines
|
||
if (!define.data.valueless) {
|
||
return p.valueForDefine(expr.loc, in.assign_target, is_delete_target, &define.data);
|
||
}
|
||
|
||
if (define.data.method_call_must_be_replaced_with_undefined and in.property_access_for_method_call_maybe_should_replace_with_undefined) {
|
||
p.method_call_must_be_replaced_with_undefined = true;
|
||
}
|
||
}
|
||
|
||
// Copy the side effect flags over in case this expression is unused
|
||
if (define.data.can_be_removed_if_unused) {
|
||
e_.can_be_removed_if_unused = true;
|
||
}
|
||
|
||
if (define.data.call_can_be_unwrapped_if_unused and !p.options.ignore_dce_annotations) {
|
||
e_.call_can_be_unwrapped_if_unused = true;
|
||
}
|
||
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
|
||
// Track ".then().catch()" chains
|
||
if (is_call_target and @as(Expr.Tag, p.then_catch_chain.next_target) == .e_dot and p.then_catch_chain.next_target.e_dot == expr.data.e_dot) {
|
||
if (strings.eqlComptime(e_.name, "catch")) {
|
||
p.then_catch_chain = ThenCatchChain{
|
||
.next_target = e_.target.data,
|
||
.has_catch = true,
|
||
};
|
||
} else if (strings.eqlComptime(e_.name, "then")) {
|
||
p.then_catch_chain = ThenCatchChain{
|
||
.next_target = e_.target.data,
|
||
.has_catch = p.then_catch_chain.has_catch or p.then_catch_chain.has_multiple_args,
|
||
};
|
||
}
|
||
}
|
||
|
||
e_.target = p.visitExprInOut(e_.target, .{
|
||
.property_access_for_method_call_maybe_should_replace_with_undefined = in.property_access_for_method_call_maybe_should_replace_with_undefined,
|
||
});
|
||
|
||
// 'require.resolve' -> .e_require_resolve_call_target
|
||
if (e_.target.data == .e_require_call_target and
|
||
strings.eqlComptime(e_.name, "resolve"))
|
||
{
|
||
// we do not need to call p.recordUsageOfRuntimeRequire(); because `require`
|
||
// was not a call target. even if the call target is `require.resolve`, it should be set.
|
||
return .{
|
||
.data = .{
|
||
.e_require_resolve_call_target = {},
|
||
},
|
||
.loc = expr.loc,
|
||
};
|
||
}
|
||
|
||
if (e_.optional_chain == null) {
|
||
if (p.maybeRewritePropertyAccess(
|
||
expr.loc,
|
||
e_.target,
|
||
e_.name,
|
||
e_.name_loc,
|
||
.{
|
||
.is_call_target = is_call_target,
|
||
.assign_target = in.assign_target,
|
||
.is_delete_target = is_delete_target,
|
||
// .is_template_tag = p.template_tag != null,
|
||
},
|
||
)) |_expr| {
|
||
return _expr;
|
||
}
|
||
|
||
if (comptime allow_macros) {
|
||
if (!p.options.features.is_macro_runtime) {
|
||
if (p.macro_call_count > 0 and e_.target.data == .e_object and e_.target.data.e_object.was_originally_macro) {
|
||
if (e_.target.get(e_.name)) |obj| {
|
||
return obj;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
}
|
||
},
|
||
.e_if => |e_| {
|
||
const is_call_target = @as(Expr.Data, p.call_target) == .e_if and expr.data.e_if == p.call_target.e_if;
|
||
|
||
e_.test_ = p.visitExpr(e_.test_);
|
||
|
||
e_.test_ = SideEffects.simplifyBoolean(p, e_.test_);
|
||
|
||
const side_effects = SideEffects.toBoolean(p, e_.test_.data);
|
||
|
||
if (!side_effects.ok) {
|
||
e_.yes = p.visitExpr(e_.yes);
|
||
e_.no = p.visitExpr(e_.no);
|
||
} else {
|
||
// Mark the control flow as dead if the branch is never taken
|
||
if (side_effects.value) {
|
||
// "true ? live : dead"
|
||
e_.yes = p.visitExpr(e_.yes);
|
||
const old = p.is_control_flow_dead;
|
||
p.is_control_flow_dead = true;
|
||
e_.no = p.visitExpr(e_.no);
|
||
p.is_control_flow_dead = old;
|
||
|
||
if (side_effects.side_effects == .could_have_side_effects) {
|
||
return Expr.joinWithComma(SideEffects.simplifyUnusedExpr(p, e_.test_) orelse p.newExpr(E.Missing{}, e_.test_.loc), e_.yes, p.allocator);
|
||
}
|
||
|
||
// "(1 ? fn : 2)()" => "fn()"
|
||
// "(1 ? this.fn : 2)" => "this.fn"
|
||
// "(1 ? this.fn : 2)()" => "(0, this.fn)()"
|
||
if (is_call_target and e_.yes.hasValueForThisInCall()) {
|
||
return p.newExpr(E.Number{ .value = 0 }, e_.test_.loc).joinWithComma(e_.yes, p.allocator);
|
||
}
|
||
|
||
return e_.yes;
|
||
} else {
|
||
// "false ? dead : live"
|
||
const old = p.is_control_flow_dead;
|
||
p.is_control_flow_dead = true;
|
||
e_.yes = p.visitExpr(e_.yes);
|
||
p.is_control_flow_dead = old;
|
||
e_.no = p.visitExpr(e_.no);
|
||
|
||
// "(a, false) ? b : c" => "a, c"
|
||
if (side_effects.side_effects == .could_have_side_effects) {
|
||
return Expr.joinWithComma(SideEffects.simplifyUnusedExpr(p, e_.test_) orelse p.newExpr(E.Missing{}, e_.test_.loc), e_.no, p.allocator);
|
||
}
|
||
|
||
// "(1 ? fn : 2)()" => "fn()"
|
||
// "(1 ? this.fn : 2)" => "this.fn"
|
||
// "(1 ? this.fn : 2)()" => "(0, this.fn)()"
|
||
if (is_call_target and e_.no.hasValueForThisInCall()) {
|
||
return p.newExpr(E.Number{ .value = 0 }, e_.test_.loc).joinWithComma(e_.no, p.allocator);
|
||
}
|
||
return e_.no;
|
||
}
|
||
}
|
||
},
|
||
.e_await => |e_| {
|
||
p.await_target = e_.value.data;
|
||
e_.value = p.visitExpr(e_.value);
|
||
},
|
||
.e_yield => |e_| {
|
||
if (e_.value) |val| {
|
||
e_.value = p.visitExpr(val);
|
||
}
|
||
},
|
||
.e_array => |e_| {
|
||
if (in.assign_target != .none) {
|
||
p.maybeCommaSpreadError(e_.comma_after_spread);
|
||
}
|
||
const items = e_.items.slice();
|
||
var spread_item_count: usize = 0;
|
||
for (items) |*item| {
|
||
switch (item.data) {
|
||
.e_missing => {},
|
||
.e_spread => |spread| {
|
||
spread.value = p.visitExprInOut(spread.value, ExprIn{ .assign_target = in.assign_target });
|
||
|
||
spread_item_count += if (spread.value.data == .e_array)
|
||
@as(usize, spread.value.data.e_array.items.len)
|
||
else
|
||
0;
|
||
},
|
||
.e_binary => |e2| {
|
||
if (in.assign_target != .none and e2.op == .bin_assign) {
|
||
const was_anonymous_named_expr = e2.right.isAnonymousNamed();
|
||
e2.left = p.visitExprInOut(e2.left, ExprIn{ .assign_target = .replace });
|
||
e2.right = p.visitExpr(e2.right);
|
||
|
||
if (@as(Expr.Tag, e2.left.data) == .e_identifier) {
|
||
e2.right = p.maybeKeepExprSymbolName(
|
||
e2.right,
|
||
p.symbols.items[e2.left.data.e_identifier.ref.innerIndex()].original_name,
|
||
was_anonymous_named_expr,
|
||
);
|
||
}
|
||
} else {
|
||
item.* = p.visitExprInOut(item.*, ExprIn{ .assign_target = in.assign_target });
|
||
}
|
||
},
|
||
else => {
|
||
item.* = p.visitExprInOut(item.*, ExprIn{ .assign_target = in.assign_target });
|
||
},
|
||
}
|
||
}
|
||
|
||
// "[1, ...[2, 3], 4]" => "[1, 2, 3, 4]"
|
||
if (p.options.features.minify_syntax and spread_item_count > 0 and in.assign_target == .none) {
|
||
e_.items = e_.inlineSpreadOfArrayLiterals(p.allocator, spread_item_count) catch e_.items;
|
||
}
|
||
},
|
||
.e_object => |e_| {
|
||
if (in.assign_target != .none) {
|
||
p.maybeCommaSpreadError(e_.comma_after_spread);
|
||
}
|
||
|
||
var has_spread = false;
|
||
var has_proto = false;
|
||
for (e_.properties.slice()) |*property| {
|
||
if (property.kind != .spread) {
|
||
property.key = p.visitExpr(property.key orelse Output.panic("Expected property key", .{}));
|
||
const key = property.key.?;
|
||
// Forbid duplicate "__proto__" properties according to the specification
|
||
if (!property.flags.contains(.is_computed) and
|
||
!property.flags.contains(.was_shorthand) and
|
||
!property.flags.contains(.is_method) and
|
||
in.assign_target == .none and
|
||
key.data.isStringValue() and
|
||
strings.eqlComptime(
|
||
// __proto__ is utf8, assume it lives in refs
|
||
key.data.e_string.slice(p.allocator),
|
||
"__proto__",
|
||
)) {
|
||
if (has_proto) {
|
||
const r = js_lexer.rangeOfIdentifier(p.source, key.loc);
|
||
p.log.addRangeError(p.source, r, "Cannot specify the \"__proto__\" property more than once per object") catch unreachable;
|
||
}
|
||
has_proto = true;
|
||
}
|
||
} else {
|
||
has_spread = true;
|
||
}
|
||
|
||
// Extract the initializer for expressions like "({ a: b = c } = d)"
|
||
if (in.assign_target != .none and property.initializer == null and property.value != null) {
|
||
switch (property.value.?.data) {
|
||
.e_binary => |bin| {
|
||
if (bin.op == .bin_assign) {
|
||
property.initializer = bin.right;
|
||
property.value = bin.left;
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
|
||
if (property.value != null) {
|
||
property.value = p.visitExprInOut(property.value.?, ExprIn{ .assign_target = in.assign_target });
|
||
}
|
||
|
||
if (property.initializer != null) {
|
||
const was_anonymous_named_expr = property.initializer.?.isAnonymousNamed();
|
||
property.initializer = p.visitExpr(property.initializer.?);
|
||
|
||
if (property.value) |val| {
|
||
if (@as(Expr.Tag, val.data) == .e_identifier) {
|
||
property.initializer = p.maybeKeepExprSymbolName(
|
||
property.initializer orelse unreachable,
|
||
p.symbols.items[val.data.e_identifier.ref.innerIndex()].original_name,
|
||
was_anonymous_named_expr,
|
||
);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
},
|
||
.e_import => |e_| {
|
||
// We want to forcefully fold constants inside of imports
|
||
// even when minification is disabled, so that if we have an
|
||
// import based on a string template, it does not cause a
|
||
// bundle error. This is especially relevant for bundling NAPI
|
||
// modules with 'bun build --compile':
|
||
//
|
||
// const binding = await import(`./${process.platform}-${process.arch}.node`);
|
||
//
|
||
const prev_should_fold_typescript_constant_expressions = true;
|
||
defer p.should_fold_typescript_constant_expressions = prev_should_fold_typescript_constant_expressions;
|
||
p.should_fold_typescript_constant_expressions = true;
|
||
|
||
e_.expr = p.visitExpr(e_.expr);
|
||
e_.options = p.visitExpr(e_.options);
|
||
|
||
// Import transposition is able to duplicate the options structure, so
|
||
// only perform it if the expression is side effect free.
|
||
//
|
||
// TODO: make this more like esbuild by emitting warnings that explain
|
||
// why this import was not analyzed. (see esbuild 'unsupported-dynamic-import')
|
||
if (p.exprCanBeRemovedIfUnused(&e_.options)) {
|
||
const state = TransposeState{
|
||
.is_await_target = if (p.await_target) |await_target|
|
||
await_target == .e_import and await_target.e_import == e_
|
||
else
|
||
false,
|
||
|
||
.is_then_catch_target = p.then_catch_chain.has_catch and
|
||
p.then_catch_chain.next_target == .e_import and
|
||
expr.data.e_import == p.then_catch_chain.next_target.e_import,
|
||
|
||
.import_options = e_.options,
|
||
|
||
.loc = e_.expr.loc,
|
||
.import_record_tag = e_.importRecordTag(),
|
||
};
|
||
|
||
return p.import_transposer.maybeTransposeIf(e_.expr, &state);
|
||
}
|
||
},
|
||
.e_call => |e_| {
|
||
p.call_target = e_.target.data;
|
||
|
||
p.then_catch_chain = ThenCatchChain{
|
||
.next_target = e_.target.data,
|
||
.has_multiple_args = e_.args.len >= 2,
|
||
.has_catch = @as(Expr.Tag, p.then_catch_chain.next_target) == .e_call and p.then_catch_chain.next_target.e_call == expr.data.e_call and p.then_catch_chain.has_catch,
|
||
};
|
||
|
||
const target_was_identifier_before_visit = e_.target.data == .e_identifier;
|
||
e_.target = p.visitExprInOut(e_.target, .{
|
||
.has_chain_parent = e_.optional_chain == .continuation,
|
||
.property_access_for_method_call_maybe_should_replace_with_undefined = true,
|
||
});
|
||
|
||
// Copy the call side effect flag over if this is a known target
|
||
switch (e_.target.data) {
|
||
.e_identifier => |ident| {
|
||
e_.can_be_unwrapped_if_unused = e_.can_be_unwrapped_if_unused or ident.call_can_be_unwrapped_if_unused;
|
||
|
||
// Detect if this is a direct eval. Note that "(1 ? eval : 0)(x)" will
|
||
// become "eval(x)" after we visit the target due to dead code elimination,
|
||
// but that doesn't mean it should become a direct eval.
|
||
//
|
||
// Note that "eval?.(x)" is considered an indirect eval. There was debate
|
||
// about this after everyone implemented it as a direct eval, but the
|
||
// language committee said it was indirect and everyone had to change it:
|
||
// https://github.com/tc39/ecma262/issues/2062.
|
||
if (e_.optional_chain == null and
|
||
target_was_identifier_before_visit and
|
||
strings.eqlComptime(
|
||
p.symbols.items[e_.target.data.e_identifier.ref.inner_index].original_name,
|
||
"eval",
|
||
)) {
|
||
e_.is_direct_eval = true;
|
||
|
||
// Pessimistically assume that if this looks like a CommonJS module
|
||
// (e.g. no "export" keywords), a direct call to "eval" means that
|
||
// code could potentially access "module" or "exports".
|
||
if (p.options.bundle and !p.is_file_considered_to_have_esm_exports) {
|
||
p.recordUsage(p.module_ref);
|
||
p.recordUsage(p.exports_ref);
|
||
}
|
||
|
||
var scope_iter: ?*js_ast.Scope = p.current_scope;
|
||
while (scope_iter) |scope| : (scope_iter = scope.parent) {
|
||
scope.contains_direct_eval = true;
|
||
}
|
||
|
||
// TODO: Log a build note for this like esbuild does
|
||
}
|
||
},
|
||
.e_dot => |dot| {
|
||
e_.can_be_unwrapped_if_unused = e_.can_be_unwrapped_if_unused or dot.call_can_be_unwrapped_if_unused;
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
const is_macro_ref: bool = if (comptime FeatureFlags.is_macro_enabled)
|
||
e_.target.data == .e_import_identifier and p.macro.refs.contains(e_.target.data.e_import_identifier.ref)
|
||
else
|
||
false;
|
||
|
||
{
|
||
const old_ce = p.options.ignore_dce_annotations;
|
||
defer p.options.ignore_dce_annotations = old_ce;
|
||
const old_should_fold_typescript_constant_expressions = p.should_fold_typescript_constant_expressions;
|
||
defer p.should_fold_typescript_constant_expressions = old_should_fold_typescript_constant_expressions;
|
||
const old_is_control_flow_dead = p.is_control_flow_dead;
|
||
|
||
// We want to forcefully fold constants inside of
|
||
// certain calls even when minification is disabled, so
|
||
// that if we have an import based on a string template,
|
||
// it does not cause a bundle error. This is relevant for
|
||
// macros, as they require constant known values, but also
|
||
// for `require` and `require.resolve`, as they go through
|
||
// the module resolver.
|
||
if (is_macro_ref or
|
||
e_.target.data == .e_require_call_target or
|
||
e_.target.data == .e_require_resolve_call_target)
|
||
{
|
||
p.options.ignore_dce_annotations = true;
|
||
p.should_fold_typescript_constant_expressions = true;
|
||
}
|
||
|
||
var method_call_should_be_replaced_with_undefined = p.method_call_must_be_replaced_with_undefined;
|
||
|
||
if (method_call_should_be_replaced_with_undefined) {
|
||
p.method_call_must_be_replaced_with_undefined = false;
|
||
switch (e_.target.data) {
|
||
// If we're removing this call, don't count any arguments as symbol uses
|
||
.e_index, .e_dot => {
|
||
p.is_control_flow_dead = true;
|
||
},
|
||
else => {
|
||
method_call_should_be_replaced_with_undefined = false;
|
||
},
|
||
}
|
||
}
|
||
|
||
for (e_.args.slice()) |*arg| {
|
||
arg.* = p.visitExpr(arg.*);
|
||
}
|
||
|
||
if (method_call_should_be_replaced_with_undefined) {
|
||
p.is_control_flow_dead = old_is_control_flow_dead;
|
||
return .{ .data = .{ .e_undefined = .{} }, .loc = expr.loc };
|
||
}
|
||
}
|
||
|
||
if (e_.target.data == .e_require_call_target) {
|
||
e_.can_be_unwrapped_if_unused = false;
|
||
|
||
// Heuristic: omit warnings inside try/catch blocks because presumably
|
||
// the try/catch statement is there to handle the potential run-time
|
||
// error from the unbundled require() call failing.
|
||
if (e_.args.len == 1) {
|
||
const first = e_.args.first_();
|
||
const state = TransposeState{
|
||
.is_require_immediately_assigned_to_decl = in.is_immediately_assigned_to_decl and
|
||
first.data == .e_string,
|
||
};
|
||
switch (first.data) {
|
||
.e_string => {
|
||
// require(FOO) => require(FOO)
|
||
return p.transposeRequire(first, &state);
|
||
},
|
||
.e_if => {
|
||
// require(FOO ? '123' : '456') => FOO ? require('123') : require('456')
|
||
// This makes static analysis later easier
|
||
return p.require_transposer.transposeKnownToBeIf(first, &state);
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
|
||
// Ignore calls to require() if the control flow is provably
|
||
// dead here. We don't want to spend time scanning the required files
|
||
// if they will never be used.
|
||
if (p.is_control_flow_dead) {
|
||
return p.newExpr(E.Null{}, expr.loc);
|
||
}
|
||
|
||
if (p.options.warn_about_unbundled_modules) {
|
||
const r = js_lexer.rangeOfIdentifier(p.source, e_.target.loc);
|
||
p.log.addRangeDebug(p.source, r, "This call to \"require\" will not be bundled because it has multiple arguments") catch unreachable;
|
||
}
|
||
|
||
if (p.options.features.allow_runtime) {
|
||
p.recordUsageOfRuntimeRequire();
|
||
}
|
||
|
||
return expr;
|
||
}
|
||
|
||
if (e_.target.data == .e_require_resolve_call_target) {
|
||
// Ignore calls to require.resolve() if the control flow is provably
|
||
// dead here. We don't want to spend time scanning the required files
|
||
// if they will never be used.
|
||
if (p.is_control_flow_dead) {
|
||
return p.newExpr(E.Null{}, expr.loc);
|
||
}
|
||
|
||
if (e_.args.len == 1) {
|
||
const first = e_.args.first_();
|
||
switch (first.data) {
|
||
.e_string => {
|
||
// require.resolve(FOO) => require.resolve(FOO)
|
||
// (this will register dependencies)
|
||
return p.transposeRequireResolveKnownString(first);
|
||
},
|
||
.e_if => {
|
||
// require.resolve(FOO ? '123' : '456')
|
||
// =>
|
||
// FOO ? require.resolve('123') : require.resolve('456')
|
||
// This makes static analysis later easier
|
||
return p.require_resolve_transposer.transposeKnownToBeIf(first, e_.target);
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
|
||
return expr;
|
||
}
|
||
|
||
if (comptime allow_macros) {
|
||
if (is_macro_ref and !p.options.features.is_macro_runtime) {
|
||
const ref = e_.target.data.e_import_identifier.ref;
|
||
const import_record_id = p.macro.refs.get(ref).?;
|
||
p.ignoreUsage(ref);
|
||
if (p.is_control_flow_dead) {
|
||
return p.newExpr(E.Undefined{}, e_.target.loc);
|
||
}
|
||
|
||
if (p.options.features.no_macros) {
|
||
p.log.addError(p.source, expr.loc, "Macros are disabled") catch unreachable;
|
||
return p.newExpr(E.Undefined{}, expr.loc);
|
||
}
|
||
|
||
if (p.source.path.isNodeModule()) {
|
||
p.log.addError(p.source, expr.loc, "For security reasons, macros cannot be run from node_modules.") catch unreachable;
|
||
return p.newExpr(E.Undefined{}, expr.loc);
|
||
}
|
||
|
||
const name = p.symbols.items[ref.innerIndex()].original_name;
|
||
const record = &p.import_records.items[import_record_id];
|
||
const copied = Expr{ .loc = expr.loc, .data = .{ .e_call = e_ } };
|
||
const start_error_count = p.log.msgs.items.len;
|
||
p.macro_call_count += 1;
|
||
const macro_result = p.options.macro_context.call(
|
||
record.path.text,
|
||
p.source.path.sourceDir(),
|
||
p.log,
|
||
p.source,
|
||
record.range,
|
||
copied,
|
||
name,
|
||
) catch |err| {
|
||
if (err == error.MacroFailed) {
|
||
if (p.log.msgs.items.len == start_error_count) {
|
||
p.log.addError(p.source, expr.loc, "macro threw exception") catch unreachable;
|
||
}
|
||
} else {
|
||
p.log.addErrorFmt(p.source, expr.loc, p.allocator, "\"{s}\" error in macro", .{@errorName(err)}) catch unreachable;
|
||
}
|
||
return expr;
|
||
};
|
||
|
||
if (macro_result.data != .e_call) {
|
||
return p.visitExpr(macro_result);
|
||
}
|
||
}
|
||
}
|
||
|
||
// In fast refresh, any function call that looks like a hook (/^use[A-Z]/) is a
|
||
// hook, even if it is not the value of `SExpr` or `SLocal`. It can be anywhere
|
||
// in the function call. This makes sense for some weird situations with `useCallback`,
|
||
// where it is not assigned to a variable.
|
||
//
|
||
// When we see a hook call, we need to hash it, and then mark a flag so that if
|
||
// it is assigned to a variable, that variable also get's hashed.
|
||
if (p.options.features.react_fast_refresh) try_record_hook: {
|
||
const original_name = switch (e_.target.data) {
|
||
inline .e_identifier,
|
||
.e_import_identifier,
|
||
.e_commonjs_export_identifier,
|
||
=> |id| p.symbols.items[id.ref.innerIndex()].original_name,
|
||
.e_dot => |dot| dot.name,
|
||
else => break :try_record_hook,
|
||
};
|
||
if (!ReactRefresh.isHookName(original_name)) break :try_record_hook;
|
||
p.handleReactRefreshHookCall(e_, original_name);
|
||
}
|
||
|
||
// Implement constant folding for 'string'.charCodeAt(n)
|
||
if (e_.args.len == 1) if (e_.target.data.as(.e_dot)) |dot| {
|
||
if (dot.target.data == .e_string and
|
||
dot.target.data.e_string.isUTF8() and
|
||
bun.strings.eqlComptime(dot.name, "charCodeAt"))
|
||
{
|
||
const str = dot.target.data.e_string.data;
|
||
const arg1 = e_.args.at(0).unwrapInlined();
|
||
if (arg1.data == .e_number) {
|
||
const float = arg1.data.e_number.value;
|
||
if (@mod(float, 1) == 0 and
|
||
float < @as(f64, @floatFromInt(str.len)) and
|
||
float >= 0)
|
||
{
|
||
const char = str[@intFromFloat(float)];
|
||
if (char < 0x80) {
|
||
return p.newExpr(E.Number{
|
||
.value = @floatFromInt(char),
|
||
}, expr.loc);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
};
|
||
|
||
return expr;
|
||
},
|
||
.e_new => |e_| {
|
||
e_.target = p.visitExpr(e_.target);
|
||
|
||
for (e_.args.slice()) |*arg| {
|
||
arg.* = p.visitExpr(arg.*);
|
||
}
|
||
|
||
if (p.options.features.minify_syntax) {
|
||
KnownGlobal.maybeMarkConstructorAsPure(e_, p.symbols.items);
|
||
}
|
||
},
|
||
.e_arrow => |e_| {
|
||
if (p.is_revisit_for_substitution) {
|
||
return expr;
|
||
}
|
||
|
||
const old_fn_or_arrow_data = std.mem.toBytes(p.fn_or_arrow_data_visit);
|
||
p.fn_or_arrow_data_visit = FnOrArrowDataVisit{
|
||
.is_arrow = true,
|
||
.is_async = e_.is_async,
|
||
};
|
||
|
||
// Mark if we're inside an async arrow function. This value should be true
|
||
// even if we're inside multiple arrow functions and the closest inclosing
|
||
// arrow function isn't async, as long as at least one enclosing arrow
|
||
// function within the current enclosing function is async.
|
||
const old_inside_async_arrow_fn = p.fn_only_data_visit.is_inside_async_arrow_fn;
|
||
p.fn_only_data_visit.is_inside_async_arrow_fn = e_.is_async or p.fn_only_data_visit.is_inside_async_arrow_fn;
|
||
|
||
p.pushScopeForVisitPass(.function_args, expr.loc) catch unreachable;
|
||
const dupe = p.allocator.dupe(Stmt, e_.body.stmts) catch unreachable;
|
||
|
||
p.visitArgs(e_.args, VisitArgsOpts{
|
||
.has_rest_arg = e_.has_rest_arg,
|
||
.body = dupe,
|
||
.is_unique_formal_parameters = true,
|
||
});
|
||
p.pushScopeForVisitPass(.function_body, e_.body.loc) catch unreachable;
|
||
|
||
var react_hook_data: ?ReactRefresh.HookContext = null;
|
||
const prev = p.react_refresh.hook_ctx_storage;
|
||
defer p.react_refresh.hook_ctx_storage = prev;
|
||
p.react_refresh.hook_ctx_storage = &react_hook_data;
|
||
|
||
var stmts_list = ListManaged(Stmt).fromOwnedSlice(p.allocator, dupe);
|
||
var temp_opts = PrependTempRefsOpts{ .kind = .fn_body };
|
||
p.visitStmtsAndPrependTempRefs(&stmts_list, &temp_opts) catch unreachable;
|
||
p.allocator.free(e_.body.stmts);
|
||
e_.body.stmts = stmts_list.items;
|
||
p.popScope();
|
||
p.popScope();
|
||
|
||
p.fn_only_data_visit.is_inside_async_arrow_fn = old_inside_async_arrow_fn;
|
||
p.fn_or_arrow_data_visit = std.mem.bytesToValue(@TypeOf(p.fn_or_arrow_data_visit), &old_fn_or_arrow_data);
|
||
|
||
if (react_hook_data) |*hook| try_mark_hook: {
|
||
const stmts = p.nearest_stmt_list orelse break :try_mark_hook;
|
||
stmts.append(p.getReactRefreshHookSignalDecl(hook.signature_cb)) catch bun.outOfMemory();
|
||
|
||
p.handleReactRefreshPostVisitFunctionBody(&stmts_list, hook);
|
||
e_.body.stmts = stmts_list.items;
|
||
|
||
return p.getReactRefreshHookSignalInit(hook, expr);
|
||
}
|
||
},
|
||
.e_function => |e_| {
|
||
if (p.is_revisit_for_substitution) {
|
||
return expr;
|
||
}
|
||
|
||
var react_hook_data: ?ReactRefresh.HookContext = null;
|
||
const prev = p.react_refresh.hook_ctx_storage;
|
||
defer p.react_refresh.hook_ctx_storage = prev;
|
||
p.react_refresh.hook_ctx_storage = &react_hook_data;
|
||
|
||
e_.func = p.visitFunc(e_.func, expr.loc);
|
||
|
||
var final_expr = expr;
|
||
|
||
if (react_hook_data) |*hook| try_mark_hook: {
|
||
const stmts = p.nearest_stmt_list orelse break :try_mark_hook;
|
||
stmts.append(p.getReactRefreshHookSignalDecl(hook.signature_cb)) catch bun.outOfMemory();
|
||
final_expr = p.getReactRefreshHookSignalInit(hook, expr);
|
||
}
|
||
|
||
if (e_.func.name) |name| {
|
||
final_expr = p.keepExprSymbolName(final_expr, p.symbols.items[name.ref.?.innerIndex()].original_name);
|
||
}
|
||
|
||
return final_expr;
|
||
},
|
||
.e_class => |e_| {
|
||
if (p.is_revisit_for_substitution) {
|
||
return expr;
|
||
}
|
||
|
||
_ = p.visitClass(expr.loc, e_, Ref.None);
|
||
},
|
||
else => {},
|
||
}
|
||
return expr;
|
||
}
|
||
|
||
/// Record a use of the runtime "__require" polyfill symbol.
/// Only allowed to be called when `allow_runtime` is true.
/// When the target does not polyfill require (e.g. --target=bun,
/// which has no `__require`), this is a no-op.
fn recordUsageOfRuntimeRequire(p: *P) void {
    // Nothing to record for targets without the __require polyfill.
    if (!p.options.features.auto_polyfill_require) return;

    bun.assert(p.options.features.allow_runtime);
    p.ensureRequireSymbol();
    p.recordUsage(p.runtimeIdentifierRef(logger.Loc.Empty, "__require"));
}
|
||
|
||
/// Undo a previously recorded usage of the runtime "__require" polyfill.
/// Counterpart to `recordUsageOfRuntimeRequire`; no-op unless the
/// polyfill is enabled for this target.
fn ignoreUsageOfRuntimeRequire(p: *P) void {
    if (p.options.features.auto_polyfill_require) {
        // A matching recordUsageOfRuntimeRequire must have run first, so the
        // runtime import entry for __require is expected to already exist.
        bun.assert(p.runtime_imports.__require != null);
        p.ignoreUsage(p.runtimeIdentifierRef(logger.Loc.Empty, "__require"));
        // Saturating decrement so the estimate can never underflow.
        p.symbols.items[p.require_ref.innerIndex()].use_count_estimate -|= 1;
    }
}
|
||
|
||
/// Build the sentinel expression that marks a bare `require` reference,
/// tagged so later passes can recognize require() call targets.
/// Must not be called while the source being parsed is the runtime itself.
inline fn valueForRequire(p: *P, loc: logger.Loc) Expr {
    bun.assert(!p.isSourceRuntime());
    return .{ .loc = loc, .data = .{ .e_require_call_target = {} } };
}
|
||
|
||
/// Produce the expression for `import.meta.main` (or its negation when
/// `inverted` is true). When the value is statically known it folds to a
/// boolean literal; otherwise a deferred `e_import_meta_main` node is
/// emitted for the printer to lower later.
inline fn valueForImportMetaMain(p: *P, inverted: bool, loc: logger.Loc) Expr {
    // Statically known value: fold straight to a boolean literal.
    if (p.options.import_meta_main_value) |known| {
        const folded = if (inverted) !known else known;
        return .{ .loc = loc, .data = .{ .e_boolean = .{ .value = folded } } };
    }

    // Node.js does not have import.meta.main, so we end up lowering
    // this to `require.main === module`, but with the ESM format,
    // both `require` and `module` are not present, so the code
    // generation we need is:
    //
    //   import { createRequire } from "node:module";
    //   var __require = createRequire(import.meta.url);
    //   var import_meta_main = __require.main === __require.module;
    //
    // The printer can handle this for us, but we need to reference
    // a handle to the `__require` function.
    if (p.options.lower_import_meta_main_for_node_js) {
        p.recordUsageOfRuntimeRequire();
    }

    return .{
        .loc = loc,
        .data = .{ .e_import_meta_main = .{ .inverted = inverted } },
    };
}
|
||
|
||
/// Visit a function's formal parameter list: enforce spec early errors
/// ("use strict" with a non-simple parameter list, duplicate parameter
/// names where forbidden), then visit each argument's decorators,
/// binding, and default-value expression.
fn visitArgs(p: *P, args: []G.Arg, opts: VisitArgsOpts) void {
    const strict_loc = fnBodyContainsUseStrict(opts.body);
    const has_simple_args = isSimpleParameterList(args, opts.has_rest_arg);
    // Pooled set used to detect duplicate parameter names; only acquired
    // when duplicate checking is actually required (see below).
    var duplicate_args_check: ?*StringVoidMap.Node = null;
    defer {
        // Return the pooled map on every exit path.
        if (duplicate_args_check) |checker| {
            StringVoidMap.release(checker);
        }
    }

    // Section 15.2.1 Static Semantics: Early Errors: "It is a Syntax Error if
    // FunctionBodyContainsUseStrict of FunctionBody is true and
    // IsSimpleParameterList of FormalParameters is false."
    if (strict_loc != null and !has_simple_args) {
        p.log.addRangeError(p.source, p.source.rangeOfString(strict_loc.?), "Cannot use a \"use strict\" directive in a function with a non-simple parameter list") catch unreachable;
    }

    // Section 15.1.1 Static Semantics: Early Errors: "Multiple occurrences of
    // the same BindingIdentifier in a FormalParameterList is only allowed for
    // functions which have simple parameter lists and which are not defined in
    // strict mode code."
    if (opts.is_unique_formal_parameters or strict_loc != null or !has_simple_args or p.isStrictMode()) {
        duplicate_args_check = StringVoidMap.get(bun.default_allocator);
    }

    // Pointer into the pooled node's map, or null when no checking is needed.
    const duplicate_args_check_ptr: ?*StringVoidMap = if (duplicate_args_check != null)
        &duplicate_args_check.?.data
    else
        null;

    for (args) |*arg| {
        if (arg.ts_decorators.len > 0) {
            arg.ts_decorators = p.visitTSDecorators(arg.ts_decorators);
        }

        // visitBinding reports duplicates via the map when one is supplied.
        p.visitBinding(arg.binding, duplicate_args_check_ptr);
        if (arg.default) |default| {
            arg.default = p.visitExpr(default);
        }
    }
}
|
||
|
||
/// Visit every TypeScript decorator expression in place and return the
/// (same) list.
pub fn visitTSDecorators(p: *P, decs: ExprNodeList) ExprNodeList {
    const entries = decs.slice();
    for (entries) |*decorator| {
        decorator.* = p.visitExpr(decorator.*);
    }
    return decs;
}
|
||
|
||
/// Currently a pass-through: returns `_value` unchanged.
/// Historically this wrapped the expression in a pure runtime helper call
/// that preserved the original symbol name for display purposes; that
/// code path is disabled, but the call sites and signature are kept so it
/// can be re-enabled without touching callers.
pub fn keepExprSymbolName(_: *P, _value: Expr, _: string) Expr {
    return _value;
}
|
||
|
||
/// Scan the directive prologue at the top of a function body for a
/// `"use strict"` directive. Comments, empty statements, and other
/// directives may precede it; the first real statement terminates the
/// prologue. Returns the directive's location, or null if not found.
pub fn fnBodyContainsUseStrict(body: []Stmt) ?logger.Loc {
    for (body) |stmt| switch (stmt.data) {
        // Comments and empty statements do not end the prologue.
        .s_comment, .s_empty => {},
        .s_directive => |directive| {
            if (strings.eqlComptime(directive.value, "use strict")) {
                return stmt.loc;
            }
            // Any other directive: keep scanning.
        },
        // First non-directive statement: the prologue is over.
        else => return null,
    };

    return null;
}
|
||
|
||
/// IsSimpleParameterList from the ECMAScript spec: a parameter list is
/// "simple" only when it has no rest parameter, no destructuring
/// patterns, and no default values.
pub fn isSimpleParameterList(args: []G.Arg, has_rest_arg: bool) bool {
    if (has_rest_arg) return false;

    for (args) |arg| {
        const is_plain_identifier = @as(Binding.Tag, arg.binding.data) == .b_identifier;
        if (!is_plain_identifier or arg.default != null) return false;
    }

    return true;
}
|
||
|
||
/// Whether an unused class can be dropped by dead-code elimination:
/// true only when its extends clause, static blocks, property keys,
/// property values, and initializers are all themselves removable.
/// Always false when DCE is disabled.
pub fn classCanBeRemovedIfUnused(p: *P, class: *G.Class) bool {
    if (!p.options.features.dead_code_elimination) return false;
    // Evaluating the extends clause can run arbitrary code.
    if (class.extends) |*extends| {
        if (!p.exprCanBeRemovedIfUnused(extends)) {
            return false;
        }
    }

    for (class.properties) |*property| {
        // Static blocks execute at class definition time; every statement
        // inside must be removable. They have no key/value to check.
        if (property.kind == .class_static_block) {
            if (!p.stmtsCanBeRemovedIfUnused(property.class_static_block.?.stmts.slice())) {
                return false;
            }
            continue;
        }

        // Computed keys are evaluated at definition time.
        if (!p.exprCanBeRemovedIfUnused(&(property.key orelse unreachable))) {
            return false;
        }

        if (property.value) |*val| {
            if (!p.exprCanBeRemovedIfUnused(val)) {
                return false;
            }
        }

        if (property.initializer) |*val| {
            if (!p.exprCanBeRemovedIfUnused(val)) {
                return false;
            }
        }
    }

    return true;
}
|
||
|
||
// TODO:
// When React Fast Refresh is enabled, anything that's a JSX component should not be removable
// This is to improve the reliability of fast refresh between page loads.
/// Conservatively decides whether evaluating `expr` for its side effects
/// alone can be skipped when the result is unused (tree shaking / DCE).
/// Returns false whenever in doubt, and always when DCE is disabled.
pub fn exprCanBeRemovedIfUnused(p: *P, expr: *const Expr) bool {
    if (!p.options.features.dead_code_elimination) return false;
    switch (expr.data) {
        // Pure literals and definitions: evaluating them has no side effects.
        .e_null,
        .e_undefined,
        .e_missing,
        .e_boolean,
        .e_number,
        .e_big_int,
        .e_string,
        .e_this,
        .e_reg_exp,
        .e_function,
        .e_arrow,
        .e_import_meta,
        => {
            return true;
        },

        // An inlined enum is as removable as its underlying value.
        .e_inlined_enum => |e| return p.exprCanBeRemovedIfUnused(&e.value),

        .e_dot => |ex| {
            // Property accesses can trigger getters; rely on the flag set
            // during earlier analysis.
            return ex.can_be_removed_if_unused;
        },
        .e_class => |ex| {
            return p.classCanBeRemovedIfUnused(ex);
        },
        .e_identifier => |ex| {
            bun.assert(!ex.ref.isSourceContentsSlice()); // was not visited

            if (ex.must_keep_due_to_with_stmt) {
                return false;
            }

            // Unbound identifiers cannot be removed because they can have side effects.
            // One possible side effect is throwing a ReferenceError if they don't exist.
            // Another one is a getter with side effects on the global object:
            //
            //   Object.defineProperty(globalThis, 'x', {
            //     get() {
            //       sideEffect();
            //     },
            //   });
            //
            // Be very careful about this possibility. It's tempting to treat all
            // identifier expressions as not having side effects but that's wrong. We
            // must make sure they have been declared by the code we are currently
            // compiling before we can tell that they have no side effects.
            //
            // Note that we currently ignore ReferenceErrors due to TDZ access. This is
            // incorrect but proper TDZ analysis is very complicated and would have to
            // be very conservative, which would inhibit a lot of optimizations of code
            // inside closures. This may need to be revisited if it proves problematic.
            if (ex.can_be_removed_if_unused or p.symbols.items[ex.ref.innerIndex()].kind != .unbound) {
                return true;
            }
        },
        .e_commonjs_export_identifier, .e_import_identifier => {

            // References to an ES6 import item are always side-effect free in an
            // ECMAScript environment.
            //
            // They could technically have side effects if the imported module is a
            // CommonJS module and the import item was translated to a property access
            // (which esbuild's bundler does) and the property has a getter with side
            // effects.
            //
            // But this is very unlikely and respecting this edge case would mean
            // disabling tree shaking of all code that references an export from a
            // CommonJS module. It would also likely violate the expectations of some
            // developers because the code *looks* like it should be able to be tree
            // shaken.
            //
            // So we deliberately ignore this edge case and always treat import item
            // references as being side-effect free.
            return true;
        },
        .e_if => |ex| {
            // Each branch is removable if it is itself removable, or if it is an
            // unbound identifier guarded by the test (e.g. "typeof x !== 'undefined' ? x : y").
            return p.exprCanBeRemovedIfUnused(&ex.test_) and
                (p.isSideEffectFreeUnboundIdentifierRef(
                    ex.yes,
                    ex.test_,
                    true,
                ) or
                    p.exprCanBeRemovedIfUnused(&ex.yes)) and
                (p.isSideEffectFreeUnboundIdentifierRef(
                    ex.no,
                    ex.test_,
                    false,
                ) or p.exprCanBeRemovedIfUnused(
                    &ex.no,
                ));
        },
        .e_array => |ex| {
            for (ex.items.slice()) |*item| {
                if (!p.exprCanBeRemovedIfUnused(item)) {
                    return false;
                }
            }

            return true;
        },
        .e_object => |ex| {
            for (ex.properties.slice()) |*property| {

                // The key must still be evaluated if it's computed or a spread
                if (property.kind == .spread or (property.flags.contains(.is_computed) and !property.key.?.isPrimitiveLiteral()) or property.flags.contains(.is_spread)) {
                    return false;
                }

                if (property.value) |*val| {
                    if (!p.exprCanBeRemovedIfUnused(val)) {
                        return false;
                    }
                }
            }
            return true;
        },
        .e_call => |ex| {
            // A call that has been marked "__PURE__" can be removed if all arguments
            // can be removed. The annotation causes us to ignore the target.
            if (ex.can_be_unwrapped_if_unused) {
                for (ex.args.slice()) |*arg| {
                    if (!p.exprCanBeRemovedIfUnused(arg)) {
                        return false;
                    }
                }
                return true;
            }
        },
        .e_new => |ex| {

            // A call that has been marked "__PURE__" can be removed if all arguments
            // can be removed. The annotation causes us to ignore the target.
            if (ex.can_be_unwrapped_if_unused) {
                for (ex.args.slice()) |*arg| {
                    if (!p.exprCanBeRemovedIfUnused(arg)) {
                        return false;
                    }
                }

                return true;
            }
        },
        .e_unary => |ex| {
            switch (ex.op) {
                // These operators must not have any type conversions that can execute code
                // such as "toString" or "valueOf". They must also never throw any exceptions.
                .un_void, .un_not => {
                    return p.exprCanBeRemovedIfUnused(&ex.value);
                },

                // The "typeof" operator doesn't do any type conversions so it can be removed
                // if the result is unused and the operand has no side effects. However, it
                // has a special case where if the operand is an identifier expression such
                // as "typeof x" and "x" doesn't exist, no reference error is thrown so the
                // operation has no side effects.
                //
                // Note that there *is* actually a case where "typeof x" can throw an error:
                // when "x" is being referenced inside of its TDZ (temporal dead zone). TDZ
                // checks are not yet handled correctly by bun or esbuild, so this possibility is
                // currently ignored.
                .un_typeof => {
                    if (ex.value.data == .e_identifier) {
                        return true;
                    }

                    return p.exprCanBeRemovedIfUnused(&ex.value);
                },

                else => {},
            }
        },
        .e_binary => |ex| {
            switch (ex.op) {
                // These operators must not have any type conversions that can execute code
                // such as "toString" or "valueOf". They must also never throw any exceptions.
                .bin_strict_eq,
                .bin_strict_ne,
                .bin_comma,
                .bin_nullish_coalescing,
                => return p.exprCanBeRemovedIfUnused(&ex.left) and p.exprCanBeRemovedIfUnused(&ex.right),

                // Special-case "||" to make sure "typeof x === 'undefined' || x" can be removed
                .bin_logical_or => return p.exprCanBeRemovedIfUnused(&ex.left) and
                    (p.isSideEffectFreeUnboundIdentifierRef(ex.right, ex.left, false) or p.exprCanBeRemovedIfUnused(&ex.right)),

                // Special-case "&&" to make sure "typeof x !== 'undefined' && x" can be removed
                .bin_logical_and => return p.exprCanBeRemovedIfUnused(&ex.left) and
                    (p.isSideEffectFreeUnboundIdentifierRef(ex.right, ex.left, true) or p.exprCanBeRemovedIfUnused(&ex.right)),

                // For "==" and "!=", pretend the operator was actually "===" or "!==". If
                // we know that we can convert it to "==" or "!=", then we can consider the
                // operator itself to have no side effects. This matters because our mangle
                // logic will convert "typeof x === 'object'" into "typeof x == 'object'"
                // and since "typeof x === 'object'" is considered to be side-effect free,
                // we must also consider "typeof x == 'object'" to be side-effect free.
                .bin_loose_eq, .bin_loose_ne => return SideEffects.canChangeStrictToLoose(
                    ex.left.data,
                    ex.right.data,
                ) and
                    p.exprCanBeRemovedIfUnused(&ex.left) and p.exprCanBeRemovedIfUnused(&ex.right),
                else => {},
            }
        },
        .e_template => |templ| {
            // Untagged templates: removable only if every interpolated part is
            // removable and has a known primitive type (no "toString" calls).
            if (templ.tag == null) {
                for (templ.parts) |part| {
                    if (!p.exprCanBeRemovedIfUnused(&part.value) or part.value.knownPrimitive() == .unknown) {
                        return false;
                    }
                }

                return true;
            }
        },
        else => {},
    }

    // Anything not explicitly whitelisted above is assumed to have side effects.
    return false;
}
|
||
|
||
// // This is based on exprCanBeRemovedIfUnused.
|
||
// // The main difference: identifiers, functions, arrow functions cause it to return false
|
||
// pub fn exprCanBeHoistedForJSX(p: *P, expr: *const Expr) bool {
|
||
// if (comptime jsx_transform_type != .react) {
|
||
// unreachable;
|
||
// }
|
||
|
||
// switch (expr.data) {
|
||
// .e_null,
|
||
// .e_undefined,
|
||
// .e_missing,
|
||
// .e_boolean,
|
||
// .e_number,
|
||
// .e_big_int,
|
||
// .e_string,
|
||
// .e_reg_exp,
|
||
// => {
|
||
// return true;
|
||
// },
|
||
|
||
// .e_dot => |ex| {
|
||
// return ex.can_be_removed_if_unused;
|
||
// },
|
||
// .e_import_identifier => {
|
||
|
||
// // References to an ES6 import item are always side-effect free in an
|
||
// // ECMAScript environment.
|
||
// //
|
||
// // They could technically have side effects if the imported module is a
|
||
// // CommonJS module and the import item was translated to a property access
|
||
// // (which esbuild's bundler does) and the property has a getter with side
|
||
// // effects.
|
||
// //
|
||
// // But this is very unlikely and respecting this edge case would mean
|
||
// // disabling tree shaking of all code that references an export from a
|
||
// // CommonJS module. It would also likely violate the expectations of some
|
||
// // developers because the code *looks* like it should be able to be tree
|
||
// // shaken.
|
||
// //
|
||
// // So we deliberately ignore this edge case and always treat import item
|
||
// // references as being side-effect free.
|
||
// return true;
|
||
// },
|
||
// .e_if => |ex| {
|
||
// return p.exprCanBeHoistedForJSX(&ex.test_) and
|
||
// (p.isSideEffectFreeUnboundIdentifierRef(
|
||
// ex.yes,
|
||
// ex.test_,
|
||
// true,
|
||
// ) or
|
||
// p.exprCanBeHoistedForJSX(&ex.yes)) and
|
||
// (p.isSideEffectFreeUnboundIdentifierRef(
|
||
// ex.no,
|
||
// ex.test_,
|
||
// false,
|
||
// ) or p.exprCanBeHoistedForJSX(
|
||
// &ex.no,
|
||
// ));
|
||
// },
|
||
// .e_array => |ex| {
|
||
// for (ex.items.slice()) |*item| {
|
||
// if (!p.exprCanBeHoistedForJSX(item)) {
|
||
// return false;
|
||
// }
|
||
// }
|
||
|
||
// return true;
|
||
// },
|
||
// .e_object => |ex| {
|
||
// // macros disable this because macros get inlined
|
||
// // so it's sort of the opposite of the purpose of this function
|
||
// if (ex.was_originally_macro)
|
||
// return false;
|
||
|
||
// for (ex.properties.slice()) |*property| {
|
||
|
||
// // The key must still be evaluated if it's computed or a spread
|
||
// if (property.kind == .spread or property.flags.contains(.is_computed) or property.flags.contains(.is_spread)) {
|
||
// return false;
|
||
// }
|
||
|
||
// if (property.value) |*val| {
|
||
// if (!p.exprCanBeHoistedForJSX(val)) {
|
||
// return false;
|
||
// }
|
||
// }
|
||
// }
|
||
// return true;
|
||
// },
|
||
// .e_call => |ex| {
|
||
|
||
// // A call that has been marked "__PURE__" can be removed if all arguments
|
||
// // can be removed. The annotation causes us to ignore the target.
|
||
// if (ex.can_be_unwrapped_if_unused) {
|
||
// for (ex.args.slice()) |*arg| {
|
||
// if (!p.exprCanBeHoistedForJSX(arg)) {
|
||
// return false;
|
||
// }
|
||
// }
|
||
// return true;
|
||
// }
|
||
// },
|
||
// .e_new => |ex| {
|
||
|
||
// // A call that has been marked "__PURE__" can be removed if all arguments
|
||
// // can be removed. The annotation causes us to ignore the target.
|
||
// if (ex.can_be_unwrapped_if_unused) {
|
||
// for (ex.args.slice()) |*arg| {
|
||
// if (!p.exprCanBeHoistedForJSX(arg)) {
|
||
// return false;
|
||
// }
|
||
// }
|
||
|
||
// return true;
|
||
// }
|
||
// },
|
||
// .e_unary => |ex| {
|
||
// switch (ex.op) {
|
||
// // These operators must not have any type conversions that can execute code
|
||
// // such as "toString" or "valueOf". They must also never throw any exceptions.
|
||
// .un_void, .un_not => {
|
||
// return p.exprCanBeHoistedForJSX(&ex.value);
|
||
// },
|
||
|
||
// // The "typeof" operator doesn't do any type conversions so it can be removed
|
||
// // if the result is unused and the operand has no side effects. However, it
|
||
// // has a special case where if the operand is an identifier expression such
|
||
// // as "typeof x" and "x" doesn't exist, no reference error is thrown so the
|
||
// // operation has no side effects.
|
||
// //
|
||
// // Note that there *is* actually a case where "typeof x" can throw an error:
|
||
// // when "x" is being referenced inside of its TDZ (temporal dead zone). TDZ
|
||
// // checks are not yet handled correctly by bun or esbuild, so this possibility is
|
||
// // currently ignored.
|
||
// .un_typeof => {
|
||
// if (ex.value.data == .e_identifier) {
|
||
// return true;
|
||
// }
|
||
|
||
// return p.exprCanBeHoistedForJSX(&ex.value);
|
||
// },
|
||
|
||
// else => {},
|
||
// }
|
||
// },
|
||
// .e_binary => |ex| {
|
||
// switch (ex.op) {
|
||
// // These operators must not have any type conversions that can execute code
|
||
// // such as "toString" or "valueOf". They must also never throw any exceptions.
|
||
// .bin_strict_eq,
|
||
// .bin_strict_ne,
|
||
// .bin_comma,
|
||
// .bin_nullish_coalescing,
|
||
// => return p.exprCanBeHoistedForJSX(&ex.left) and p.exprCanBeHoistedForJSX(&ex.right),
|
||
|
||
// // Special-case "||" to make sure "typeof x === 'undefined' || x" can be removed
|
||
// .bin_logical_or => return p.exprCanBeHoistedForJSX(&ex.left) and
|
||
// (p.isSideEffectFreeUnboundIdentifierRef(ex.right, ex.left, false) or p.exprCanBeHoistedForJSX(&ex.right)),
|
||
|
||
// // Special-case "&&" to make sure "typeof x !== 'undefined' && x" can be removed
|
||
// .bin_logical_and => return p.exprCanBeHoistedForJSX(&ex.left) and
|
||
// (p.isSideEffectFreeUnboundIdentifierRef(ex.right, ex.left, true) or p.exprCanBeHoistedForJSX(&ex.right)),
|
||
|
||
// // For "==" and "!=", pretend the operator was actually "===" or "!==". If
|
||
// // we know that we can convert it to "==" or "!=", then we can consider the
|
||
// // operator itself to have no side effects. This matters because our mangle
|
||
// // logic will convert "typeof x === 'object'" into "typeof x == 'object'"
|
||
// // and since "typeof x === 'object'" is considered to be side-effect free,
|
||
// // we must also consider "typeof x == 'object'" to be side-effect free.
|
||
// .bin_loose_eq, .bin_loose_ne => return SideEffects.canChangeStrictToLoose(
|
||
// ex.left.data,
|
||
// ex.right.data,
|
||
// ) and
|
||
// p.exprCanBeHoistedForJSX(&ex.left) and p.exprCanBeHoistedForJSX(&ex.right),
|
||
// else => {},
|
||
// }
|
||
// },
|
||
// .e_template => |templ| {
|
||
// if (templ.tag == null) {
|
||
// for (templ.parts) |part| {
|
||
// if (!p.exprCanBeHoistedForJSX(&part.value) or part.value.knownPrimitive() == .unknown) {
|
||
// return false;
|
||
// }
|
||
// }
|
||
// }
|
||
|
||
// return true;
|
||
// },
|
||
// else => {},
|
||
|
||
// // These may reference variables from an upper scope
|
||
// // it's possible to detect that, but we are cutting scope for now
|
||
// // .e_function,
|
||
// // .e_arrow,
|
||
// // .e_this,
|
||
// }
|
||
|
||
// return false;
|
||
// }
|
||
|
||
/// Reports whether reading `value` (an unbound identifier) is made side-effect
/// free by `guard_condition`, a `typeof` comparison guarding the branch.
///
/// This is what keeps patterns like "typeof x !== 'undefined' && x" removable:
/// normally reading an unbound `x` can throw a ReferenceError, but not on a
/// branch where a `typeof x` check already ran. `is_yes_branch` indicates which
/// branch of the guard `value` appears on.
fn isSideEffectFreeUnboundIdentifierRef(p: *P, value: Expr, guard_condition: Expr, is_yes_branch: bool) bool {
    // Only applies when `value` is an unbound identifier and the guard is a
    // binary comparison expression.
    if (value.data != .e_identifier or
        p.symbols.items[value.data.e_identifier.ref.innerIndex()].kind != .unbound or
        guard_condition.data != .e_binary)
        return false;

    const binary = guard_condition.data.e_binary.*;

    switch (binary.op) {
        .bin_strict_eq, .bin_strict_ne, .bin_loose_eq, .bin_loose_ne => {
            // typeof x !== 'undefined'
            var typeof: Expr.Data = binary.left.data;
            var compare: Expr.Data = binary.right.data;
            // typeof 'undefined' !== x
            // (handle the string literal appearing on either side)
            if (typeof == .e_string) {
                typeof = binary.right.data;
                compare = binary.left.data;
            }

            // this order because Expr.Data Tag is not a pointer
            // so it should be slightly faster to compare
            if (compare != .e_string or
                typeof != .e_unary)
                return false;
            const unary = typeof.e_unary.*;

            // The guard must literally be `typeof <identifier>`.
            if (unary.op != .un_typeof or unary.value.data != .e_identifier)
                return false;

            const id = value.data.e_identifier.ref;
            const id2 = unary.value.data.e_identifier.ref;
            // The compared string, the branch direction, and the (in)equality
            // operator must combine so the guarded branch proves the identifier
            // is defined, and the guard must test the same ref as `value`.
            return ((compare.e_string.eqlComptime("undefined") == is_yes_branch) ==
                (binary.op == .bin_strict_ne or binary.op == .bin_loose_ne)) and
                id.eql(id2);
        },
        else => return false,
    }
}
|
||
|
||
/// Selects and returns the automatic-runtime JSX helper for an element:
/// "jsxs" for static children in production (when supported), "jsxDEV" in
/// development, and plain "jsx" otherwise.
fn jsxImportAutomatic(p: *P, loc: logger.Loc, is_static: bool) Expr {
    const development = p.options.jsx.development;
    const helper: JSXImport = if (is_static and !development and FeatureFlags.support_jsxs_in_jsx_transform)
        .jsxs
    else if (development)
        .jsxDEV
    else
        .jsx;
    return p.jsxImport(helper, loc);
}
|
||
|
||
/// Returns an identifier expression referencing the JSX runtime helper `kind`
/// (jsx / jsxs / jsxDEV / ...), lazily declaring a generated import symbol in
/// the module scope the first time each helper is used.
fn jsxImport(p: *P, kind: JSXImport, loc: logger.Loc) Expr {
    switch (kind) {
        // `inline else` so @tagName(field) is comptime-known per variant and
        // @field can address the matching slot of p.jsx_imports.
        inline else => |field| {
            const ref: Ref = brk: {
                if (p.jsx_imports.getWithTag(kind) == null) {
                    // First use of this helper: declare a generated symbol
                    // named after the enum tag and cache it in p.jsx_imports.
                    const symbol_name = @tagName(field);

                    const loc_ref = LocRef{
                        .loc = loc,
                        .ref = (p.declareGeneratedSymbol(.other, symbol_name) catch unreachable),
                    };

                    // Register the symbol in the module scope and mark it as an
                    // import item so later passes treat it like one.
                    p.module_scope.generated.push(p.allocator, loc_ref.ref.?) catch unreachable;
                    p.is_import_item.put(p.allocator, loc_ref.ref.?, {}) catch unreachable;
                    @field(p.jsx_imports, @tagName(field)) = loc_ref;
                    break :brk loc_ref.ref.?;
                }

                // Helper was already declared earlier: reuse the cached ref.
                break :brk p.jsx_imports.getWithTag(kind).?;
            };

            // Count this reference so the symbol isn't dropped as unused.
            p.recordUsage(ref);
            return p.handleIdentifier(
                loc,
                E.Identifier{
                    .ref = ref,
                    // The helper reference is pure: unused references (and, for
                    // dead JSX, calls) may be removed by later passes.
                    .can_be_removed_if_unused = true,
                    .call_can_be_unwrapped_if_unused = true,
                },
                null,
                .{
                    .was_originally_identifier = true,
                },
            );
        },
    }
}
|
||
|
||
/// Chooses the declaration keyword actually emitted for a local declaration,
/// possibly downgrading "let"/"const" to "var" (for safe relocation) or
/// "const" to "let" (for size when minifying under the bundler).
fn selectLocalKind(p: *P, kind: S.Local.Kind) S.Local.Kind {
    // Kit HMR: preserve the declared kind so codegen can distinguish
    // ESM live bindings and emit more efficient code.
    if (p.options.features.hot_module_reloading) return kind;

    // "var" tolerates being separated from its initializer, which lets us
    // safely move the declaration into a nested scope later. Only applies to
    // top-level, non-"using" declarations.
    const is_top_level = p.current_scope.parent == null;
    const may_need_split = p.options.bundle or p.will_wrap_module_in_try_catch_for_using;
    if (may_need_split and is_top_level and !kind.isUsing()) {
        return .k_var;
    }

    // Size optimization: "let" is shorter than "const". Only safe when
    // bundling, since assigning to "const" is only an error when bundling.
    if (kind == .k_const and p.options.bundle and p.options.features.minify_syntax) {
        return .k_let;
    }

    return kind;
}
|
||
|
||
/// Attempts to hoist a "var" declaration list out of a nested scope up to the
/// module's top level. Returns `.ok = false` when hoisting would stop before
/// reaching the module scope; otherwise `.ok = true`, carrying a replacement
/// assignment statement when any declaration had work left to do in place.
fn maybeRelocateVarsToTopLevel(p: *P, decls: []const G.Decl, mode: RelocateVars.Mode) RelocateVars {
    // Already at the top level: nothing to relocate.
    if (p.current_scope == p.module_scope) {
        return .{ .ok = false };
    }

    // Walk up to the nearest scope that stops "var" hoisting.
    var hoist_target = p.current_scope;
    while (!hoist_target.kindStopsHoisting()) {
        if (comptime Environment.allow_assert) assert(hoist_target.parent != null);
        hoist_target = hoist_target.parent.?;
    }

    // Hoisting stopped inside a function (or similar), not the module scope.
    if (hoist_target != p.module_scope) {
        return .{ .ok = false };
    }

    // Fold every initialized declaration into a single comma expression of
    // assignments; for-in/for-of heads also keep the bare binding expression.
    var joined = Expr{ .loc = logger.Loc.Empty, .data = Expr.Data{ .e_missing = E.Missing{} } };

    for (decls) |decl| {
        const binding_expr = Binding.toExpr(
            &decl.binding,
            p.to_expr_wrapper_hoisted,
        );
        if (decl.value) |init_value| {
            joined = joined.joinWithComma(Expr.assign(binding_expr, init_value), p.allocator);
        } else if (mode == .for_in_or_for_of) {
            joined = joined.joinWithComma(binding_expr, p.allocator);
        }
    }

    // No initializers at all: the declaration simply disappears from here.
    if (joined.data == .e_missing) {
        return .{ .ok = true };
    }

    return .{ .stmt = p.s(S.SExpr{ .value = joined }, joined.loc), .ok = true };
}
|
||
|
||
// EDot nodes represent a property access. This function may return an
// expression to replace the property access with. It assumes that the
// target of the EDot expression has already been visited.
//
// Returns null when no rewrite applies and the property access is kept as-is.
fn maybeRewritePropertyAccess(
    p: *P,
    loc: logger.Loc,
    target: js_ast.Expr,
    name: string,
    name_loc: logger.Loc,
    identifier_opts: IdentifierOpts,
) ?Expr {
    switch (target.data) {
        .e_identifier => |id| {
            // Rewrite property accesses on explicit namespace imports as an identifier.
            // This lets us replace them easily in the printer to rebind them to
            // something else without paying the cost of a whole-tree traversal during
            // module linking just to rewrite these EDot expressions.
            if (p.options.bundle) {
                if (p.import_items_for_namespace.getPtr(id.ref)) |import_items| {
                    const ref = (import_items.get(name) orelse brk: {
                        // Generate a new import item symbol in the module scope
                        const new_item = LocRef{
                            .loc = name_loc,
                            .ref = p.newSymbol(.import, name) catch unreachable,
                        };
                        p.module_scope.generated.push(p.allocator, new_item.ref.?) catch unreachable;

                        import_items.put(name, new_item) catch unreachable;
                        p.is_import_item.put(p.allocator, new_item.ref.?, {}) catch unreachable;

                        var symbol = &p.symbols.items[new_item.ref.?.innerIndex()];

                        // Mark this as generated in case it's missing. We don't want to
                        // generate errors for missing import items that are automatically
                        // generated.
                        symbol.import_item_status = .generated;

                        break :brk new_item;
                    }).ref.?;

                    // Undo the usage count for the namespace itself. This is used later
                    // to detect whether the namespace symbol has ever been "captured"
                    // or whether it has just been used to read properties off of.
                    //
                    // The benefit of doing this is that if both this module and the
                    // imported module end up in the same module group and the namespace
                    // symbol has never been captured, then we don't need to generate
                    // any code for the namespace at all.
                    p.ignoreUsage(id.ref);

                    // Track how many times we've referenced this symbol
                    p.recordUsage(ref);

                    return p.handleIdentifier(
                        name_loc,
                        E.Identifier{ .ref = ref },
                        name,
                        .{
                            .assign_target = identifier_opts.assign_target,
                            .is_call_target = identifier_opts.is_call_target,
                            .is_delete_target = identifier_opts.is_delete_target,

                            // If this expression is used as the target of a call expression, make
                            // sure the value of "this" is preserved.
                            .was_originally_identifier = false,
                        },
                    );
                }
            }

            if (!p.is_control_flow_dead and id.ref.eql(p.module_ref)) {
                // Rewrite "module.require()" to "require()" for Webpack compatibility.
                // See https://github.com/webpack/webpack/pull/7750 for more info.
                // This also makes correctness a little easier.
                if (identifier_opts.is_call_target and strings.eqlComptime(name, "require")) {
                    p.ignoreUsage(p.module_ref);
                    return p.valueForRequire(name_loc);
                } else if (!p.commonjs_named_exports_deoptimized and strings.eqlComptime(name, "exports")) {
                    if (identifier_opts.assign_target != .none) {
                        p.commonjs_module_exports_assigned_deoptimized = true;
                    }

                    // Detect if we are doing
                    //
                    //  module.exports = {
                    //    foo: "bar"
                    //  }
                    //
                    //  Note that it cannot be any of these:
                    //
                    //  module.exports += { };
                    //  delete module.exports = {};
                    //  module.exports()
                    if (!(identifier_opts.is_call_target or identifier_opts.is_delete_target) and
                        identifier_opts.assign_target == .replace and
                        p.stmt_expr_value == .e_binary and
                        p.stmt_expr_value.e_binary.op == .bin_assign)
                    {
                        if (
                        // if it's not top-level, don't do this
                        p.module_scope != p.current_scope or
                            // if you do
                            //
                            // exports.foo = 123;
                            // module.exports = {};
                            //
                            // that's a de-opt.
                            p.commonjs_named_exports.count() > 0 or

                            // anything which is not module.exports = {} is a de-opt.
                            p.stmt_expr_value.e_binary.right.data != .e_object or
                            p.stmt_expr_value.e_binary.left.data != .e_dot or
                            !strings.eqlComptime(p.stmt_expr_value.e_binary.left.data.e_dot.name, "exports") or
                            p.stmt_expr_value.e_binary.left.data.e_dot.target.data != .e_identifier or
                            !p.stmt_expr_value.e_binary.left.data.e_dot.target.data.e_identifier.ref.eql(p.module_ref))
                        {
                            p.deoptimizeCommonJSNamedExports();
                            return null;
                        }

                        const props: []const G.Property = p.stmt_expr_value.e_binary.right.data.e_object.properties.slice();

                        // empty object de-opts because otherwise the statement becomes
                        // <empty space> = {};
                        //
                        // FIX: this was previously a `for ... else` clause. Since the
                        // loop body never `break`s, Zig evaluates the `else` on every
                        // normal loop completion, so the rewrite below was unreachable
                        // and every `module.exports = {...}` de-opted. Use an explicit
                        // empty check instead.
                        if (props.len == 0) {
                            p.deoptimizeCommonJSNamedExports();
                            return null;
                        }

                        for (props) |prop| {
                            // if it's not a trivial object literal, de-opt
                            if (prop.kind != .normal or
                                prop.key == null or
                                prop.key.?.data != .e_string or
                                prop.flags.contains(Flags.Property.is_method) or
                                prop.flags.contains(Flags.Property.is_computed) or
                                prop.flags.contains(Flags.Property.is_spread) or
                                prop.flags.contains(Flags.Property.is_static) or
                                // If it creates a new scope, we can't do this optimization right now
                                // Our scope order verification stuff will get mad
                                // But we should let you do module.exports = { bar: foo(), baz: 123 }
                                // just not module.exports = { bar: function() {} }
                                // just not module.exports = { bar() {} }
                                switch (prop.value.?.data) {
                                    .e_commonjs_export_identifier, .e_import_identifier, .e_identifier => false,
                                    .e_call => |call| switch (call.target.data) {
                                        .e_commonjs_export_identifier, .e_import_identifier, .e_identifier => false,
                                        else => |call_target| !@as(Expr.Tag, call_target).isPrimitiveLiteral(),
                                    },
                                    else => !prop.value.?.isPrimitiveLiteral(),
                                }) {
                                p.deoptimizeCommonJSNamedExports();
                                return null;
                            }
                        }

                        var stmts = std.ArrayList(Stmt).initCapacity(p.allocator, props.len * 2) catch unreachable;
                        var decls = p.allocator.alloc(Decl, props.len) catch unreachable;
                        var clause_items = p.allocator.alloc(js_ast.ClauseItem, props.len) catch unreachable;

                        for (props) |prop| {
                            const key = prop.key.?.data.e_string.string(p.allocator) catch unreachable;
                            const visited_value = p.visitExpr(prop.value.?);
                            const value = SideEffects.simplifyUnusedExpr(p, visited_value) orelse visited_value;

                            // We are doing `module.exports = { ... }`
                            // lets rewrite it to a series of what will become export assignments
                            const named_export_entry = p.commonjs_named_exports.getOrPut(p.allocator, key) catch unreachable;
                            if (!named_export_entry.found_existing) {
                                const new_ref = p.newSymbol(
                                    .other,
                                    std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(key)}) catch unreachable,
                                ) catch unreachable;
                                p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
                                named_export_entry.value_ptr.* = .{
                                    .loc_ref = LocRef{
                                        .loc = name_loc,
                                        .ref = new_ref,
                                    },
                                    .needs_decl = false,
                                };
                            }
                            const ref = named_export_entry.value_ptr.loc_ref.ref.?;
                            // module.exports = {
                            //    foo: "bar",
                            //    baz: "qux",
                            // }
                            // ->
                            // exports.foo = "bar", exports.baz = "qux"
                            // Which will become
                            // $foo = "bar";
                            // $baz = "qux";
                            // export { $foo as foo, $baz as baz }

                            decls[0] = .{
                                .binding = p.b(B.Identifier{ .ref = ref }, prop.key.?.loc),
                                .value = value,
                            };
                            // we have to ensure these are known to be top-level
                            p.declared_symbols.append(p.allocator, .{
                                .ref = ref,
                                .is_top_level = true,
                            }) catch unreachable;
                            p.had_commonjs_named_exports_this_visit = true;
                            clause_items[0] = js_ast.ClauseItem{
                                // We want the generated name to not conflict
                                .alias = key,
                                .alias_loc = prop.key.?.loc,
                                .name = named_export_entry.value_ptr.loc_ref,
                            };

                            stmts.appendSlice(
                                &[_]Stmt{
                                    p.s(
                                        S.Local{
                                            .kind = .k_var,
                                            .is_export = false,
                                            .was_commonjs_export = true,
                                            .decls = G.Decl.List.init(decls[0..1]),
                                        },
                                        prop.key.?.loc,
                                    ),
                                    p.s(
                                        S.ExportClause{
                                            .items = clause_items[0..1],
                                            .is_single_line = true,
                                        },
                                        prop.key.?.loc,
                                    ),
                                },
                            ) catch unreachable;
                            // Advance the scratch slices so the next property uses
                            // fresh, stable one-element sub-slices.
                            decls = decls[1..];
                            clause_items = clause_items[1..];
                        }

                        p.ignoreUsage(p.module_ref);
                        p.commonjs_replacement_stmts = stmts.items;
                        return p.newExpr(E.Missing{}, name_loc);
                    }

                    // Deoptimizations:
                    //      delete module.exports
                    //      module.exports();
                    if (identifier_opts.is_call_target or identifier_opts.is_delete_target or identifier_opts.assign_target != .none) {
                        p.deoptimizeCommonJSNamedExports();
                        return null;
                    }

                    // rewrite `module.exports` to `exports`
                    return .{ .data = .e_module_dot_exports, .loc = name_loc };
                } else if (p.options.bundle and strings.eqlComptime(name, "id") and identifier_opts.assign_target == .none) {
                    // inline module.id
                    p.ignoreUsage(p.module_ref);
                    return p.newExpr(E.String.init(p.source.path.pretty), name_loc);
                } else if (p.options.bundle and strings.eqlComptime(name, "filename") and identifier_opts.assign_target == .none) {
                    // inline module.filename
                    p.ignoreUsage(p.module_ref);
                    return p.newExpr(E.String.init(p.source.path.name.filename), name_loc);
                } else if (p.options.bundle and strings.eqlComptime(name, "path") and identifier_opts.assign_target == .none) {
                    // inline module.path
                    p.ignoreUsage(p.module_ref);
                    return p.newExpr(E.String.init(p.source.path.pretty), name_loc);
                }
            }

            if (p.shouldUnwrapCommonJSToESM()) {
                if (!p.is_control_flow_dead and id.ref.eql(p.exports_ref)) {
                    if (!p.commonjs_named_exports_deoptimized) {
                        if (identifier_opts.is_delete_target) {
                            p.deoptimizeCommonJSNamedExports();
                            return null;
                        }

                        // `exports.<name>` — materialize (or reuse) the generated
                        // named-export symbol for this property name.
                        const named_export_entry = p.commonjs_named_exports.getOrPut(p.allocator, name) catch unreachable;
                        if (!named_export_entry.found_existing) {
                            const new_ref = p.newSymbol(
                                .other,
                                std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(name)}) catch unreachable,
                            ) catch unreachable;
                            p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
                            named_export_entry.value_ptr.* = .{
                                .loc_ref = LocRef{
                                    .loc = name_loc,
                                    .ref = new_ref,
                                },
                                .needs_decl = true,
                            };
                            if (p.commonjs_named_exports_needs_conversion == std.math.maxInt(u32))
                                p.commonjs_named_exports_needs_conversion = @as(u32, @truncate(p.commonjs_named_exports.count() - 1));
                        }

                        const ref = named_export_entry.value_ptr.*.loc_ref.ref.?;
                        p.ignoreUsage(id.ref);
                        p.recordUsage(ref);

                        return p.newExpr(
                            E.CommonJSExportIdentifier{
                                .ref = ref,
                            },
                            name_loc,
                        );
                    } else if (p.options.features.commonjs_at_runtime and identifier_opts.assign_target != .none) {
                        p.has_commonjs_export_names = true;
                    }
                }
            }

            // Handle references to namespaces or namespace members
            if (p.ts_namespace.expr == .e_identifier and
                id.ref.eql(p.ts_namespace.expr.e_identifier.ref) and
                identifier_opts.assign_target == .none and
                !identifier_opts.is_delete_target)
            {
                return p.maybeRewritePropertyAccessForNamespace(name, &target, loc, name_loc);
            }
        },
        // TODO: e_inlined_enum -> .e_string -> "length" should inline the length
        .e_string => |str| {
            if (p.options.features.minify_syntax) {
                // minify "long-string".length to 11
                if (strings.eqlComptime(name, "length")) {
                    if (str.javascriptLength()) |len| {
                        return p.newExpr(E.Number{ .value = @floatFromInt(len) }, loc);
                    }
                }
            }
        },
        .e_object => |obj| {
            if (comptime FeatureFlags.inline_properties_in_transpiler) {
                if (p.options.features.minify_syntax) {
                    // Rewrite a property access like this:
                    //   { f: () => {} }.f
                    // To:
                    //   () => {}
                    //
                    // To avoid thinking too much about edgecases, only do this for:
                    //   1) Objects with a single property
                    //   2) Not a method, not a computed property
                    if (obj.properties.len == 1 and
                        !identifier_opts.is_delete_target and
                        identifier_opts.assign_target == .none and !identifier_opts.is_call_target)
                    {
                        const prop: G.Property = obj.properties.ptr[0];
                        if (prop.value != null and
                            prop.flags.count() == 0 and
                            prop.key != null and
                            prop.key.?.data == .e_string and
                            prop.key.?.data.e_string.eql([]const u8, name) and
                            !bun.strings.eqlComptime(name, "__proto__"))
                        {
                            return prop.value.?;
                        }
                    }
                }
            }
        },
        .e_import_meta => {
            // Make `import.meta.url` side effect free.
            if (strings.eqlComptime(name, "url")) {
                return p.newExpr(
                    E.Dot{
                        .target = target,
                        .name = name,
                        .name_loc = name_loc,
                        .can_be_removed_if_unused = true,
                    },
                    target.loc,
                );
            }

            if (strings.eqlComptime(name, "main")) {
                return p.valueForImportMetaMain(false, target.loc);
            }
        },
        .e_require_call_target => {
            if (strings.eqlComptime(name, "main")) {
                return .{ .loc = loc, .data = .e_require_main };
            }
        },
        .e_import_identifier => |id| {
            // Symbol uses due to a property access off of an imported symbol are tracked
            // specially. This lets us do tree shaking for cross-file TypeScript enums.
            if (p.options.bundle and !p.is_control_flow_dead) {
                const use = p.symbol_uses.getPtr(id.ref).?;
                use.count_estimate -|= 1;
                // note: this use is not removed as we assume it exists later

                // Add a special symbol use instead
                const gop = p.import_symbol_property_uses.getOrPutValue(
                    p.allocator,
                    id.ref,
                    .{},
                ) catch bun.outOfMemory();
                const inner_use = gop.value_ptr.getOrPutValue(
                    p.allocator,
                    name,
                    .{},
                ) catch bun.outOfMemory();
                inner_use.value_ptr.count_estimate += 1;
            }
        },
        inline .e_dot, .e_index => |data, tag| {
            // Nested namespace member access (A.B.C): keep resolving when the
            // target is the expression currently tracked as a TS namespace.
            if (p.ts_namespace.expr == tag and
                data == @field(p.ts_namespace.expr, @tagName(tag)) and
                identifier_opts.assign_target == .none and
                !identifier_opts.is_delete_target)
            {
                return p.maybeRewritePropertyAccessForNamespace(name, &target, loc, name_loc);
            }
        },
        .e_module_dot_exports => {
            if (p.shouldUnwrapCommonJSToESM()) {
                if (!p.is_control_flow_dead) {
                    if (!p.commonjs_named_exports_deoptimized) {
                        if (identifier_opts.is_delete_target) {
                            p.deoptimizeCommonJSNamedExports();
                            return null;
                        }

                        const named_export_entry = p.commonjs_named_exports.getOrPut(p.allocator, name) catch unreachable;
                        if (!named_export_entry.found_existing) {
                            const new_ref = p.newSymbol(
                                .other,
                                std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(name)}) catch unreachable,
                            ) catch unreachable;
                            p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
                            named_export_entry.value_ptr.* = .{
                                .loc_ref = LocRef{
                                    .loc = name_loc,
                                    .ref = new_ref,
                                },
                                .needs_decl = true,
                            };
                            if (p.commonjs_named_exports_needs_conversion == std.math.maxInt(u32))
                                p.commonjs_named_exports_needs_conversion = @as(u32, @truncate(p.commonjs_named_exports.count() - 1));
                        }

                        const ref = named_export_entry.value_ptr.*.loc_ref.ref.?;
                        p.recordUsage(ref);

                        return p.newExpr(
                            E.CommonJSExportIdentifier{
                                .ref = ref,
                                // Record this as from module.exports
                                .base = .module_dot_exports,
                            },
                            name_loc,
                        );
                    } else if (p.options.features.commonjs_at_runtime and identifier_opts.assign_target != .none) {
                        p.has_commonjs_export_names = true;
                    }
                }
            }
        },
        else => {},
    }

    return null;
}
|
||
|
||
/// Resolves a property access whose target is a tracked TypeScript namespace:
/// inlines enum number/string members, and for nested namespaces returns a
/// rebuilt property access tagged so further member accesses keep resolving.
/// Returns null when `name` is not a rewritable namespace member.
fn maybeRewritePropertyAccessForNamespace(
    p: *P,
    name: string,
    target: *const Expr,
    loc: logger.Loc,
    name_loc: logger.Loc,
) ?Expr {
    if (p.ts_namespace.map.?.get(name)) |value| {
        switch (value.data) {
            .enum_number => |num| {
                // The whole access chain is replaced by a constant, so roll
                // back the usage counts of the identifiers in the chain.
                p.ignoreUsageOfIdentifierInDotChain(target.*);
                return p.wrapInlinedEnum(
                    .{ .loc = loc, .data = .{ .e_number = .{ .value = num } } },
                    name,
                );
            },

            .enum_string => |str| {
                p.ignoreUsageOfIdentifierInDotChain(target.*);
                return p.wrapInlinedEnum(
                    .{ .loc = loc, .data = .{ .e_string = str } },
                    name,
                );
            },

            .namespace => |namespace| {
                // If this isn't a constant, return a clone of this property access
                // but with the namespace member data associated with it so that
                // more property accesses off of this property access are recognized.
                //
                // NOTE(review): the original code branched on
                // js_lexer.isIdentifier(name) here but built a byte-identical
                // E.Dot in both arms, so the dead conditional was collapsed.
                // A non-identifier name would normally warrant an E.Index
                // access instead — confirm intent upstream before changing.
                const expr = p.newExpr(E.Dot{
                    .target = target.*,
                    .name = name,
                    .name_loc = name_loc,
                }, loc);

                // Track the resulting expression as the current namespace so
                // chained accesses (A.B.C) continue to resolve.
                p.ts_namespace = .{
                    .expr = expr.data,
                    .map = namespace,
                };

                return expr;
            },

            else => {},
        }
    }

    return null;
}
|
||
|
||
/// Rolls back one recorded usage of `ref`: decrements the symbol's global
/// use-count estimate and the per-visit entry in `symbol_uses`, removing the
/// entry entirely once its count reaches zero.
///
/// The "tsUseCounts" increment is deliberately NOT rolled back here: the
/// TypeScript compiler counts a use even when the value ends up ignored.
pub fn ignoreUsage(p: *P, ref: Ref) void {
    // Dead code and substitution re-visits never recorded a usage to undo.
    if (p.is_control_flow_dead or p.is_revisit_for_substitution) return;

    if (comptime Environment.allow_assert) assert(@as(usize, ref.innerIndex()) < p.symbols.items.len);
    p.symbols.items[ref.innerIndex()].use_count_estimate -|= 1;

    var usage = p.symbol_uses.get(ref) orelse return;
    usage.count_estimate -|= 1;
    if (usage.count_estimate != 0) {
        // Still referenced: write the decremented count back.
        p.symbol_uses.putAssumeCapacity(ref, usage);
    } else {
        _ = p.symbol_uses.swapRemove(ref);
    }
}
|
||
|
||
/// Walks a property-access chain (e.g. `A.B["C"].D`) down to its base and, if
/// the base is an identifier, rolls back one usage of it. Chains containing
/// non-string index subscripts or non-access expressions are left untouched.
pub fn ignoreUsageOfIdentifierInDotChain(p: *P, expr: Expr) void {
    var link = expr;
    while (true) switch (link.data) {
        // Base of the chain: undo the usage count and stop.
        .e_identifier => |ident| return p.ignoreUsage(ident.ref),
        // "a.b" — keep walking toward the base.
        .e_dot => |dot| link = dot.target,
        // `a["b"]` participates only when the subscript is a string literal.
        .e_index => |idx| {
            if (!idx.index.isString()) return;
            link = idx.target;
        },
        // Anything else terminates the walk without adjusting counts.
        else => return,
    };
}
|
||
|
||
fn visitAndAppendStmt(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt) anyerror!void {
|
||
// By default any statement ends the const local prefix
|
||
const was_after_after_const_local_prefix = p.current_scope.is_after_const_local_prefix;
|
||
p.current_scope.is_after_const_local_prefix = true;
|
||
|
||
switch (stmt.data) {
|
||
// These don't contain anything to traverse
|
||
.s_debugger => {
|
||
p.current_scope.is_after_const_local_prefix = was_after_after_const_local_prefix;
|
||
if (p.define.drop_debugger) {
|
||
return;
|
||
}
|
||
},
|
||
.s_empty, .s_comment => {
|
||
p.current_scope.is_after_const_local_prefix = was_after_after_const_local_prefix;
|
||
},
|
||
.s_type_script => {
|
||
p.current_scope.is_after_const_local_prefix = was_after_after_const_local_prefix;
|
||
// Erase TypeScript constructs from the output completely
|
||
return;
|
||
},
|
||
.s_directive => {
|
||
p.current_scope.is_after_const_local_prefix = was_after_after_const_local_prefix;
|
||
},
|
||
.s_import => |data| {
|
||
try p.recordDeclaredSymbol(data.namespace_ref);
|
||
|
||
if (data.default_name) |default_name| {
|
||
try p.recordDeclaredSymbol(default_name.ref.?);
|
||
}
|
||
|
||
if (data.items.len > 0) {
|
||
for (data.items) |*item| {
|
||
try p.recordDeclaredSymbol(item.name.ref.?);
|
||
}
|
||
}
|
||
},
|
||
.s_export_clause => |data| {
|
||
// "export {foo}"
|
||
var end: usize = 0;
|
||
var any_replaced = false;
|
||
if (p.options.features.replace_exports.count() > 0) {
|
||
for (data.items) |*item| {
|
||
const name = p.loadNameFromRef(item.name.ref.?);
|
||
|
||
const symbol = try p.findSymbol(item.alias_loc, name);
|
||
const ref = symbol.ref;
|
||
|
||
if (p.options.features.replace_exports.getPtr(name)) |entry| {
|
||
if (entry.* != .replace) p.ignoreUsage(symbol.ref);
|
||
_ = p.injectReplacementExport(stmts, symbol.ref, stmt.loc, entry);
|
||
any_replaced = true;
|
||
continue;
|
||
}
|
||
|
||
if (p.symbols.items[ref.innerIndex()].kind == .unbound) {
|
||
// Silently strip exports of non-local symbols in TypeScript, since
|
||
// those likely correspond to type-only exports. But report exports of
|
||
// non-local symbols as errors in JavaScript.
|
||
if (!is_typescript_enabled) {
|
||
const r = js_lexer.rangeOfIdentifier(p.source, item.name.loc);
|
||
try p.log.addRangeErrorFmt(p.source, r, p.allocator, "\"{s}\" is not declared in this file", .{name});
|
||
}
|
||
continue;
|
||
}
|
||
|
||
item.name.ref = ref;
|
||
data.items[end] = item.*;
|
||
end += 1;
|
||
}
|
||
} else {
|
||
for (data.items) |*item| {
|
||
const name = p.loadNameFromRef(item.name.ref.?);
|
||
const symbol = try p.findSymbol(item.alias_loc, name);
|
||
const ref = symbol.ref;
|
||
|
||
if (p.symbols.items[ref.innerIndex()].kind == .unbound) {
|
||
// Silently strip exports of non-local symbols in TypeScript, since
|
||
// those likely correspond to type-only exports. But report exports of
|
||
// non-local symbols as errors in JavaScript.
|
||
if (!is_typescript_enabled) {
|
||
const r = js_lexer.rangeOfIdentifier(p.source, item.name.loc);
|
||
try p.log.addRangeErrorFmt(p.source, r, p.allocator, "\"{s}\" is not declared in this file", .{name});
|
||
continue;
|
||
}
|
||
continue;
|
||
}
|
||
|
||
item.name.ref = ref;
|
||
data.items[end] = item.*;
|
||
end += 1;
|
||
}
|
||
}
|
||
|
||
const remove_for_tree_shaking = any_replaced and end == 0 and data.items.len > 0 and p.options.tree_shaking;
|
||
data.items.len = end;
|
||
|
||
if (remove_for_tree_shaking) {
|
||
return;
|
||
}
|
||
},
|
||
.s_export_from => |data| {
|
||
// "export {foo} from 'path'"
|
||
const name = p.loadNameFromRef(data.namespace_ref);
|
||
|
||
data.namespace_ref = try p.newSymbol(.other, name);
|
||
try p.current_scope.generated.push(p.allocator, data.namespace_ref);
|
||
try p.recordDeclaredSymbol(data.namespace_ref);
|
||
|
||
if (p.options.features.replace_exports.count() > 0) {
|
||
var j: usize = 0;
|
||
// This is a re-export and the symbols created here are used to reference
|
||
for (data.items) |item| {
|
||
const old_ref = item.name.ref.?;
|
||
|
||
if (p.options.features.replace_exports.count() > 0) {
|
||
if (p.options.features.replace_exports.getPtr(item.alias)) |entry| {
|
||
_ = p.injectReplacementExport(stmts, old_ref, logger.Loc.Empty, entry);
|
||
|
||
continue;
|
||
}
|
||
}
|
||
|
||
const _name = p.loadNameFromRef(old_ref);
|
||
|
||
const ref = try p.newSymbol(.import, _name);
|
||
try p.current_scope.generated.push(p.allocator, ref);
|
||
try p.recordDeclaredSymbol(ref);
|
||
data.items[j] = item;
|
||
data.items[j].name.ref = ref;
|
||
j += 1;
|
||
}
|
||
|
||
data.items.len = j;
|
||
|
||
if (j == 0 and data.items.len > 0) {
|
||
return;
|
||
}
|
||
} else {
|
||
// This is a re-export and the symbols created here are used to reference
|
||
for (data.items) |*item| {
|
||
const _name = p.loadNameFromRef(item.name.ref.?);
|
||
const ref = try p.newSymbol(.import, _name);
|
||
try p.current_scope.generated.push(p.allocator, ref);
|
||
try p.recordDeclaredSymbol(ref);
|
||
item.name.ref = ref;
|
||
}
|
||
}
|
||
},
|
||
.s_export_star => |data| {
|
||
// "export * from 'path'"
|
||
const name = p.loadNameFromRef(data.namespace_ref);
|
||
data.namespace_ref = try p.newSymbol(.other, name);
|
||
try p.current_scope.generated.push(p.allocator, data.namespace_ref);
|
||
try p.recordDeclaredSymbol(data.namespace_ref);
|
||
|
||
// "export * as ns from 'path'"
|
||
if (data.alias) |alias| {
|
||
if (p.options.features.replace_exports.count() > 0) {
|
||
if (p.options.features.replace_exports.getPtr(alias.original_name)) |entry| {
|
||
_ = p.injectReplacementExport(stmts, p.declareSymbol(.other, logger.Loc.Empty, alias.original_name) catch unreachable, logger.Loc.Empty, entry);
|
||
return;
|
||
}
|
||
}
|
||
}
|
||
},
|
||
.s_export_default => |data| {
|
||
defer {
|
||
if (data.default_name.ref) |ref| {
|
||
p.recordDeclaredSymbol(ref) catch unreachable;
|
||
}
|
||
}
|
||
|
||
var mark_for_replace: bool = false;
|
||
|
||
const orig_dead = p.is_control_flow_dead;
|
||
if (p.options.features.replace_exports.count() > 0) {
|
||
if (p.options.features.replace_exports.getPtr("default")) |entry| {
|
||
p.is_control_flow_dead = p.options.features.dead_code_elimination and (entry.* != .replace);
|
||
mark_for_replace = true;
|
||
}
|
||
}
|
||
|
||
defer {
|
||
p.is_control_flow_dead = orig_dead;
|
||
}
|
||
|
||
switch (data.value) {
|
||
.expr => |expr| {
|
||
const was_anonymous_named_expr = expr.isAnonymousNamed();
|
||
|
||
data.value.expr = p.visitExpr(expr);
|
||
|
||
if (p.is_control_flow_dead) {
|
||
return;
|
||
}
|
||
|
||
// Optionally preserve the name
|
||
|
||
data.value.expr = p.maybeKeepExprSymbolName(data.value.expr, js_ast.ClauseItem.default_alias, was_anonymous_named_expr);
|
||
|
||
// Discard type-only export default statements
|
||
if (is_typescript_enabled) {
|
||
switch (data.value.expr.data) {
|
||
.e_identifier => |ident| {
|
||
if (!ident.ref.isSourceContentsSlice()) {
|
||
const symbol = p.symbols.items[ident.ref.innerIndex()];
|
||
if (symbol.kind == .unbound) {
|
||
if (p.local_type_names.get(symbol.original_name)) |local_type| {
|
||
if (local_type) {
|
||
// the name points to a type
|
||
// don't try to declare this symbol
|
||
data.default_name.ref = null;
|
||
return;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
|
||
if (data.default_name.ref.?.isSourceContentsSlice()) {
|
||
data.default_name = createDefaultName(p, data.value.expr.loc) catch unreachable;
|
||
}
|
||
|
||
if (p.options.features.server_components.wrapsExports()) {
|
||
data.value.expr = p.wrapValueForServerComponentReference(data.value.expr, "default");
|
||
}
|
||
|
||
// If there are lowered "using" declarations, change this into a "var"
|
||
if (p.current_scope.parent == null and p.will_wrap_module_in_try_catch_for_using) {
|
||
try stmts.ensureUnusedCapacity(2);
|
||
|
||
const decls = p.allocator.alloc(G.Decl, 1) catch bun.outOfMemory();
|
||
decls[0] = .{
|
||
.binding = p.b(B.Identifier{ .ref = data.default_name.ref.? }, data.default_name.loc),
|
||
.value = data.value.expr,
|
||
};
|
||
stmts.appendAssumeCapacity(p.s(S.Local{
|
||
.decls = G.Decl.List.init(decls),
|
||
}, stmt.loc));
|
||
const items = p.allocator.alloc(js_ast.ClauseItem, 1) catch bun.outOfMemory();
|
||
items[0] = js_ast.ClauseItem{
|
||
.alias = "default",
|
||
.alias_loc = data.default_name.loc,
|
||
.name = data.default_name,
|
||
};
|
||
stmts.appendAssumeCapacity(p.s(S.ExportClause{
|
||
.items = items,
|
||
}, stmt.loc));
|
||
}
|
||
|
||
if (mark_for_replace) {
|
||
const entry = p.options.features.replace_exports.getPtr("default").?;
|
||
if (entry.* == .replace) {
|
||
data.value.expr = entry.replace;
|
||
} else {
|
||
_ = p.injectReplacementExport(stmts, Ref.None, logger.Loc.Empty, entry);
|
||
return;
|
||
}
|
||
}
|
||
},
|
||
|
||
.stmt => |s2| {
|
||
switch (s2.data) {
|
||
.s_function => |func| {
|
||
var name: string = "";
|
||
if (func.func.name) |func_loc| {
|
||
name = p.loadNameFromRef(func_loc.ref.?);
|
||
} else {
|
||
func.func.name = data.default_name;
|
||
name = js_ast.ClauseItem.default_alias;
|
||
}
|
||
|
||
var react_hook_data: ?ReactRefresh.HookContext = null;
|
||
const prev = p.react_refresh.hook_ctx_storage;
|
||
defer p.react_refresh.hook_ctx_storage = prev;
|
||
p.react_refresh.hook_ctx_storage = &react_hook_data;
|
||
|
||
func.func = p.visitFunc(func.func, func.func.open_parens_loc);
|
||
|
||
if (react_hook_data) |*hook| {
|
||
stmts.append(p.getReactRefreshHookSignalDecl(hook.signature_cb)) catch bun.outOfMemory();
|
||
|
||
data.value = .{
|
||
.expr = p.getReactRefreshHookSignalInit(hook, p.newExpr(
|
||
E.Function{ .func = func.func },
|
||
stmt.loc,
|
||
)),
|
||
};
|
||
}
|
||
|
||
if (p.is_control_flow_dead) {
|
||
return;
|
||
}
|
||
|
||
if (mark_for_replace) {
|
||
const entry = p.options.features.replace_exports.getPtr("default").?;
|
||
if (entry.* == .replace) {
|
||
data.value = .{ .expr = entry.replace };
|
||
} else {
|
||
_ = p.injectReplacementExport(stmts, Ref.None, logger.Loc.Empty, entry);
|
||
return;
|
||
}
|
||
}
|
||
|
||
if (data.default_name.ref.?.isSourceContentsSlice()) {
|
||
data.default_name = createDefaultName(p, stmt.loc) catch unreachable;
|
||
}
|
||
|
||
if (p.options.features.server_components.wrapsExports()) {
|
||
data.value = .{ .expr = p.wrapValueForServerComponentReference(p.newExpr(E.Function{ .func = func.func }, stmt.loc), "default") };
|
||
}
|
||
|
||
stmts.append(stmt.*) catch unreachable;
|
||
|
||
// if (func.func.name != null and func.func.name.?.ref != null) {
|
||
// stmts.append(p.keepStmtSymbolName(func.func.name.?.loc, func.func.name.?.ref.?, name)) catch unreachable;
|
||
// }
|
||
// prevent doubling export default function name
|
||
return;
|
||
},
|
||
.s_class => |class| {
|
||
_ = p.visitClass(s2.loc, &class.class, data.default_name.ref.?);
|
||
|
||
if (p.is_control_flow_dead)
|
||
return;
|
||
|
||
if (mark_for_replace) {
|
||
const entry = p.options.features.replace_exports.getPtr("default").?;
|
||
if (entry.* == .replace) {
|
||
data.value = .{ .expr = entry.replace };
|
||
} else {
|
||
_ = p.injectReplacementExport(stmts, Ref.None, logger.Loc.Empty, entry);
|
||
return;
|
||
}
|
||
}
|
||
|
||
if (data.default_name.ref.?.isSourceContentsSlice()) {
|
||
data.default_name = createDefaultName(p, stmt.loc) catch unreachable;
|
||
}
|
||
|
||
// We only inject a name into classes when there is a decorator
|
||
if (class.class.has_decorators) {
|
||
if (class.class.class_name == null or
|
||
class.class.class_name.?.ref == null)
|
||
{
|
||
class.class.class_name = data.default_name;
|
||
}
|
||
}
|
||
|
||
// This is to handle TS decorators, mostly.
|
||
var class_stmts = p.lowerClass(.{ .stmt = s2 });
|
||
bun.assert(class_stmts[0].data == .s_class);
|
||
|
||
if (class_stmts.len > 1) {
|
||
data.value.stmt = class_stmts[0];
|
||
stmts.append(stmt.*) catch {};
|
||
stmts.appendSlice(class_stmts[1..]) catch {};
|
||
} else {
|
||
data.value.stmt = class_stmts[0];
|
||
stmts.append(stmt.*) catch {};
|
||
}
|
||
|
||
if (p.options.features.server_components.wrapsExports()) {
|
||
data.value = .{ .expr = p.wrapValueForServerComponentReference(p.newExpr(class.class, stmt.loc), "default") };
|
||
}
|
||
|
||
return;
|
||
},
|
||
else => {},
|
||
}
|
||
},
|
||
}
|
||
},
|
||
.s_export_equals => |data| {
|
||
// "module.exports = value"
|
||
stmts.append(
|
||
Stmt.assign(
|
||
p.@"module.exports"(stmt.loc),
|
||
p.visitExpr(data.value),
|
||
),
|
||
) catch unreachable;
|
||
p.recordUsage(p.module_ref);
|
||
return;
|
||
},
|
||
.s_break => |data| {
|
||
if (data.label) |*label| {
|
||
const name = p.loadNameFromRef(label.ref orelse p.panicLoc("Expected label to have a ref", .{}, label.loc));
|
||
const res = p.findLabelSymbol(label.loc, name);
|
||
if (res.found) {
|
||
label.ref = res.ref;
|
||
} else {
|
||
data.label = null;
|
||
}
|
||
} else if (!p.fn_or_arrow_data_visit.is_inside_loop and !p.fn_or_arrow_data_visit.is_inside_switch) {
|
||
const r = js_lexer.rangeOfIdentifier(p.source, stmt.loc);
|
||
p.log.addRangeError(p.source, r, "Cannot use \"break\" here") catch unreachable;
|
||
}
|
||
},
|
||
.s_continue => |data| {
|
||
if (data.label) |*label| {
|
||
const name = p.loadNameFromRef(label.ref orelse p.panicLoc("Expected continue label to have a ref", .{}, label.loc));
|
||
const res = p.findLabelSymbol(label.loc, name);
|
||
label.ref = res.ref;
|
||
if (res.found and !res.is_loop) {
|
||
const r = js_lexer.rangeOfIdentifier(p.source, stmt.loc);
|
||
p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot \"continue\" to label {s}", .{name}) catch unreachable;
|
||
}
|
||
} else if (!p.fn_or_arrow_data_visit.is_inside_loop) {
|
||
const r = js_lexer.rangeOfIdentifier(p.source, stmt.loc);
|
||
p.log.addRangeError(p.source, r, "Cannot use \"continue\" here") catch unreachable;
|
||
}
|
||
},
|
||
.s_label => |data| {
|
||
p.pushScopeForVisitPass(.label, stmt.loc) catch unreachable;
|
||
const name = p.loadNameFromRef(data.name.ref.?);
|
||
const ref = p.newSymbol(.label, name) catch unreachable;
|
||
data.name.ref = ref;
|
||
p.current_scope.label_ref = ref;
|
||
switch (data.stmt.data) {
|
||
.s_for, .s_for_in, .s_for_of, .s_while, .s_do_while => {
|
||
p.current_scope.label_stmt_is_loop = true;
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
data.stmt = p.visitSingleStmt(data.stmt, StmtsKind.none);
|
||
p.popScope();
|
||
},
|
||
.s_local => |data| {
|
||
// TODO: Silently remove unsupported top-level "await" in dead code branches
|
||
// (this was from 'await using' syntax)
|
||
|
||
// Local statements do not end the const local prefix
|
||
p.current_scope.is_after_const_local_prefix = was_after_after_const_local_prefix;
|
||
|
||
const decls_len = if (!(data.is_export and p.options.features.replace_exports.entries.len > 0))
|
||
p.visitDecls(data.decls.slice(), data.kind == .k_const, false)
|
||
else
|
||
p.visitDecls(data.decls.slice(), data.kind == .k_const, true);
|
||
|
||
const is_now_dead = data.decls.len > 0 and decls_len == 0;
|
||
if (is_now_dead) {
|
||
return;
|
||
}
|
||
|
||
data.decls.len = @as(u32, @truncate(decls_len));
|
||
|
||
// Handle being exported inside a namespace
|
||
if (data.is_export and p.enclosing_namespace_arg_ref != null) {
|
||
for (data.decls.slice()) |*d| {
|
||
if (d.value) |val| {
|
||
p.recordUsage((p.enclosing_namespace_arg_ref orelse unreachable));
|
||
// TODO: is it necessary to lowerAssign? why does esbuild do it _most_ of the time?
|
||
stmts.append(p.s(S.SExpr{
|
||
.value = Expr.assign(Binding.toExpr(&d.binding, p.to_expr_wrapper_namespace), val),
|
||
}, stmt.loc)) catch unreachable;
|
||
}
|
||
}
|
||
|
||
return;
|
||
}
|
||
|
||
// Optimization: Avoid unnecessary "using" machinery by changing ones
|
||
// initialized to "null" or "undefined" into a normal variable. Note that
|
||
// "await using" still needs the "await", so we can't do it for those.
|
||
if (p.options.features.minify_syntax and data.kind == .k_using) {
|
||
data.kind = .k_let;
|
||
for (data.decls.slice()) |*d| {
|
||
if (d.value) |val| {
|
||
if (val.data != .e_null and val.data != .e_undefined) {
|
||
data.kind = .k_using;
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
// We must relocate vars in order to safely handle removing if/else depending on NODE_ENV.
|
||
// Edgecase:
|
||
// `export var` is skipped because it's unnecessary. That *should* be a noop, but it loses the `is_export` flag if we're in HMR.
|
||
const kind = p.selectLocalKind(data.kind);
|
||
if (kind == .k_var and !data.is_export) {
|
||
const relocated = p.maybeRelocateVarsToTopLevel(data.decls.slice(), .normal);
|
||
if (relocated.ok) {
|
||
if (relocated.stmt) |new_stmt| {
|
||
stmts.append(new_stmt) catch unreachable;
|
||
}
|
||
|
||
return;
|
||
}
|
||
}
|
||
|
||
data.kind = kind;
|
||
try stmts.append(stmt.*);
|
||
|
||
if (data.is_export and p.options.features.server_components.wrapsExports()) {
|
||
for (data.decls.slice()) |*decl| try_annotate: {
|
||
const val = decl.value orelse break :try_annotate;
|
||
switch (val.data) {
|
||
.e_arrow, .e_function => {},
|
||
else => break :try_annotate,
|
||
}
|
||
const id = switch (decl.binding.data) {
|
||
.b_identifier => |id| id.ref,
|
||
else => break :try_annotate,
|
||
};
|
||
const original_name = p.symbols.items[id.innerIndex()].original_name;
|
||
decl.value = p.wrapValueForServerComponentReference(val, original_name);
|
||
}
|
||
}
|
||
|
||
if (p.options.features.react_fast_refresh and p.current_scope == p.module_scope) {
|
||
for (data.decls.slice()) |decl| try_register: {
|
||
const val = decl.value orelse break :try_register;
|
||
switch (val.data) {
|
||
.e_arrow, .e_function => {},
|
||
else => break :try_register,
|
||
}
|
||
const id = switch (decl.binding.data) {
|
||
.b_identifier => |id| id.ref,
|
||
else => break :try_register,
|
||
};
|
||
const original_name = p.symbols.items[id.innerIndex()].original_name;
|
||
try p.handleReactRefreshRegister(stmts, original_name, id);
|
||
}
|
||
}
|
||
|
||
return;
|
||
},
|
||
.s_expr => |data| {
|
||
const should_trim_primitive = p.options.features.dead_code_elimination and
|
||
(p.options.features.minify_syntax and data.value.isPrimitiveLiteral());
|
||
p.stmt_expr_value = data.value.data;
|
||
defer p.stmt_expr_value = .{ .e_missing = .{} };
|
||
|
||
const is_top_level = p.current_scope == p.module_scope;
|
||
if (p.shouldUnwrapCommonJSToESM()) {
|
||
p.commonjs_named_exports_needs_conversion = if (is_top_level)
|
||
std.math.maxInt(u32)
|
||
else
|
||
p.commonjs_named_exports_needs_conversion;
|
||
}
|
||
|
||
data.value = p.visitExpr(data.value);
|
||
|
||
if (should_trim_primitive and data.value.isPrimitiveLiteral()) {
|
||
return;
|
||
}
|
||
|
||
// simplify unused
|
||
data.value = SideEffects.simplifyUnusedExpr(p, data.value) orelse return;
|
||
|
||
if (p.shouldUnwrapCommonJSToESM()) {
|
||
if (is_top_level) {
|
||
if (data.value.data == .e_binary) {
|
||
const to_convert = p.commonjs_named_exports_needs_conversion;
|
||
if (to_convert != std.math.maxInt(u32)) {
|
||
p.commonjs_named_exports_needs_conversion = std.math.maxInt(u32);
|
||
convert: {
|
||
const bin: *E.Binary = data.value.data.e_binary;
|
||
if (bin.op == .bin_assign and bin.left.data == .e_commonjs_export_identifier) {
|
||
var last = &p.commonjs_named_exports.values()[to_convert];
|
||
if (!last.needs_decl) break :convert;
|
||
last.needs_decl = false;
|
||
|
||
var decls = p.allocator.alloc(Decl, 1) catch unreachable;
|
||
const ref = bin.left.data.e_commonjs_export_identifier.ref;
|
||
decls[0] = .{
|
||
.binding = p.b(B.Identifier{ .ref = ref }, bin.left.loc),
|
||
.value = bin.right,
|
||
};
|
||
// we have to ensure these are known to be top-level
|
||
p.declared_symbols.append(p.allocator, .{
|
||
.ref = ref,
|
||
.is_top_level = true,
|
||
}) catch unreachable;
|
||
p.esm_export_keyword.loc = stmt.loc;
|
||
p.esm_export_keyword.len = 5;
|
||
p.had_commonjs_named_exports_this_visit = true;
|
||
var clause_items = p.allocator.alloc(js_ast.ClauseItem, 1) catch unreachable;
|
||
clause_items[0] = js_ast.ClauseItem{
|
||
// We want the generated name to not conflict
|
||
.alias = p.commonjs_named_exports.keys()[to_convert],
|
||
.alias_loc = bin.left.loc,
|
||
.name = .{
|
||
.ref = ref,
|
||
.loc = last.loc_ref.loc,
|
||
},
|
||
};
|
||
stmts.appendSlice(
|
||
&[_]Stmt{
|
||
p.s(
|
||
S.Local{
|
||
.kind = .k_var,
|
||
.is_export = false,
|
||
.was_commonjs_export = true,
|
||
.decls = G.Decl.List.init(decls),
|
||
},
|
||
stmt.loc,
|
||
),
|
||
p.s(
|
||
S.ExportClause{
|
||
.items = clause_items,
|
||
.is_single_line = true,
|
||
},
|
||
stmt.loc,
|
||
),
|
||
},
|
||
) catch unreachable;
|
||
|
||
return;
|
||
}
|
||
}
|
||
} else if (p.commonjs_replacement_stmts.len > 0) {
|
||
if (stmts.items.len == 0) {
|
||
stmts.items = p.commonjs_replacement_stmts;
|
||
stmts.capacity = p.commonjs_replacement_stmts.len;
|
||
p.commonjs_replacement_stmts.len = 0;
|
||
} else {
|
||
stmts.appendSlice(p.commonjs_replacement_stmts) catch unreachable;
|
||
p.commonjs_replacement_stmts.len = 0;
|
||
}
|
||
|
||
return;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
},
|
||
.s_throw => |data| {
|
||
data.value = p.visitExpr(data.value);
|
||
},
|
||
.s_return => |data| {
|
||
// Forbid top-level return inside modules with ECMAScript-style exports
|
||
if (p.fn_or_arrow_data_visit.is_outside_fn_or_arrow) {
|
||
const where = where: {
|
||
if (p.esm_export_keyword.len > 0) {
|
||
break :where p.esm_export_keyword;
|
||
} else if (p.top_level_await_keyword.len > 0) {
|
||
break :where p.top_level_await_keyword;
|
||
} else {
|
||
break :where logger.Range.None;
|
||
}
|
||
};
|
||
|
||
if (where.len > 0) {
|
||
p.log.addRangeError(p.source, where, "Top-level return cannot be used inside an ECMAScript module") catch unreachable;
|
||
}
|
||
}
|
||
|
||
if (data.value) |val| {
|
||
data.value = p.visitExpr(val);
|
||
|
||
// "return undefined;" can safely just always be "return;"
|
||
if (data.value != null and @as(Expr.Tag, data.value.?.data) == .e_undefined) {
|
||
// Returning undefined is implicit
|
||
data.value = null;
|
||
}
|
||
}
|
||
},
|
||
.s_block => |data| {
|
||
{
|
||
p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable;
|
||
|
||
// Pass the "is loop body" status on to the direct children of a block used
|
||
// as a loop body. This is used to enable optimizations specific to the
|
||
// topmost scope in a loop body block.
|
||
const kind = if (std.meta.eql(p.loop_body, stmt.data)) StmtsKind.loop_body else StmtsKind.none;
|
||
var _stmts = ListManaged(Stmt).fromOwnedSlice(p.allocator, data.stmts);
|
||
p.visitStmts(&_stmts, kind) catch unreachable;
|
||
data.stmts = _stmts.items;
|
||
p.popScope();
|
||
}
|
||
|
||
if (p.options.features.minify_syntax) {
|
||
// trim empty statements
|
||
if (data.stmts.len == 0) {
|
||
stmts.append(Stmt{ .data = Prefill.Data.SEmpty, .loc = stmt.loc }) catch unreachable;
|
||
return;
|
||
} else if (data.stmts.len == 1 and !statementCaresAboutScope(data.stmts[0])) {
|
||
// Unwrap blocks containing a single statement
|
||
stmts.append(data.stmts[0]) catch unreachable;
|
||
return;
|
||
}
|
||
}
|
||
},
|
||
.s_with => |data| {
|
||
data.value = p.visitExpr(data.value);
|
||
|
||
p.pushScopeForVisitPass(.with, data.body_loc) catch unreachable;
|
||
|
||
// This can be many different kinds of statements.
|
||
// example code:
|
||
//
|
||
// with(this.document.defaultView || Object.create(null))
|
||
// with(this.document)
|
||
// with(this.form)
|
||
// with(this.element)
|
||
//
|
||
data.body = p.visitSingleStmt(data.body, StmtsKind.none);
|
||
|
||
p.popScope();
|
||
},
|
||
.s_while => |data| {
|
||
data.test_ = p.visitExpr(data.test_);
|
||
data.body = p.visitLoopBody(data.body);
|
||
|
||
data.test_ = SideEffects.simplifyBoolean(p, data.test_);
|
||
const result = SideEffects.toBoolean(p, data.test_.data);
|
||
if (result.ok and result.side_effects == .no_side_effects) {
|
||
data.test_ = p.newExpr(E.Boolean{ .value = result.value }, data.test_.loc);
|
||
}
|
||
},
|
||
.s_do_while => |data| {
|
||
data.body = p.visitLoopBody(data.body);
|
||
data.test_ = p.visitExpr(data.test_);
|
||
|
||
data.test_ = SideEffects.simplifyBoolean(p, data.test_);
|
||
},
|
||
.s_if => |data| {
|
||
data.test_ = p.visitExpr(data.test_);
|
||
|
||
if (p.options.features.minify_syntax) {
|
||
data.test_ = SideEffects.simplifyBoolean(p, data.test_);
|
||
}
|
||
|
||
const effects = SideEffects.toBoolean(p, data.test_.data);
|
||
if (effects.ok and !effects.value) {
|
||
const old = p.is_control_flow_dead;
|
||
p.is_control_flow_dead = true;
|
||
data.yes = p.visitSingleStmt(data.yes, StmtsKind.none);
|
||
p.is_control_flow_dead = old;
|
||
} else {
|
||
data.yes = p.visitSingleStmt(data.yes, StmtsKind.none);
|
||
}
|
||
|
||
// The "else" clause is optional
|
||
if (data.no) |no| {
|
||
if (effects.ok and effects.value) {
|
||
const old = p.is_control_flow_dead;
|
||
p.is_control_flow_dead = true;
|
||
defer p.is_control_flow_dead = old;
|
||
data.no = p.visitSingleStmt(no, .none);
|
||
} else {
|
||
data.no = p.visitSingleStmt(no, .none);
|
||
}
|
||
|
||
// Trim unnecessary "else" clauses
|
||
if (p.options.features.minify_syntax) {
|
||
if (data.no != null and @as(Stmt.Tag, data.no.?.data) == .s_empty) {
|
||
data.no = null;
|
||
}
|
||
}
|
||
}
|
||
|
||
if (p.options.features.minify_syntax) {
|
||
if (effects.ok) {
|
||
if (effects.value) {
|
||
if (data.no == null or !SideEffects.shouldKeepStmtInDeadControlFlow(p, data.no.?, p.allocator)) {
|
||
if (effects.side_effects == .could_have_side_effects) {
|
||
// Keep the condition if it could have side effects (but is still known to be truthy)
|
||
if (SideEffects.simplifyUnusedExpr(p, data.test_)) |test_| {
|
||
stmts.append(p.s(S.SExpr{ .value = test_ }, test_.loc)) catch unreachable;
|
||
}
|
||
}
|
||
|
||
return try p.appendIfBodyPreservingScope(stmts, data.yes);
|
||
} else {
|
||
// We have to keep the "no" branch
|
||
}
|
||
} else {
|
||
// The test is falsy
|
||
if (!SideEffects.shouldKeepStmtInDeadControlFlow(p, data.yes, p.allocator)) {
|
||
if (effects.side_effects == .could_have_side_effects) {
|
||
// Keep the condition if it could have side effects (but is still known to be falsy)
|
||
if (SideEffects.simplifyUnusedExpr(p, data.test_)) |test_| {
|
||
stmts.append(p.s(S.SExpr{ .value = test_ }, test_.loc)) catch unreachable;
|
||
}
|
||
}
|
||
|
||
if (data.no == null) {
|
||
return;
|
||
}
|
||
|
||
return try p.appendIfBodyPreservingScope(stmts, data.no.?);
|
||
}
|
||
}
|
||
}
|
||
|
||
// TODO: more if statement syntax minification
|
||
const can_remove_test = p.exprCanBeRemovedIfUnused(&data.test_);
|
||
switch (data.yes.data) {
|
||
.s_expr => |yes_expr| {
|
||
if (yes_expr.value.isMissing()) {
|
||
if (data.no == null) {
|
||
if (can_remove_test) {
|
||
return;
|
||
}
|
||
} else if (data.no.?.isMissingExpr() and can_remove_test) {
|
||
return;
|
||
}
|
||
}
|
||
},
|
||
.s_empty => {
|
||
if (data.no == null) {
|
||
if (can_remove_test) {
|
||
return;
|
||
}
|
||
} else if (data.no.?.isMissingExpr() and can_remove_test) {
|
||
return;
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
},
|
||
.s_for => |data| {
|
||
p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable;
|
||
|
||
if (data.init) |initst| {
|
||
data.init = p.visitForLoopInit(initst, false);
|
||
}
|
||
|
||
if (data.test_) |test_| {
|
||
data.test_ = SideEffects.simplifyBoolean(p, p.visitExpr(test_));
|
||
|
||
const result = SideEffects.toBoolean(p, data.test_.?.data);
|
||
if (result.ok and result.value and result.side_effects == .no_side_effects) {
|
||
data.test_ = null;
|
||
}
|
||
}
|
||
|
||
if (data.update) |update| {
|
||
data.update = p.visitExpr(update);
|
||
}
|
||
|
||
data.body = p.visitLoopBody(data.body);
|
||
|
||
if (data.init) |for_init| {
|
||
if (for_init.data == .s_local) {
|
||
// Potentially relocate "var" declarations to the top level. Note that this
|
||
// must be done inside the scope of the for loop or they won't be relocated.
|
||
if (for_init.data.s_local.kind == .k_var) {
|
||
const relocate = p.maybeRelocateVarsToTopLevel(for_init.data.s_local.decls.slice(), .normal);
|
||
if (relocate.stmt) |relocated| {
|
||
data.init = relocated;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
p.popScope();
|
||
},
|
||
.s_for_in => |data| {
|
||
{
|
||
p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable;
|
||
defer p.popScope();
|
||
_ = p.visitForLoopInit(data.init, true);
|
||
data.value = p.visitExpr(data.value);
|
||
data.body = p.visitLoopBody(data.body);
|
||
|
||
// Check for a variable initializer
|
||
if (data.init.data == .s_local and data.init.data.s_local.kind == .k_var) {
|
||
// Lower for-in variable initializers in case the output is used in strict mode
|
||
var local = data.init.data.s_local;
|
||
if (local.decls.len == 1) {
|
||
var decl: *G.Decl = &local.decls.ptr[0];
|
||
if (decl.binding.data == .b_identifier) {
|
||
if (decl.value) |val| {
|
||
stmts.append(
|
||
Stmt.assign(
|
||
Expr.initIdentifier(decl.binding.data.b_identifier.ref, decl.binding.loc),
|
||
val,
|
||
),
|
||
) catch unreachable;
|
||
decl.value = null;
|
||
}
|
||
}
|
||
}
|
||
|
||
const relocate = p.maybeRelocateVarsToTopLevel(data.init.data.s_local.decls.slice(), RelocateVars.Mode.for_in_or_for_of);
|
||
if (relocate.stmt) |relocated_stmt| {
|
||
data.init = relocated_stmt;
|
||
}
|
||
}
|
||
}
|
||
},
|
||
.s_for_of => |data| {
|
||
p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable;
|
||
defer p.popScope();
|
||
_ = p.visitForLoopInit(data.init, true);
|
||
data.value = p.visitExpr(data.value);
|
||
data.body = p.visitLoopBody(data.body);
|
||
|
||
if (data.init.data == .s_local) {
|
||
if (data.init.data.s_local.kind == .k_var) {
|
||
const relocate = p.maybeRelocateVarsToTopLevel(data.init.data.s_local.decls.slice(), RelocateVars.Mode.for_in_or_for_of);
|
||
if (relocate.stmt) |relocated_stmt| {
|
||
data.init = relocated_stmt;
|
||
}
|
||
}
|
||
|
||
// Handle "for (using x of y)" and "for (await using x of y)"
|
||
if (data.init.data == .s_local and data.init.data.s_local.kind.isUsing() and p.options.features.lower_using) {
|
||
// fn lowerUsingDeclarationInForOf()
|
||
const loc = data.init.loc;
|
||
const init2 = data.init.data.s_local;
|
||
const binding = init2.decls.at(0).binding;
|
||
var id = binding.data.b_identifier;
|
||
const temp_ref = p.generateTempRef(p.symbols.items[id.ref.inner_index].original_name);
|
||
|
||
const first = p.s(S.Local{
|
||
.kind = init2.kind,
|
||
.decls = bindings: {
|
||
const decls = p.allocator.alloc(G.Decl, 1) catch bun.outOfMemory();
|
||
decls[0] = .{
|
||
.binding = p.b(B.Identifier{ .ref = id.ref }, loc),
|
||
.value = p.newExpr(E.Identifier{ .ref = temp_ref }, loc),
|
||
};
|
||
break :bindings G.Decl.List.init(decls);
|
||
},
|
||
}, loc);
|
||
|
||
const length = if (data.body.data == .s_block) data.body.data.s_block.stmts.len else 1;
|
||
const statements = p.allocator.alloc(Stmt, 1 + length) catch bun.outOfMemory();
|
||
statements[0] = first;
|
||
if (data.body.data == .s_block) {
|
||
@memcpy(statements[1..], data.body.data.s_block.stmts);
|
||
} else {
|
||
statements[1] = data.body;
|
||
}
|
||
|
||
var ctx = try P.LowerUsingDeclarationsContext.init(p);
|
||
ctx.scanStmts(p, statements);
|
||
const visited_stmts = ctx.finalize(p, statements, p.will_wrap_module_in_try_catch_for_using and p.current_scope.parent == null);
|
||
if (data.body.data == .s_block) {
|
||
data.body.data.s_block.stmts = visited_stmts.items;
|
||
} else {
|
||
data.body = p.s(S.Block{
|
||
.stmts = visited_stmts.items,
|
||
}, loc);
|
||
}
|
||
id.ref = temp_ref;
|
||
init2.kind = .k_const;
|
||
}
|
||
}
|
||
},
|
||
.s_try => |data| {
|
||
p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable;
|
||
{
|
||
var _stmts = ListManaged(Stmt).fromOwnedSlice(p.allocator, data.body);
|
||
p.fn_or_arrow_data_visit.try_body_count += 1;
|
||
p.visitStmts(&_stmts, StmtsKind.none) catch unreachable;
|
||
p.fn_or_arrow_data_visit.try_body_count -= 1;
|
||
data.body = _stmts.items;
|
||
}
|
||
p.popScope();
|
||
|
||
if (data.catch_) |*catch_| {
|
||
p.pushScopeForVisitPass(.catch_binding, catch_.loc) catch unreachable;
|
||
{
|
||
if (catch_.binding) |catch_binding| {
|
||
p.visitBinding(catch_binding, null);
|
||
}
|
||
var _stmts = ListManaged(Stmt).fromOwnedSlice(p.allocator, catch_.body);
|
||
p.pushScopeForVisitPass(.block, catch_.body_loc) catch unreachable;
|
||
p.visitStmts(&_stmts, StmtsKind.none) catch unreachable;
|
||
p.popScope();
|
||
catch_.body = _stmts.items;
|
||
}
|
||
p.popScope();
|
||
}
|
||
|
||
if (data.finally) |*finally| {
|
||
p.pushScopeForVisitPass(.block, finally.loc) catch unreachable;
|
||
{
|
||
var _stmts = ListManaged(Stmt).fromOwnedSlice(p.allocator, finally.stmts);
|
||
p.visitStmts(&_stmts, StmtsKind.none) catch unreachable;
|
||
finally.stmts = _stmts.items;
|
||
}
|
||
p.popScope();
|
||
}
|
||
},
|
||
.s_switch => |data| {
|
||
data.test_ = p.visitExpr(data.test_);
|
||
{
|
||
p.pushScopeForVisitPass(.block, data.body_loc) catch unreachable;
|
||
defer p.popScope();
|
||
const old_is_inside_Swsitch = p.fn_or_arrow_data_visit.is_inside_switch;
|
||
p.fn_or_arrow_data_visit.is_inside_switch = true;
|
||
defer p.fn_or_arrow_data_visit.is_inside_switch = old_is_inside_Swsitch;
|
||
for (data.cases, 0..) |case, i| {
|
||
if (case.value) |val| {
|
||
data.cases[i].value = p.visitExpr(val);
|
||
// TODO: error messages
|
||
// Check("case", *c.Value, c.Value.Loc)
|
||
// p.warnAboutTypeofAndString(s.Test, *c.Value)
|
||
}
|
||
var _stmts = ListManaged(Stmt).fromOwnedSlice(p.allocator, case.body);
|
||
p.visitStmts(&_stmts, StmtsKind.none) catch unreachable;
|
||
data.cases[i].body = _stmts.items;
|
||
}
|
||
}
|
||
// TODO: duplicate case checker
|
||
|
||
},
|
||
.s_function => |data| {
|
||
// We mark it as dead, but the value may not actually be dead
|
||
// We just want to be sure to not increment the usage counts for anything in the function
|
||
const mark_as_dead = p.options.features.dead_code_elimination and data.func.flags.contains(.is_export) and
|
||
p.options.features.replace_exports.count() > 0 and p.isExportToEliminate(data.func.name.?.ref.?);
|
||
const original_is_dead = p.is_control_flow_dead;
|
||
|
||
if (mark_as_dead) {
|
||
p.is_control_flow_dead = true;
|
||
}
|
||
defer {
|
||
if (mark_as_dead) {
|
||
p.is_control_flow_dead = original_is_dead;
|
||
}
|
||
}
|
||
|
||
var react_hook_data: ?ReactRefresh.HookContext = null;
|
||
const prev = p.react_refresh.hook_ctx_storage;
|
||
defer p.react_refresh.hook_ctx_storage = prev;
|
||
p.react_refresh.hook_ctx_storage = &react_hook_data;
|
||
|
||
data.func = p.visitFunc(data.func, data.func.open_parens_loc);
|
||
|
||
const name_ref = data.func.name.?.ref.?;
|
||
bun.assert(name_ref.tag == .symbol);
|
||
const name_symbol = &p.symbols.items[name_ref.innerIndex()];
|
||
const original_name = name_symbol.original_name;
|
||
|
||
// Handle exporting this function from a namespace
|
||
if (data.func.flags.contains(.is_export) and p.enclosing_namespace_arg_ref != null) {
|
||
data.func.flags.remove(.is_export);
|
||
|
||
const enclosing_namespace_arg_ref = p.enclosing_namespace_arg_ref orelse bun.outOfMemory();
|
||
stmts.ensureUnusedCapacity(3) catch bun.outOfMemory();
|
||
stmts.appendAssumeCapacity(stmt.*);
|
||
stmts.appendAssumeCapacity(Stmt.assign(
|
||
p.newExpr(E.Dot{
|
||
.target = p.newExpr(E.Identifier{ .ref = enclosing_namespace_arg_ref }, stmt.loc),
|
||
.name = original_name,
|
||
.name_loc = data.func.name.?.loc,
|
||
}, stmt.loc),
|
||
p.newExpr(E.Identifier{ .ref = data.func.name.?.ref.? }, data.func.name.?.loc),
|
||
));
|
||
} else if (!mark_as_dead) {
|
||
if (name_symbol.remove_overwritten_function_declaration) {
|
||
return;
|
||
}
|
||
|
||
if (p.options.features.server_components.wrapsExports() and data.func.flags.contains(.is_export)) {
|
||
// Convert this into `export var <name> = registerClientReference(<func>, ...);`
|
||
const name = data.func.name.?;
|
||
// From the inner scope, have code reference the wrapped function.
|
||
data.func.name = null;
|
||
try stmts.append(p.s(S.Local{
|
||
.kind = .k_var,
|
||
.is_export = true,
|
||
.decls = try G.Decl.List.fromSlice(p.allocator, &.{.{
|
||
.binding = p.b(B.Identifier{ .ref = name_ref }, name.loc),
|
||
.value = p.wrapValueForServerComponentReference(
|
||
p.newExpr(E.Function{ .func = data.func }, stmt.loc),
|
||
original_name,
|
||
),
|
||
}}),
|
||
}, stmt.loc));
|
||
} else {
|
||
stmts.append(stmt.*) catch bun.outOfMemory();
|
||
}
|
||
} else if (mark_as_dead) {
|
||
if (p.options.features.replace_exports.getPtr(original_name)) |replacement| {
|
||
_ = p.injectReplacementExport(stmts, name_ref, data.func.name.?.loc, replacement);
|
||
}
|
||
}
|
||
|
||
if (p.options.features.react_fast_refresh) {
|
||
if (react_hook_data) |*hook| {
|
||
try stmts.append(p.getReactRefreshHookSignalDecl(hook.signature_cb));
|
||
try stmts.append(p.s(S.SExpr{
|
||
.value = p.getReactRefreshHookSignalInit(hook, Expr.initIdentifier(name_ref, logger.Loc.Empty)),
|
||
}, logger.Loc.Empty));
|
||
}
|
||
|
||
if (p.current_scope == p.module_scope) {
|
||
try p.handleReactRefreshRegister(stmts, original_name, name_ref);
|
||
}
|
||
}
|
||
|
||
return;
|
||
},
|
||
.s_class => |data| {
|
||
const mark_as_dead = p.options.features.dead_code_elimination and data.is_export and
|
||
p.options.features.replace_exports.count() > 0 and p.isExportToEliminate(data.class.class_name.?.ref.?);
|
||
const original_is_dead = p.is_control_flow_dead;
|
||
|
||
if (mark_as_dead) {
|
||
p.is_control_flow_dead = true;
|
||
}
|
||
defer {
|
||
if (mark_as_dead) {
|
||
p.is_control_flow_dead = original_is_dead;
|
||
}
|
||
}
|
||
|
||
_ = p.visitClass(stmt.loc, &data.class, Ref.None);
|
||
|
||
// Remove the export flag inside a namespace
|
||
const was_export_inside_namespace = data.is_export and p.enclosing_namespace_arg_ref != null;
|
||
if (was_export_inside_namespace) {
|
||
data.is_export = false;
|
||
}
|
||
|
||
const lowered = p.lowerClass(js_ast.StmtOrExpr{ .stmt = stmt.* });
|
||
|
||
if (!mark_as_dead or was_export_inside_namespace)
|
||
// Lower class field syntax for browsers that don't support it
|
||
stmts.appendSlice(lowered) catch unreachable
|
||
else {
|
||
const ref = data.class.class_name.?.ref.?;
|
||
if (p.options.features.replace_exports.getPtr(p.loadNameFromRef(ref))) |replacement| {
|
||
if (p.injectReplacementExport(stmts, ref, data.class.class_name.?.loc, replacement)) {
|
||
p.is_control_flow_dead = original_is_dead;
|
||
}
|
||
}
|
||
}
|
||
|
||
// Handle exporting this class from a namespace
|
||
if (was_export_inside_namespace) {
|
||
stmts.append(
|
||
Stmt.assign(
|
||
p.newExpr(
|
||
E.Dot{
|
||
.target = p.newExpr(
|
||
E.Identifier{ .ref = p.enclosing_namespace_arg_ref.? },
|
||
stmt.loc,
|
||
),
|
||
.name = p.symbols.items[data.class.class_name.?.ref.?.innerIndex()].original_name,
|
||
.name_loc = data.class.class_name.?.loc,
|
||
},
|
||
stmt.loc,
|
||
),
|
||
p.newExpr(
|
||
E.Identifier{ .ref = data.class.class_name.?.ref.? },
|
||
data.class.class_name.?.loc,
|
||
),
|
||
),
|
||
) catch unreachable;
|
||
}
|
||
|
||
return;
|
||
},
|
||
.s_enum => |data| {
|
||
// Do not end the const local prefix after TypeScript enums. We process
|
||
// them first within their scope so that they are inlined into all code in
|
||
// that scope. We don't want that to cause the const local prefix to end.
|
||
p.current_scope.is_after_const_local_prefix = was_after_after_const_local_prefix;
|
||
|
||
// Track cross-module enum constants during bundling. This
|
||
// part of the code is different from esbuilt in that we are
|
||
// only storing a list of enum indexes. At the time of
|
||
// referencing, `esbuild` builds a separate hash map of hash
|
||
// maps. We are avoiding that to reduce memory usage, since
|
||
// enum inlining already uses alot of hash maps.
|
||
if (p.current_scope == p.module_scope and p.options.bundle) {
|
||
try p.top_level_enums.append(p.allocator, data.name.ref.?);
|
||
}
|
||
|
||
p.recordDeclaredSymbol(data.name.ref.?) catch bun.outOfMemory();
|
||
p.pushScopeForVisitPass(.entry, stmt.loc) catch bun.outOfMemory();
|
||
defer p.popScope();
|
||
p.recordDeclaredSymbol(data.arg) catch bun.outOfMemory();
|
||
|
||
const allocator = p.allocator;
|
||
// Scan ahead for any variables inside this namespace. This must be done
|
||
// ahead of time before visiting any statements inside the namespace
|
||
// because we may end up visiting the uses before the declarations.
|
||
// We need to convert the uses into property accesses on the namespace.
|
||
for (data.values) |value| {
|
||
if (value.ref.isValid()) {
|
||
p.is_exported_inside_namespace.put(allocator, value.ref, data.arg) catch bun.outOfMemory();
|
||
}
|
||
}
|
||
|
||
// Values without initializers are initialized to one more than the
|
||
// previous value if the previous value is numeric. Otherwise values
|
||
// without initializers are initialized to undefined.
|
||
var next_numeric_value: ?f64 = 0.0;
|
||
|
||
var value_exprs = ListManaged(Expr).initCapacity(allocator, data.values.len) catch bun.outOfMemory();
|
||
|
||
var all_values_are_pure = true;
|
||
|
||
const exported_members = p.current_scope.ts_namespace.?.exported_members;
|
||
|
||
// We normally don't fold numeric constants because they might increase code
|
||
// size, but it's important to fold numeric constants inside enums since
|
||
// that's what the TypeScript compiler does.
|
||
const old_should_fold_typescript_constant_expressions = p.should_fold_typescript_constant_expressions;
|
||
p.should_fold_typescript_constant_expressions = true;
|
||
|
||
// Create an assignment for each enum value
|
||
for (data.values) |*value| {
|
||
const name = value.name;
|
||
|
||
var has_string_value = false;
|
||
if (value.value) |enum_value| {
|
||
next_numeric_value = null;
|
||
|
||
const visited = p.visitExpr(enum_value);
|
||
|
||
// "See through" any wrapped comments
|
||
const underlying_value = if (visited.data == .e_inlined_enum)
|
||
visited.data.e_inlined_enum.value
|
||
else
|
||
visited;
|
||
value.value = underlying_value;
|
||
|
||
switch (underlying_value.data) {
|
||
.e_number => |num| {
|
||
exported_members.getPtr(name).?.data = .{ .enum_number = num.value };
|
||
|
||
p.ref_to_ts_namespace_member.put(
|
||
p.allocator,
|
||
value.ref,
|
||
.{ .enum_number = num.value },
|
||
) catch bun.outOfMemory();
|
||
|
||
next_numeric_value = num.value + 1.0;
|
||
},
|
||
.e_string => |str| {
|
||
has_string_value = true;
|
||
|
||
exported_members.getPtr(name).?.data = .{ .enum_string = str };
|
||
|
||
p.ref_to_ts_namespace_member.put(
|
||
p.allocator,
|
||
value.ref,
|
||
.{ .enum_string = str },
|
||
) catch bun.outOfMemory();
|
||
},
|
||
else => {
|
||
if (visited.knownPrimitive() == .string) {
|
||
has_string_value = true;
|
||
}
|
||
|
||
if (!p.exprCanBeRemovedIfUnused(&visited)) {
|
||
all_values_are_pure = false;
|
||
}
|
||
},
|
||
}
|
||
} else if (next_numeric_value) |num| {
|
||
value.value = p.newExpr(E.Number{ .value = num }, value.loc);
|
||
|
||
next_numeric_value = num + 1;
|
||
|
||
exported_members.getPtr(name).?.data = .{ .enum_number = num };
|
||
|
||
p.ref_to_ts_namespace_member.put(
|
||
p.allocator,
|
||
value.ref,
|
||
.{ .enum_number = num },
|
||
) catch bun.outOfMemory();
|
||
} else {
|
||
value.value = p.newExpr(E.Undefined{}, value.loc);
|
||
}
|
||
|
||
const is_assign_target = p.options.features.minify_syntax and bun.js_lexer.isIdentifier(value.name);
|
||
|
||
const name_as_e_string = if (!is_assign_target or !has_string_value)
|
||
p.newExpr(value.nameAsEString(allocator), value.loc)
|
||
else
|
||
null;
|
||
|
||
const assign_target = if (is_assign_target)
|
||
// "Enum.Name = value"
|
||
Expr.assign(
|
||
p.newExpr(E.Dot{
|
||
.target = p.newExpr(
|
||
E.Identifier{ .ref = data.arg },
|
||
value.loc,
|
||
),
|
||
.name = value.name,
|
||
.name_loc = value.loc,
|
||
}, value.loc),
|
||
value.value.?,
|
||
)
|
||
else
|
||
// "Enum['Name'] = value"
|
||
Expr.assign(
|
||
p.newExpr(E.Index{
|
||
.target = p.newExpr(
|
||
E.Identifier{ .ref = data.arg },
|
||
value.loc,
|
||
),
|
||
.index = name_as_e_string.?,
|
||
}, value.loc),
|
||
value.value.?,
|
||
);
|
||
|
||
p.recordUsage(data.arg);
|
||
|
||
// String-valued enums do not form a two-way map
|
||
if (has_string_value) {
|
||
value_exprs.append(assign_target) catch bun.outOfMemory();
|
||
} else {
|
||
// "Enum[assignTarget] = 'Name'"
|
||
value_exprs.append(
|
||
Expr.assign(
|
||
p.newExpr(E.Index{
|
||
.target = p.newExpr(
|
||
E.Identifier{ .ref = data.arg },
|
||
value.loc,
|
||
),
|
||
.index = assign_target,
|
||
}, value.loc),
|
||
name_as_e_string.?,
|
||
),
|
||
) catch bun.outOfMemory();
|
||
p.recordUsage(data.arg);
|
||
}
|
||
}
|
||
|
||
p.should_fold_typescript_constant_expressions = old_should_fold_typescript_constant_expressions;
|
||
|
||
var value_stmts = ListManaged(Stmt).initCapacity(allocator, value_exprs.items.len) catch unreachable;
|
||
// Generate statements from expressions
|
||
for (value_exprs.items) |expr| {
|
||
value_stmts.appendAssumeCapacity(p.s(S.SExpr{ .value = expr }, expr.loc));
|
||
}
|
||
value_exprs.deinit();
|
||
try p.generateClosureForTypeScriptNamespaceOrEnum(
|
||
stmts,
|
||
stmt.loc,
|
||
data.is_export,
|
||
data.name.loc,
|
||
data.name.ref.?,
|
||
data.arg,
|
||
value_stmts.items,
|
||
all_values_are_pure,
|
||
);
|
||
return;
|
||
},
|
||
.s_namespace => |data| {
|
||
p.recordDeclaredSymbol(data.name.ref.?) catch unreachable;
|
||
|
||
// Scan ahead for any variables inside this namespace. This must be done
|
||
// ahead of time before visiting any statements inside the namespace
|
||
// because we may end up visiting the uses before the declarations.
|
||
// We need to convert the uses into property accesses on the namespace.
|
||
for (data.stmts) |child_stmt| {
|
||
switch (child_stmt.data) {
|
||
.s_local => |local| {
|
||
if (local.is_export) {
|
||
p.markExportedDeclsInsideNamespace(data.arg, local.decls.slice());
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
}
|
||
|
||
var prepend_temp_refs = PrependTempRefsOpts{ .kind = StmtsKind.fn_body };
|
||
var prepend_list = ListManaged(Stmt).fromOwnedSlice(p.allocator, data.stmts);
|
||
|
||
const old_enclosing_namespace_arg_ref = p.enclosing_namespace_arg_ref;
|
||
p.enclosing_namespace_arg_ref = data.arg;
|
||
p.pushScopeForVisitPass(.entry, stmt.loc) catch unreachable;
|
||
p.recordDeclaredSymbol(data.arg) catch unreachable;
|
||
try p.visitStmtsAndPrependTempRefs(&prepend_list, &prepend_temp_refs);
|
||
p.popScope();
|
||
p.enclosing_namespace_arg_ref = old_enclosing_namespace_arg_ref;
|
||
|
||
try p.generateClosureForTypeScriptNamespaceOrEnum(
|
||
stmts,
|
||
stmt.loc,
|
||
data.is_export,
|
||
data.name.loc,
|
||
data.name.ref.?,
|
||
data.arg,
|
||
prepend_list.items,
|
||
false,
|
||
);
|
||
return;
|
||
},
|
||
else => {
|
||
notimpl();
|
||
},
|
||
}
|
||
|
||
// if we get this far, it stays
|
||
try stmts.append(stmt.*);
|
||
}
|
||
|
||
/// Returns true when `ref` names an export that is configured for
/// replacement or deletion via the `replace_exports` feature table.
fn isExportToEliminate(p: *P, ref: Ref) bool {
    return p.options.features.replace_exports.contains(p.loadNameFromRef(ref));
}
|
||
|
||
/// Visits every declaration in a `var`/`let`/`const` statement: visits the
/// binding and the initializer expression, applies export replacement
/// (`replace_exports`), wires up CommonJS-require-to-ESM unwrapping, and
/// compacts the `decls` slice in place by dropping eliminated declarations.
/// Returns the number of declarations kept; the caller re-slices to that
/// length.
///
/// `was_const` participates in const-value inlining; `is_possibly_decl_to_remove`
/// is comptime so the export-replacement probing compiles away when unused.
fn visitDecls(p: *P, decls: []G.Decl, was_const: bool, comptime is_possibly_decl_to_remove: bool) usize {
    // `j` is the write cursor for the in-place compaction.
    var j: usize = 0;
    var out_decls = decls;
    for (decls) |*decl| {
        p.visitBinding(decl.binding, null);

        if (decl.value != null) {
            var val = decl.value.?;
            // Capture whether the initializer is an anonymous named expression
            // (e.g. `const x = function() {}`) before visiting changes it, so
            // the symbol name can optionally be preserved afterwards.
            const was_anonymous_named_expr = val.isAnonymousNamed();
            var replacement: ?*const RuntimeFeatures.ReplaceableExport = null;

            const prev_require_to_convert_count = p.imports_to_convert_from_require.items.len;
            const prev_macro_call_count = p.macro_call_count;
            const orig_dead = p.is_control_flow_dead;
            if (comptime is_possibly_decl_to_remove) {
                // If this identifier is configured for replacement, visit its
                // initializer as dead code unless it's a plain `.replace`.
                if (decl.binding.data == .b_identifier) {
                    if (p.options.features.replace_exports.getPtr(p.loadNameFromRef(decl.binding.data.b_identifier.ref))) |replacer| {
                        replacement = replacer;
                        if (p.options.features.dead_code_elimination and (replacer.* != .replace)) {
                            p.is_control_flow_dead = true;
                        }
                    }
                }
            }

            if (p.options.features.react_fast_refresh) {
                // Reset so we can detect below whether visiting this
                // initializer ended in a hook call.
                p.react_refresh.last_hook_seen = null;
            }

            if (only_scan_imports_and_do_not_visit) {
                @compileError("only_scan_imports_and_do_not_visit must not run this.");
            }
            decl.value = p.visitExprInOut(val, .{
                .is_immediately_assigned_to_decl = true,
            });

            if (p.options.features.react_fast_refresh) {
                // When hooks are immediately assigned to something, we need to hash the binding.
                if (p.react_refresh.last_hook_seen) |last_hook| {
                    if (decl.value.?.data.as(.e_call)) |call| {
                        if (last_hook == call) {
                            decl.binding.data.writeToHasher(&p.react_refresh.hook_ctx_storage.?.*.?.hasher, p.symbols.items);
                        }
                    }
                }
            }

            if (p.shouldUnwrapCommonJSToESM()) {
                // If visiting the initializer registered a new require()->import
                // conversion, attach this binding as the import's namespace ref
                // and drop the declaration (the generated import replaces it).
                if (prev_require_to_convert_count < p.imports_to_convert_from_require.items.len) {
                    if (decl.binding.data == .b_identifier) {
                        const ref = decl.binding.data.b_identifier.ref;
                        if (decl.value != null and
                            decl.value.?.data == .e_require_string and
                            decl.value.?.data.e_require_string.unwrapped_id != std.math.maxInt(u32))
                        {
                            p.imports_to_convert_from_require.items[decl.value.?.data.e_require_string.unwrapped_id].namespace.ref = ref;
                            p.import_items_for_namespace.put(
                                p.allocator,
                                ref,
                                ImportItemForNamespaceMap.init(p.allocator),
                            ) catch unreachable;
                            // Skip the compaction append: this decl is removed.
                            continue;
                        }
                    }
                }
            }

            if (comptime is_possibly_decl_to_remove) {
                // Restore the dead-code flag possibly set above.
                p.is_control_flow_dead = orig_dead;
            }
            if (comptime is_possibly_decl_to_remove) {
                if (decl.binding.data == .b_identifier) {
                    if (replacement) |ptr| {
                        if (!p.replaceDeclAndPossiblyRemove(decl, ptr)) {
                            // `.delete`: drop this declaration entirely.
                            continue;
                        }
                    }
                }
            }

            p.visitDecl(
                decl,
                was_anonymous_named_expr,
                was_const and !p.current_scope.is_after_const_local_prefix,
                if (comptime allow_macros)
                    prev_macro_call_count != p.macro_call_count
                else
                    false,
            );
        } else if (comptime is_possibly_decl_to_remove) {
            // No initializer: still honor export replacement for identifiers.
            if (decl.binding.data == .b_identifier) {
                if (p.options.features.replace_exports.getPtr(p.loadNameFromRef(decl.binding.data.b_identifier.ref))) |ptr| {
                    if (!p.replaceDeclAndPossiblyRemove(decl, ptr)) {
                        // NOTE(review): this call passes `was_const and ...` in the
                        // `was_anonymous_named_expr` position and `false` as
                        // `could_be_const_value`, unlike the call above — and on the
                        // `.delete` path `decl.value` is still null when `visitDecl`
                        // unwraps it for identifier bindings. Looks like a possible
                        // argument-order slip; confirm against upstream before changing.
                        p.visitDecl(
                            decl,
                            was_const and !p.current_scope.is_after_const_local_prefix,
                            false,
                            false,
                        );
                    } else {
                        continue;
                    }
                }
            }
        }

        // Keep this declaration: compact it toward the front of the slice.
        out_decls[j] = decl.*;
        j += 1;
    }

    return j;
}
|
||
|
||
/// Emits a replacement for an eliminated export: either a substitute value
/// bound to the original name (`.replace`) or a freshly declared symbol with
/// its own value (`.inject`). Returns true when a statement was actually
/// appended; `.delete` appends nothing and returns false.
fn injectReplacementExport(p: *P, stmts: *StmtList, name_ref: Ref, loc: logger.Loc, replacement: *const RuntimeFeatures.ReplaceableExport) bool {
    // Build the single declaration for the replacement; both non-delete
    // variants then share the same exported-local emission path.
    const decl: G.Decl = switch (replacement.*) {
        .delete => return false,
        .replace => |value| .{
            .binding = p.b(B.Identifier{ .ref = name_ref }, loc),
            .value = value,
        },
        .inject => |with| .{
            .binding = p.b(
                B.Identifier{ .ref = p.declareSymbol(.other, loc, with.name) catch unreachable },
                loc,
            ),
            .value = with.value,
        },
    };

    const count = stmts.items.len;
    const decls = p.allocator.alloc(G.Decl, 1) catch unreachable;
    decls[0] = decl;

    var local = p.s(
        S.Local{
            .is_export = true,
            .decls = Decl.List.init(decls),
        },
        loc,
    );
    p.visitAndAppendStmt(stmts, &local) catch unreachable;
    // Visiting may have dropped the statement again; report what happened.
    return count != stmts.items.len;
}
|
||
|
||
/// Applies an export replacement directly to a declaration. Returns false
/// for `.delete` (caller drops the decl); otherwise rewrites the decl in
/// place and returns true.
fn replaceDeclAndPossiblyRemove(p: *P, decl: *G.Decl, replacement: *const RuntimeFeatures.ReplaceableExport) bool {
    switch (replacement.*) {
        .delete => return false,
        .replace => |value| {
            // Keep the original binding; swap in the visited replacement value.
            decl.*.value = p.visitExpr(value);
        },
        .inject => |with| {
            // Replace both the binding (a newly declared symbol) and the value.
            const binding_loc = decl.binding.loc;
            const value_loc = if (decl.value) |existing| existing.loc else binding_loc;
            decl.* = .{
                .binding = p.b(
                    B.Identifier{ .ref = p.declareSymbol(.other, binding_loc, with.name) catch unreachable },
                    binding_loc,
                ),
                .value = p.visitExpr(Expr{ .data = with.value.data, .loc = value_loc }),
            };
        },
    }
    return true;
}
|
||
|
||
/// Matches a destructuring binding pattern against a macro-produced literal
/// (`was_originally_macro` object/array). Properties/items not mentioned by
/// the pattern are pruned from the literal in place, and — when inlining is
/// enabled — identifier bindings are recorded in `const_values` so later
/// references can be inlined. Recurses into nested object/array patterns.
fn visitBindingAndExprForMacro(p: *P, binding: Binding, expr: Expr) void {
    switch (binding.data) {
        .b_object => |bound_object| {
            if (expr.data == .e_object and
                expr.data.e_object.was_originally_macro)
            {
                var object = expr.data.e_object;
                // A rest/spread pattern needs the whole object; don't prune.
                for (bound_object.properties) |property| {
                    if (property.flags.contains(.is_spread)) return;
                }
                var output_properties = object.properties.slice();
                // `end` is the write cursor for in-place compaction of the
                // macro object's properties down to only the matched ones.
                var end: u32 = 0;
                for (bound_object.properties) |property| {
                    if (property.key.asStringLiteral(p.allocator)) |name| {
                        if (object.asProperty(name)) |query| {
                            switch (query.expr.data) {
                                // Nested literal: recurse into the sub-pattern.
                                .e_object, .e_array => p.visitBindingAndExprForMacro(property.value, query.expr),
                                else => {
                                    if (p.options.features.inlining) {
                                        if (property.value.data == .b_identifier) {
                                            // Record the matched value for inlining.
                                            p.const_values.put(p.allocator, property.value.data.b_identifier.ref, query.expr) catch unreachable;
                                        }
                                    }
                                },
                            }
                            // Keep this property: move it toward the front.
                            output_properties[end] = output_properties[query.i];
                            end += 1;
                        }
                    }
                }

                // Truncate to only the properties the pattern referenced.
                object.properties.len = end;
            }
        },
        .b_array => |bound_array| {
            if (expr.data == .e_array and
                expr.data.e_array.was_originally_macro and !bound_array.has_spread)
            {
                var array = expr.data.e_array;

                // Drop any macro items beyond what the pattern destructures.
                array.items.len = @min(array.items.len, @as(u32, @truncate(bound_array.items.len)));
                for (bound_array.items[0..array.items.len], array.items.slice()) |item, *child_expr| {
                    if (item.binding.data == .b_missing) {
                        // Hole in the pattern (e.g. `[, x]`): blank the slot.
                        child_expr.* = p.newExpr(E.Missing{}, expr.loc);
                        continue;
                    }

                    p.visitBindingAndExprForMacro(item.binding, child_expr.*);
                }
            }
        },
        .b_identifier => |id| {
            if (p.options.features.inlining) {
                // Whole-value binding: record it for inlining.
                p.const_values.put(p.allocator, id.ref, expr) catch unreachable;
            }
        },
        else => {},
    }
}
|
||
|
||
/// Post-visit processing for a single declaration: records inlinable const
/// values, ends the scope's "leading const prefix" tracking when the binding
/// is not a const candidate, optionally preserves the symbol name of an
/// anonymous named initializer, and routes destructured macro results to
/// `visitBindingAndExprForMacro`.
fn visitDecl(p: *P, decl: *Decl, was_anonymous_named_expr: bool, could_be_const_value: bool, could_be_macro: bool) void {
    // Optionally preserve the name
    switch (decl.binding.data) {
        .b_identifier => |id| {
            if (could_be_const_value or (allow_macros and could_be_macro)) {
                if (decl.value) |val| {
                    if (val.canBeConstValue()) {
                        // Remember the value so later references to this
                        // identifier can be inlined.
                        p.const_values.put(p.allocator, id.ref, val) catch unreachable;
                    }
                }
            } else {
                // A non-const-candidate declaration ends the run of leading
                // const declarations in this scope.
                p.current_scope.is_after_const_local_prefix = true;
            }
            // NOTE(review): `decl.value.?` assumes the initializer is non-null
            // here; callers in this file appear to guarantee that, but confirm
            // before adding new call sites.
            decl.value = p.maybeKeepExprSymbolName(
                decl.value.?,
                p.symbols.items[id.ref.innerIndex()].original_name,
                was_anonymous_named_expr,
            );
        },
        .b_object, .b_array => {
            if (comptime allow_macros) {
                if (could_be_macro and decl.value != null) {
                    p.visitBindingAndExprForMacro(decl.binding, decl.value.?);
                }
            }
        },
        else => {},
    }
}
|
||
|
||
/// Marks every binding in `decls` as exported from the namespace whose
/// argument symbol is `ns_ref`, so references get rewritten into property
/// accesses on the namespace object.
pub fn markExportedDeclsInsideNamespace(p: *P, ns_ref: Ref, decls: []G.Decl) void {
    // A declaration may bind several names (destructuring); walk each binding.
    for (decls) |d| p.markExportedBindingInsideNamespace(ns_ref, d.binding);
}
|
||
|
||
/// Appends an `if`/loop body to `stmts`, preserving block scoping only where
/// it matters: a block whose statements don't care about scope is flattened
/// into the parent list, while a bare scope-sensitive statement gets wrapped
/// in a fresh block.
pub fn appendIfBodyPreservingScope(p: *P, stmts: *ListManaged(Stmt), body: Stmt) anyerror!void {
    if (body.data == .s_block) {
        const block = body.data.s_block;
        // Does any child declare something that must stay block-scoped?
        const needs_scope = blk: {
            for (block.stmts) |child| {
                if (statementCaresAboutScope(child)) break :blk true;
            }
            break :blk false;
        };
        if (!needs_scope and block.stmts.len > 0) {
            // Safe to splice the children directly into the parent.
            return stmts.appendSlice(block.stmts);
        }
    }

    if (statementCaresAboutScope(body)) {
        // Wrap so the declaration stays scoped to the body.
        const block_stmts = try p.allocator.alloc(Stmt, 1);
        block_stmts[0] = body;
        return stmts.append(p.s(S.Block{ .stmts = block_stmts }, body.loc));
    }

    return stmts.append(body);
}
|
||
|
||
/// Recursively records every identifier reached through `binding` in
/// `is_exported_inside_namespace`, mapping it to the enclosing namespace's
/// argument ref so later uses become property accesses on the namespace.
fn markExportedBindingInsideNamespace(p: *P, ref: Ref, binding: BindingNodeIndex) void {
    switch (binding.data) {
        .b_missing => {},
        .b_identifier => |ident| p.is_exported_inside_namespace.put(p.allocator, ident.ref, ref) catch unreachable,
        // Destructuring patterns: recurse into every nested binding.
        .b_array => |array| for (array.items) |item| p.markExportedBindingInsideNamespace(ref, item.binding),
        .b_object => |obj| for (obj.properties) |prop| p.markExportedBindingInsideNamespace(ref, prop.value),
    }
}
|
||
|
||
/// Wraps the lowered body of a TypeScript `namespace` or `enum` in the
/// standard closure pattern:
///
///   var Name;
///   ((Name) => { ...body... })(Name ||= {});
///
/// Emits the `var`/`let` declaration (once per merged symbol), builds the
/// argument expression (including the enclosing-namespace re-export form),
/// and appends the immediately-invoked arrow call to `stmts`.
fn generateClosureForTypeScriptNamespaceOrEnum(
    p: *P,
    stmts: *ListManaged(Stmt),
    stmt_loc: logger.Loc,
    is_export: bool,
    name_loc: logger.Loc,
    original_name_ref: Ref,
    arg_ref: Ref,
    stmts_inside_closure: []Stmt,
    all_values_are_pure: bool,
) anyerror!void {
    var name_ref = original_name_ref;

    // Follow the link chain in case symbols were merged
    var symbol: Symbol = p.symbols.items[name_ref.innerIndex()];
    while (symbol.hasLink()) {
        const link = symbol.link;
        name_ref = link;
        symbol = p.symbols.items[name_ref.innerIndex()];
    }
    const allocator = p.allocator;

    // Make sure to only emit a variable once for a given namespace, since there
    // can be multiple namespace blocks for the same namespace
    if ((symbol.kind == .ts_namespace or symbol.kind == .ts_enum) and
        !p.emitted_namespace_vars.contains(name_ref))
    {
        p.emitted_namespace_vars.putNoClobber(allocator, name_ref, {}) catch bun.outOfMemory();

        var decls = allocator.alloc(G.Decl, 1) catch bun.outOfMemory();
        decls[0] = G.Decl{ .binding = p.b(B.Identifier{ .ref = name_ref }, name_loc) };

        if (p.enclosing_namespace_arg_ref == null) {
            // Top-level namespace: "var"
            stmts.append(
                p.s(S.Local{
                    .kind = .k_var,
                    .decls = G.Decl.List.init(decls),
                    .is_export = is_export,
                }, stmt_loc),
            ) catch bun.outOfMemory();
        } else {
            // Nested namespace: "let"
            stmts.append(
                p.s(S.Local{
                    .kind = .k_let,
                    .decls = G.Decl.List.init(decls),
                }, stmt_loc),
            ) catch bun.outOfMemory();
        }
    }

    // The single argument passed to the closure; also performs the
    // "initialize-if-missing" assignment for the namespace object.
    const arg_expr: Expr = arg_expr: {
        // TODO: unsupportedJSFeatures.has(.logical_assignment)
        // If the "||=" operator is supported, our minified output can be slightly smaller
        if (is_export) if (p.enclosing_namespace_arg_ref) |namespace| {
            const name = p.symbols.items[name_ref.innerIndex()].original_name;

            // "name = (enclosing.name ||= {})"
            p.recordUsage(namespace);
            p.recordUsage(name_ref);
            break :arg_expr Expr.assign(
                Expr.initIdentifier(name_ref, name_loc),
                p.newExpr(E.Binary{
                    .op = .bin_logical_or_assign,
                    .left = p.newExpr(
                        E.Dot{
                            .target = Expr.initIdentifier(namespace, name_loc),
                            .name = name,
                            .name_loc = name_loc,
                        },
                        name_loc,
                    ),
                    .right = p.newExpr(E.Object{}, name_loc),
                }, name_loc),
            );
        };

        // "name ||= {}"
        p.recordUsage(name_ref);
        break :arg_expr p.newExpr(E.Binary{
            .op = .bin_logical_or_assign,
            .left = Expr.initIdentifier(name_ref, name_loc),
            .right = p.newExpr(E.Object{}, name_loc),
        }, name_loc);
    };

    var func_args = allocator.alloc(G.Arg, 1) catch bun.outOfMemory();
    func_args[0] = .{ .binding = p.b(B.Identifier{ .ref = arg_ref }, name_loc) };

    var args_list = allocator.alloc(ExprNodeIndex, 1) catch bun.outOfMemory();
    args_list[0] = arg_expr;

    // TODO: if unsupported features includes arrow functions
    // const target = p.newExpr(
    //     E.Function{ .func = .{
    //         .args = func_args,
    //         .name = null,
    //         .open_parens_loc = stmt_loc,
    //         .body = G.FnBody{
    //             .loc = stmt_loc,
    //             .stmts = try allocator.dupe(StmtNodeIndex, stmts_inside_closure),
    //         },
    //     } },
    //     stmt_loc,
    // );

    const target = target: {
        // "(() => { foo() })()" => "(() => foo())()"
        if (p.options.features.minify_syntax and stmts_inside_closure.len == 1) {
            if (stmts_inside_closure[0].data == .s_expr) {
                // Turn the lone expression statement into a return so the
                // arrow can print in its compact expression-body form.
                stmts_inside_closure[0] = p.s(S.Return{
                    .value = stmts_inside_closure[0].data.s_expr.value,
                }, stmts_inside_closure[0].loc);
            }
        }

        break :target p.newExpr(E.Arrow{
            .args = func_args,
            .body = .{
                .loc = stmt_loc,
                .stmts = try allocator.dupe(StmtNodeIndex, stmts_inside_closure),
            },
            .prefer_expr = true,
        }, stmt_loc);
    };

    // Call the closure with the name object
    const call = p.newExpr(
        E.Call{
            .target = target,
            .args = ExprNodeList.init(args_list),
            // TODO: make these fully tree-shakable. this annotation
            // as-is is incorrect. This would be done by changing all
            // enum wrappers into `var Enum = ...` instead of two
            // separate statements. This way, the @__PURE__ annotation
            // is attached to the variable binding.
            //
            // .can_be_unwrapped_if_unused = all_values_are_pure,
        },
        stmt_loc,
    );

    const closure = p.s(S.SExpr{
        .value = call,
        .does_not_affect_tree_shaking = all_values_are_pure,
    }, stmt_loc);

    stmts.append(closure) catch unreachable;
}
|
||
|
||
fn lowerClass(
|
||
p: *P,
|
||
stmtorexpr: js_ast.StmtOrExpr,
|
||
) []Stmt {
|
||
switch (stmtorexpr) {
|
||
.stmt => |stmt| {
|
||
if (comptime !is_typescript_enabled) {
|
||
if (!stmt.data.s_class.class.has_decorators) {
|
||
var stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
|
||
stmts[0] = stmt;
|
||
return stmts;
|
||
}
|
||
}
|
||
var class = &stmt.data.s_class.class;
|
||
var constructor_function: ?*E.Function = null;
|
||
|
||
var static_decorators = ListManaged(Stmt).init(p.allocator);
|
||
var instance_decorators = ListManaged(Stmt).init(p.allocator);
|
||
var instance_members = ListManaged(Stmt).init(p.allocator);
|
||
var static_members = ListManaged(Stmt).init(p.allocator);
|
||
var class_properties = ListManaged(Property).init(p.allocator);
|
||
|
||
for (class.properties) |*prop| {
|
||
// merge parameter decorators with method decorators
|
||
if (prop.flags.contains(.is_method)) {
|
||
if (prop.value) |prop_value| {
|
||
switch (prop_value.data) {
|
||
.e_function => |func| {
|
||
const is_constructor = (prop.key.?.data == .e_string and prop.key.?.data.e_string.eqlComptime("constructor"));
|
||
|
||
if (is_constructor) constructor_function = func;
|
||
|
||
for (func.func.args, 0..) |arg, i| {
|
||
for (arg.ts_decorators.ptr[0..arg.ts_decorators.len]) |arg_decorator| {
|
||
var decorators = if (is_constructor)
|
||
class.ts_decorators.listManaged(p.allocator)
|
||
else
|
||
prop.ts_decorators.listManaged(p.allocator);
|
||
const args = p.allocator.alloc(Expr, 2) catch unreachable;
|
||
args[0] = p.newExpr(E.Number{ .value = @as(f64, @floatFromInt(i)) }, arg_decorator.loc);
|
||
args[1] = arg_decorator;
|
||
decorators.append(p.callRuntime(arg_decorator.loc, "__legacyDecorateParamTS", args)) catch unreachable;
|
||
if (is_constructor) {
|
||
class.ts_decorators.update(decorators);
|
||
} else {
|
||
prop.ts_decorators.update(decorators);
|
||
}
|
||
}
|
||
}
|
||
},
|
||
else => unreachable,
|
||
}
|
||
}
|
||
}
|
||
|
||
// TODO: prop.kind == .declare and prop.value == null
|
||
|
||
if (prop.ts_decorators.len > 0) {
|
||
const descriptor_key = prop.key.?;
|
||
const loc = descriptor_key.loc;
|
||
|
||
// TODO: when we have the `accessor` modifier, add `and !prop.flags.contains(.has_accessor_modifier)` to
|
||
// the if statement.
|
||
const descriptor_kind: Expr = if (!prop.flags.contains(.is_method))
|
||
p.newExpr(E.Undefined{}, loc)
|
||
else
|
||
p.newExpr(E.Null{}, loc);
|
||
|
||
var target: Expr = undefined;
|
||
if (prop.flags.contains(.is_static)) {
|
||
p.recordUsage(class.class_name.?.ref.?);
|
||
target = p.newExpr(E.Identifier{ .ref = class.class_name.?.ref.? }, class.class_name.?.loc);
|
||
} else {
|
||
target = p.newExpr(E.Dot{ .target = p.newExpr(E.Identifier{ .ref = class.class_name.?.ref.? }, class.class_name.?.loc), .name = "prototype", .name_loc = loc }, loc);
|
||
}
|
||
|
||
var array = prop.ts_decorators.listManaged(p.allocator);
|
||
|
||
if (p.options.features.emit_decorator_metadata) {
|
||
{
|
||
// design:type
|
||
var args = p.allocator.alloc(Expr, 2) catch unreachable;
|
||
args[0] = p.newExpr(E.String{ .data = "design:type" }, logger.Loc.Empty);
|
||
args[1] = p.serializeMetadata(prop.ts_metadata) catch unreachable;
|
||
array.append(p.callRuntime(loc, "__legacyMetadataTS", args)) catch unreachable;
|
||
}
|
||
{
|
||
// design:paramtypes and design:returntype if method
|
||
if (prop.flags.contains(.is_method)) {
|
||
if (prop.value) |prop_value| {
|
||
{
|
||
var args = p.allocator.alloc(Expr, 2) catch unreachable;
|
||
args[0] = p.newExpr(E.String{ .data = "design:paramtypes" }, logger.Loc.Empty);
|
||
|
||
const method_args = prop_value.data.e_function.func.args;
|
||
|
||
if (method_args.len > 0) {
|
||
var args_array = p.allocator.alloc(Expr, method_args.len) catch unreachable;
|
||
|
||
for (method_args, 0..) |method_arg, i| {
|
||
args_array[i] = p.serializeMetadata(method_arg.ts_metadata) catch unreachable;
|
||
}
|
||
|
||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(args_array) }, logger.Loc.Empty);
|
||
} else {
|
||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(&[_]Expr{}) }, logger.Loc.Empty);
|
||
}
|
||
|
||
array.append(p.callRuntime(loc, "__legacyMetadataTS", args)) catch unreachable;
|
||
}
|
||
{
|
||
var args = p.allocator.alloc(Expr, 2) catch unreachable;
|
||
args[0] = p.newExpr(E.String{ .data = "design:returntype" }, logger.Loc.Empty);
|
||
|
||
args[1] = p.serializeMetadata(prop_value.data.e_function.func.return_ts_metadata) catch unreachable;
|
||
|
||
array.append(p.callRuntime(loc, "__legacyMetadataTS", args)) catch unreachable;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
}
|
||
|
||
const args = p.allocator.alloc(Expr, 4) catch unreachable;
|
||
args[0] = p.newExpr(E.Array{ .items = ExprNodeList.init(array.items) }, loc);
|
||
args[1] = target;
|
||
args[2] = descriptor_key;
|
||
args[3] = descriptor_kind;
|
||
|
||
const decorator = p.callRuntime(prop.key.?.loc, "__legacyDecorateClassTS", args);
|
||
const decorator_stmt = p.s(S.SExpr{ .value = decorator }, decorator.loc);
|
||
|
||
if (prop.flags.contains(.is_static)) {
|
||
static_decorators.append(decorator_stmt) catch unreachable;
|
||
} else {
|
||
instance_decorators.append(decorator_stmt) catch unreachable;
|
||
}
|
||
}
|
||
|
||
if (prop.kind != .class_static_block and !prop.flags.contains(.is_method) and prop.key.?.data != .e_private_identifier and prop.ts_decorators.len > 0) {
|
||
// remove decorated fields without initializers to avoid assigning undefined.
|
||
const initializer = if (prop.initializer) |initializer_value| initializer_value else continue;
|
||
|
||
var target: Expr = undefined;
|
||
if (prop.flags.contains(.is_static)) {
|
||
p.recordUsage(class.class_name.?.ref.?);
|
||
target = p.newExpr(E.Identifier{ .ref = class.class_name.?.ref.? }, class.class_name.?.loc);
|
||
} else {
|
||
target = p.newExpr(E.This{}, prop.key.?.loc);
|
||
}
|
||
|
||
if (prop.flags.contains(.is_computed) or prop.key.?.data == .e_number) {
|
||
target = p.newExpr(E.Index{
|
||
.target = target,
|
||
.index = prop.key.?,
|
||
}, prop.key.?.loc);
|
||
} else {
|
||
target = p.newExpr(E.Dot{
|
||
.target = target,
|
||
.name = prop.key.?.data.e_string.data,
|
||
.name_loc = prop.key.?.loc,
|
||
}, prop.key.?.loc);
|
||
}
|
||
|
||
// remove fields with decorators from class body. Move static members outside of class.
|
||
if (prop.flags.contains(.is_static)) {
|
||
static_members.append(Stmt.assign(target, initializer)) catch unreachable;
|
||
} else {
|
||
instance_members.append(Stmt.assign(target, initializer)) catch unreachable;
|
||
}
|
||
continue;
|
||
}
|
||
|
||
class_properties.append(prop.*) catch unreachable;
|
||
}
|
||
|
||
class.properties = class_properties.items;
|
||
|
||
if (instance_members.items.len > 0) {
|
||
if (constructor_function == null) {
|
||
var properties = ListManaged(Property).fromOwnedSlice(p.allocator, class.properties);
|
||
var constructor_stmts = ListManaged(Stmt).init(p.allocator);
|
||
|
||
if (class.extends != null) {
|
||
const target = p.newExpr(E.Super{}, stmt.loc);
|
||
const arguments_ref = p.newSymbol(.unbound, arguments_str) catch unreachable;
|
||
p.current_scope.generated.push(p.allocator, arguments_ref) catch unreachable;
|
||
|
||
const super = p.newExpr(E.Spread{ .value = p.newExpr(E.Identifier{ .ref = arguments_ref }, stmt.loc) }, stmt.loc);
|
||
const args = ExprNodeList.one(p.allocator, super) catch unreachable;
|
||
|
||
constructor_stmts.append(p.s(S.SExpr{ .value = p.newExpr(E.Call{ .target = target, .args = args }, stmt.loc) }, stmt.loc)) catch unreachable;
|
||
}
|
||
|
||
constructor_stmts.appendSlice(instance_members.items) catch unreachable;
|
||
|
||
properties.insert(0, G.Property{
|
||
.flags = Flags.Property.init(.{ .is_method = true }),
|
||
.key = p.newExpr(E.String{ .data = "constructor" }, stmt.loc),
|
||
.value = p.newExpr(E.Function{ .func = G.Fn{
|
||
.name = null,
|
||
.open_parens_loc = logger.Loc.Empty,
|
||
.args = &[_]Arg{},
|
||
.body = .{ .loc = stmt.loc, .stmts = constructor_stmts.items },
|
||
.flags = Flags.Function.init(.{}),
|
||
} }, stmt.loc),
|
||
}) catch unreachable;
|
||
|
||
class.properties = properties.items;
|
||
} else {
|
||
var constructor_stmts = ListManaged(Stmt).fromOwnedSlice(p.allocator, constructor_function.?.func.body.stmts);
|
||
// statements coming from class body inserted after super call or beginning of constructor.
|
||
var super_index: ?usize = null;
|
||
for (constructor_stmts.items, 0..) |item, index| {
|
||
if (item.data != .s_expr or item.data.s_expr.value.data != .e_call or item.data.s_expr.value.data.e_call.target.data != .e_super) continue;
|
||
super_index = index;
|
||
break;
|
||
}
|
||
|
||
const i = if (super_index) |j| j + 1 else 0;
|
||
constructor_stmts.insertSlice(i, instance_members.items) catch unreachable;
|
||
|
||
constructor_function.?.func.body.stmts = constructor_stmts.items;
|
||
}
|
||
|
||
// TODO: make sure "super()" comes before instance field initializers
|
||
// https://github.com/evanw/esbuild/blob/e9413cc4f7ab87263ea244a999c6fa1f1e34dc65/internal/js_parser/js_parser_lower.go#L2742
|
||
}
|
||
|
||
var stmts_count: usize = 1 + static_members.items.len + instance_decorators.items.len + static_decorators.items.len;
|
||
if (class.ts_decorators.len > 0) stmts_count += 1;
|
||
var stmts = ListManaged(Stmt).initCapacity(p.allocator, stmts_count) catch unreachable;
|
||
stmts.appendAssumeCapacity(stmt);
|
||
stmts.appendSliceAssumeCapacity(static_members.items);
|
||
stmts.appendSliceAssumeCapacity(instance_decorators.items);
|
||
stmts.appendSliceAssumeCapacity(static_decorators.items);
|
||
if (class.ts_decorators.len > 0) {
|
||
var array = class.ts_decorators.listManaged(p.allocator);
|
||
|
||
if (p.options.features.emit_decorator_metadata) {
|
||
if (constructor_function != null) {
|
||
// design:paramtypes
|
||
var args = p.allocator.alloc(Expr, 2) catch unreachable;
|
||
args[0] = p.newExpr(E.String{ .data = "design:paramtypes" }, logger.Loc.Empty);
|
||
|
||
const constructor_args = constructor_function.?.func.args;
|
||
if (constructor_args.len > 0) {
|
||
var param_array = p.allocator.alloc(Expr, constructor_args.len) catch unreachable;
|
||
|
||
for (constructor_args, 0..) |constructor_arg, i| {
|
||
param_array[i] = p.serializeMetadata(constructor_arg.ts_metadata) catch unreachable;
|
||
}
|
||
|
||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(param_array) }, logger.Loc.Empty);
|
||
} else {
|
||
args[1] = p.newExpr(E.Array{ .items = ExprNodeList.init(&[_]Expr{}) }, logger.Loc.Empty);
|
||
}
|
||
|
||
array.append(p.callRuntime(stmt.loc, "__legacyMetadataTS", args)) catch unreachable;
|
||
}
|
||
}
|
||
|
||
const args = p.allocator.alloc(Expr, 2) catch unreachable;
|
||
args[0] = p.newExpr(E.Array{ .items = ExprNodeList.init(array.items) }, stmt.loc);
|
||
args[1] = p.newExpr(E.Identifier{ .ref = class.class_name.?.ref.? }, class.class_name.?.loc);
|
||
|
||
stmts.appendAssumeCapacity(Stmt.assign(
|
||
p.newExpr(E.Identifier{ .ref = class.class_name.?.ref.? }, class.class_name.?.loc),
|
||
p.callRuntime(stmt.loc, "__legacyDecorateClassTS", args),
|
||
));
|
||
|
||
p.recordUsage(class.class_name.?.ref.?);
|
||
p.recordUsage(class.class_name.?.ref.?);
|
||
}
|
||
return stmts.items;
|
||
},
|
||
.expr => |expr| {
|
||
var stmts = p.allocator.alloc(Stmt, 1) catch unreachable;
|
||
stmts[0] = p.s(S.SExpr{ .value = expr }, expr.loc);
|
||
return stmts;
|
||
},
|
||
}
|
||
}
|
||
|
||
/// Serializes a TypeScript type into the runtime value emitted for
/// `emitDecoratorMetadata` (`design:type` / `design:paramtypes` / etc.).
/// Mirrors the TypeScript compiler's mapping: primitive types map to their
/// wrapper constructors, nullish/bottom types map to `undefined`, and
/// user-defined names are guarded with `typeof x === "undefined" ? Object : x`
/// so that referencing a type-only name cannot throw at runtime.
fn serializeMetadata(p: *P, ts_metadata: TypeScript.Metadata) !Expr {
    return switch (ts_metadata) {
        // Types with no more specific runtime representation become `Object`.
        .m_none,
        .m_any,
        .m_unknown,
        .m_object,
        => p.newExpr(
            E.Identifier{
                .ref = (p.findSymbol(logger.Loc.Empty, "Object") catch unreachable).ref,
            },
            logger.Loc.Empty,
        ),

        // Nullish / bottom types serialize as `undefined`.
        .m_never,
        .m_undefined,
        .m_null,
        .m_void,
        => p.newExpr(
            E.Undefined{},
            logger.Loc.Empty,
        ),

        .m_string => p.newExpr(
            E.Identifier{
                .ref = (p.findSymbol(logger.Loc.Empty, "String") catch unreachable).ref,
            },
            logger.Loc.Empty,
        ),
        .m_number => p.newExpr(
            E.Identifier{
                .ref = (p.findSymbol(logger.Loc.Empty, "Number") catch unreachable).ref,
            },
            logger.Loc.Empty,
        ),
        .m_function => p.newExpr(
            E.Identifier{
                .ref = (p.findSymbol(logger.Loc.Empty, "Function") catch unreachable).ref,
            },
            logger.Loc.Empty,
        ),
        .m_boolean => p.newExpr(
            E.Identifier{
                .ref = (p.findSymbol(logger.Loc.Empty, "Boolean") catch unreachable).ref,
            },
            logger.Loc.Empty,
        ),
        .m_array => p.newExpr(
            E.Identifier{
                .ref = (p.findSymbol(logger.Loc.Empty, "Array") catch unreachable).ref,
            },
            logger.Loc.Empty,
        ),

        // BigInt and Symbol may not exist in older runtimes, so they are
        // wrapped in a `typeof` guard that falls back to `Object`.
        .m_bigint => p.maybeDefinedHelper(
            p.newExpr(
                E.Identifier{
                    .ref = (p.findSymbol(logger.Loc.Empty, "BigInt") catch unreachable).ref,
                },
                logger.Loc.Empty,
            ),
        ),

        .m_symbol => p.maybeDefinedHelper(
            p.newExpr(
                E.Identifier{
                    .ref = (p.findSymbol(logger.Loc.Empty, "Symbol") catch unreachable).ref,
                },
                logger.Loc.Empty,
            ),
        ),

        .m_promise => p.newExpr(
            E.Identifier{
                .ref = (p.findSymbol(logger.Loc.Empty, "Promise") catch unreachable).ref,
            },
            logger.Loc.Empty,
        ),

        // A reference to a user-defined type: emit the identifier guarded by a
        // typeof check, since the name may be type-only and absent at runtime.
        .m_identifier => |ref| {
            p.recordUsage(ref);
            if (p.is_import_item.contains(ref)) {
                return p.maybeDefinedHelper(p.newExpr(
                    E.ImportIdentifier{
                        .ref = ref,
                    },
                    logger.Loc.Empty,
                ));
            }

            return p.maybeDefinedHelper(p.newExpr(
                E.Identifier{ .ref = ref },
                logger.Loc.Empty,
            ));
        },

        // A qualified name like `A.B.C`: build the member chain, then guard
        // every prefix: `typeof A === "undefined" || typeof A.B === "undefined"
        // || ... ? Object : A.B.C`.
        .m_dot => |_refs| {
            var refs = _refs;
            bun.assert(refs.items.len >= 2);
            defer refs.deinit(p.allocator);

            // Outermost dot holds the last path segment; targets are filled
            // in right-to-left below.
            var dots = p.newExpr(
                E.Dot{
                    .name = p.loadNameFromRef(refs.items[refs.items.len - 1]),
                    .name_loc = logger.Loc.Empty,
                    .target = undefined,
                },
                logger.Loc.Empty,
            );

            var current_expr = &dots.data.e_dot.target;
            var i: usize = refs.items.len - 2;
            // Fill in intermediate segments (everything but the first ref).
            while (i > 0) {
                current_expr.* = p.newExpr(E.Dot{
                    .name = p.loadNameFromRef(refs.items[i]),
                    .name_loc = logger.Loc.Empty,
                    .target = undefined,
                }, logger.Loc.Empty);
                current_expr = &current_expr.data.e_dot.target;
                i -= 1;
            }

            // The root of the chain is either an import binding or a plain
            // identifier.
            if (p.is_import_item.contains(refs.items[0])) {
                current_expr.* = p.newExpr(
                    E.ImportIdentifier{
                        .ref = refs.items[0],
                    },
                    logger.Loc.Empty,
                );
            } else {
                current_expr.* = p.newExpr(
                    E.Identifier{
                        .ref = refs.items[0],
                    },
                    logger.Loc.Empty,
                );
            }

            const dot_identifier = current_expr.*;
            var current_dot = dots;

            // Build the `||` chain of typeof guards; `left` slots are filled
            // in afterwards, innermost last.
            var maybe_defined_dots = p.newExpr(
                E.Binary{
                    .op = .bin_logical_or,
                    .right = try p.checkIfDefinedHelper(current_dot),
                    .left = undefined,
                },
                logger.Loc.Empty,
            );

            // NOTE(review): at this point `i` is always 0 (the while loop
            // above counts down to 0), so this condition is equivalent to
            // `refs.items.len > 2` — it steps to the next shorter prefix.
            if (i < refs.items.len - 2) {
                current_dot = current_dot.data.e_dot.target;
            }
            current_expr = &maybe_defined_dots.data.e_binary.left;

            // One guard per remaining prefix of the chain.
            while (i < refs.items.len - 2) {
                current_expr.* = p.newExpr(
                    E.Binary{
                        .op = .bin_logical_or,
                        .right = try p.checkIfDefinedHelper(current_dot),
                        .left = undefined,
                    },
                    logger.Loc.Empty,
                );

                current_expr = &current_expr.data.e_binary.left;
                i += 1;
                if (i < refs.items.len - 2) {
                    current_dot = current_dot.data.e_dot.target;
                }
            }

            // The innermost guard tests the root identifier itself.
            current_expr.* = try p.checkIfDefinedHelper(dot_identifier);

            const root = p.newExpr(
                E.If{
                    .yes = p.newExpr(
                        E.Identifier{
                            .ref = (p.findSymbol(logger.Loc.Empty, "Object") catch unreachable).ref,
                        },
                        logger.Loc.Empty,
                    ),
                    .no = dots,
                    .test_ = maybe_defined_dots,
                },
                logger.Loc.Empty,
            );

            return root;
        },
    };
}
|
||
|
||
/// Builds the guard expression `typeof <expr> === "undefined"`.
/// Used by the decorator-metadata lowering to test whether a global
/// (e.g. `BigInt`, `Symbol`) exists before referencing it.
fn checkIfDefinedHelper(p: *P, expr: Expr) !Expr {
    const loc = logger.Loc.Empty;
    const typeof_value = p.newExpr(
        E.Unary{
            .op = .un_typeof,
            .value = expr,
        },
        loc,
    );
    const undefined_string = p.newExpr(
        E.String{ .data = "undefined" },
        loc,
    );
    return p.newExpr(
        E.Binary{
            .op = .bin_strict_eq,
            .left = typeof_value,
            .right = undefined_string,
        },
        loc,
    );
}
|
||
|
||
/// Wraps `identifier_expr` so it degrades to `Object` when the identifier is
/// not defined at runtime:
/// `typeof <id> === "undefined" ? Object : <id>`.
fn maybeDefinedHelper(p: *P, identifier_expr: Expr) !Expr {
    const loc = logger.Loc.Empty;
    const guard = try p.checkIfDefinedHelper(identifier_expr);
    const object_ref = (p.findSymbol(loc, "Object") catch unreachable).ref;
    return p.newExpr(
        E.If{
            .test_ = guard,
            .yes = p.newExpr(E.Identifier{ .ref = object_ref }, loc),
            .no = identifier_expr,
        },
        loc,
    );
}
|
||
|
||
/// Visits the initializer clause of a `for`, `for-in`, or `for-of` loop.
/// Only expression statements and local declarations are legal here; any
/// other statement kind is a parser invariant violation and panics.
fn visitForLoopInit(p: *P, stmt: Stmt, is_in_or_of: bool) Stmt {
    switch (stmt.data) {
        .s_expr => |expr_stmt| {
            // In `for (x in ...)` / `for (x of ...)` the expression is an
            // assignment target; in a plain `for (...;...;...)` it is not.
            const target: js_ast.AssignTarget = if (is_in_or_of) .replace else .none;
            p.stmt_expr_value = expr_stmt.value.data;
            expr_stmt.value = p.visitExprInOut(expr_stmt.value, ExprIn{ .assign_target = target });
        },
        .s_local => |local| {
            for (local.decls.slice()) |*decl| {
                p.visitBinding(decl.binding, null);
                if (decl.value) |initializer| {
                    decl.value = p.visitExpr(initializer);
                }
            }
            // May demote `let`/`const` to `var` depending on target features.
            local.kind = p.selectLocalKind(local.kind);
        },
        else => p.panic("Unexpected stmt in visitForLoopInit: {any}", .{stmt}),
    }

    return stmt;
}
|
||
|
||
/// Rewrites a reference to a namespace-exported symbol as a property access
/// on the enclosing namespace argument, e.g. `foo` -> `ns.foo`.
/// Asserts we are currently inside a TypeScript namespace.
fn wrapIdentifierNamespace(
    p: *P,
    loc: logger.Loc,
    ref: Ref,
) Expr {
    const ns_ref = p.enclosing_namespace_arg_ref.?;
    p.recordUsage(ns_ref);

    const property_name = p.symbols.items[ref.innerIndex()].original_name;
    return p.newExpr(E.Dot{
        .target = Expr.initIdentifier(ns_ref, loc),
        .name = property_name,
        .name_loc = loc,
    }, loc);
}
|
||
|
||
/// Records `ref` as a top-level `var` that must be relocated during hoisting
/// and returns a plain identifier expression referencing it.
fn wrapIdentifierHoisting(
    p: *P,
    loc: logger.Loc,
    ref: Ref,
) Expr {
    // Historical note: a Zig stage1 miscompilation once required copying
    // `ref` into a local const before use here; that bug took a very long
    // time to track down.
    p.relocated_top_level_vars.append(p.allocator, LocRef{ .loc = loc, .ref = ref }) catch unreachable;
    p.recordUsage(ref);
    return Expr.initIdentifier(ref, loc);
}
|
||
|
||
/// Attaches `comment` (the original enum member text) to an inlined enum
/// value so the printer can emit it as `value /* comment */`.
fn wrapInlinedEnum(p: *P, value: Expr, comment: string) Expr {
    // A comment body containing "*/" would terminate the emitted block
    // comment early and corrupt the output, so skip wrapping in that case.
    const comment_is_safe = !bun.strings.containsComptime(comment, "*/");
    if (!comment_is_safe) {
        return value;
    }

    return p.newExpr(
        E.InlinedEnum{ .value = value, .comment = comment },
        value.loc,
    );
}
|
||
|
||
/// Materializes the configured replacement value of a `--define` at `loc`.
/// Identifier defines go through handleIdentifier so usage is recorded and
/// import rewriting still applies; everything else is cloned verbatim.
fn valueForDefine(p: *P, loc: logger.Loc, assign_target: js_ast.AssignTarget, is_delete_target: bool, define_data: *const DefineData) Expr {
    switch (define_data.value) {
        .e_identifier => {
            return p.handleIdentifier(
                loc,
                define_data.value.e_identifier,
                define_data.original_name.?,
                IdentifierOpts{
                    .assign_target = assign_target,
                    .is_delete_target = is_delete_target,
                    .was_originally_identifier = true,
                },
            );
        },
        .e_string => |str| return p.newExpr(str, loc),
        else => {
            // Any other literal kind: reuse the define's data at this location.
            return Expr{
                .data = define_data.value,
                .loc = loc,
            };
        },
    }
}
|
||
|
||
/// Returns true when `expr` structurally matches the dotted define path
/// `parts` (e.g. `process.env.NODE_ENV` against ["process","env","NODE_ENV"]),
/// walking the member chain from right to left.
fn isDotDefineMatch(p: *P, expr: Expr, parts: []const string) bool {
    switch (expr.data) {
        .e_dot => |ex| {
            if (parts.len > 1) {
                // Optional chains (`a?.b`) never match a define.
                if (ex.optional_chain != null) {
                    return false;
                }

                // Intermediates must be dot expressions
                const last = parts.len - 1;
                const is_tail_match = strings.eql(parts[last], ex.name);
                // Match the final segment here, then recurse on the target
                // with the remaining prefix.
                return is_tail_match and p.isDotDefineMatch(ex.target, parts[0..last]);
            }
        },
        .e_import_meta => {
            // `import.meta` is matched as the two-part path ["import", "meta"].
            return (parts.len == 2 and strings.eqlComptime(parts[0], "import") and strings.eqlComptime(parts[1], "meta"));
        },
        // Note: this behavior differs from esbuild
        // esbuild does not try to match index accessors
        // we do, but only if it's a UTF8 string
        // the intent is to handle people using this form instead of E.Dot. So we really only want to do this if the accessor can also be an identifier
        .e_index => |index| {
            if (parts.len > 1 and index.index.data == .e_string and index.index.data.e_string.isUTF8()) {
                if (index.optional_chain != null) {
                    return false;
                }

                const last = parts.len - 1;
                const is_tail_match = strings.eql(parts[last], index.index.data.e_string.slice(p.allocator));
                return is_tail_match and p.isDotDefineMatch(index.target, parts[0..last]);
            }
        },
        .e_identifier => |ex| {

            // The last expression must be an identifier
            if (parts.len == 1) {
                const name = p.loadNameFromRef(ex.ref);
                if (!strings.eql(name, parts[0])) {
                    return false;
                }

                const result = p.findSymbolWithRecordUsage(expr.loc, name, false) catch return false;

                // We must not be in a "with" statement scope
                if (result.is_inside_with_scope) {
                    return false;
                }

                // Only unbound (global-like) names may be replaced by defines;
                // a local declaration shadows the define.
                // when there's actually no symbol by that name, we return Ref.None
                // If a symbol had already existed by that name, we return .unbound
                return (result.ref.isNull() or p.symbols.items[result.ref.innerIndex()].kind == .unbound);
            }
        },
        else => {},
    }

    return false;
}
|
||
|
||
/// Visits a binding pattern (identifier, array, or object destructuring),
/// recording declared symbols, enforcing strict-mode restrictions, and —
/// when `duplicate_arg_check` is provided — reporting duplicate parameter
/// names in the same parameter list.
fn visitBinding(p: *P, binding: BindingNodeIndex, duplicate_arg_check: ?*StringVoidMap) void {
    switch (binding.data) {
        .b_missing => {},
        .b_identifier => |bind| {
            p.recordDeclaredSymbol(bind.ref) catch unreachable;
            const name = p.symbols.items[bind.ref.innerIndex()].original_name;
            // Binding "eval" or "arguments" is a strict-mode error.
            if (isEvalOrArguments(name)) {
                p.markStrictModeFeature(.eval_or_arguments, js_lexer.rangeOfIdentifier(p.source, binding.loc), name) catch unreachable;
            }
            if (duplicate_arg_check) |dup| {
                // getOrPutContains returns true if the name was already seen.
                if (dup.getOrPutContains(name)) {
                    p.log.addRangeErrorFmt(
                        p.source,
                        js_lexer.rangeOfIdentifier(p.source, binding.loc),
                        p.allocator,
                        "\"{s}\" cannot be bound multiple times in the same parameter list",
                        .{name},
                    ) catch unreachable;
                }
            }
        },
        .b_array => |bind| {
            for (bind.items) |*item| {
                p.visitBinding(item.binding, duplicate_arg_check);
                if (item.default_value) |default_value| {
                    // Preserve inferred names: `const [f = () => {}] = ...`
                    // names the anonymous function "f".
                    const was_anonymous_named_expr = default_value.isAnonymousNamed();
                    item.default_value = p.visitExpr(default_value);

                    switch (item.binding.data) {
                        .b_identifier => |bind_| {
                            item.default_value = p.maybeKeepExprSymbolName(
                                item.default_value orelse unreachable,
                                p.symbols.items[bind_.ref.innerIndex()].original_name,
                                was_anonymous_named_expr,
                            );
                        },
                        else => {},
                    }
                }
            }
        },
        .b_object => |bind| {
            for (bind.properties) |*property| {
                // Rest patterns (`...rest`) have no key expression to visit.
                if (!property.flags.contains(.is_spread)) {
                    property.key = p.visitExpr(property.key);
                }

                p.visitBinding(property.value, duplicate_arg_check);
                if (property.default_value) |default_value| {
                    // Same inferred-name preservation as the array case above.
                    const was_anonymous_named_expr = default_value.isAnonymousNamed();
                    property.default_value = p.visitExpr(default_value);

                    switch (property.value.data) {
                        .b_identifier => |bind_| {
                            property.default_value = p.maybeKeepExprSymbolName(
                                property.default_value orelse unreachable,
                                p.symbols.items[bind_.ref.innerIndex()].original_name,
                                was_anonymous_named_expr,
                            );
                        },
                        else => {},
                    }
                }
            }
        },
    }
}
|
||
|
||
/// Visits the body statement of a loop, marking that we are inside a loop so
/// that `break`/`continue` validation applies to the nested statements.
fn visitLoopBody(p: *P, stmt: StmtNodeIndex) StmtNodeIndex {
    const was_inside_loop = p.fn_or_arrow_data_visit.is_inside_loop;
    defer p.fn_or_arrow_data_visit.is_inside_loop = was_inside_loop;

    p.fn_or_arrow_data_visit.is_inside_loop = true;
    p.loop_body = stmt.data;
    return p.visitSingleStmt(stmt, .loop_body);
}
|
||
|
||
/// Visits a statement that appears in a single-statement position (an
/// un-braced `if`/`else`/loop body, etc.), then collapses the visit result
/// back into a single statement.
fn visitSingleStmt(p: *P, stmt: Stmt, kind: StmtsKind) Stmt {
    // Blocks get their own scope and are visited in place.
    if (stmt.data == .s_block) {
        var new_stmt = stmt;
        p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable;
        var stmts = ListManaged(Stmt).initCapacity(p.allocator, stmt.data.s_block.stmts.len) catch unreachable;
        stmts.appendSlice(stmt.data.s_block.stmts) catch unreachable;
        p.visitStmts(&stmts, kind) catch unreachable;
        p.popScope();
        new_stmt.data.s_block.stmts = stmts.items;
        // When minifying, a block that doesn't need its own scope can be
        // unwrapped back into a single statement.
        if (p.options.features.minify_syntax) {
            new_stmt = p.stmtsToSingleStmt(stmt.loc, stmts.items);
        }

        return new_stmt;
    }

    const has_if_scope = switch (stmt.data) {
        .s_function => stmt.data.s_function.func.flags.contains(.has_if_scope),
        else => false,
    };

    // Introduce a fake block scope for function declarations inside if statements
    if (has_if_scope) {
        p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable;
    }

    var stmts = ListManaged(Stmt).initCapacity(p.allocator, 1) catch unreachable;
    stmts.append(stmt) catch unreachable;
    p.visitStmts(&stmts, kind) catch unreachable;

    if (has_if_scope) {
        p.popScope();
    }

    // Visiting one statement may have expanded it into several; fold back.
    return p.stmtsToSingleStmt(stmt.loc, stmts.items);
}
|
||
|
||
/// Collapses a visited statement list back into a single statement for
/// contexts that only accept one. One statement could potentially have
/// expanded to several during the visit pass.
fn stmtsToSingleStmt(p: *P, loc: logger.Loc, stmts: []Stmt) Stmt {
    // Nothing to emit: use an empty statement.
    if (stmts.len == 0) {
        return Stmt{ .data = Prefill.Data.SEmpty, .loc = loc };
    }

    // A single statement can be used directly unless it is scope-sensitive:
    // "let" and "const" must be put in a block when in a single-statement context.
    if (stmts.len == 1) {
        if (!statementCaresAboutScope(stmts[0])) {
            return stmts[0];
        }
    }

    return p.s(S.Block{ .stmts = stmts }, loc);
}
|
||
|
||
/// Resolves a label name (from `break foo;` / `continue foo;`) against the
/// enclosing label scopes. On failure, reports a range error and returns a
/// result with `found == false` after allocating a fresh unbound symbol.
fn findLabelSymbol(p: *P, loc: logger.Loc, name: string) FindLabelSymbolResult {
    var res = FindLabelSymbolResult{ .ref = Ref.None, .is_loop = false };

    var _scope: ?*Scope = p.current_scope;

    // Labels cannot be referenced across function boundaries, so stop at the
    // first scope whose kind stops hoisting.
    while (_scope != null and !_scope.?.kindStopsHoisting()) : (_scope = _scope.?.parent.?) {
        const scope = _scope orelse unreachable;
        const label_ref = scope.label_ref orelse continue;
        if (scope.kind == .label and strings.eql(name, p.symbols.items[label_ref.innerIndex()].original_name)) {
            // Track how many times we've referenced this symbol
            p.recordUsage(label_ref);
            res.ref = label_ref;
            res.is_loop = scope.label_stmt_is_loop;
            res.found = true;
            return res;
        }
    }

    // No matching label: report the error at the identifier's range.
    const r = js_lexer.rangeOfIdentifier(p.source, loc);
    p.log.addRangeErrorFmt(p.source, r, p.allocator, "There is no containing label named \"{s}\"", .{name}) catch unreachable;

    // Allocate an "unbound" symbol
    const ref = p.newSymbol(.unbound, name) catch unreachable;

    // Track how many times we've referenced this symbol
    p.recordUsage(ref);

    return res;
}
|
||
|
||
/// Visits a class declaration or expression: decorators, the extends clause,
/// every property/method body, and static blocks. Also performs the
/// TypeScript constructor-parameter-property lowering
/// (`constructor(public x: T)` -> `this.x = x;` plus a class field).
/// Returns the "shadow" ref used to capture the class name inside the class
/// body, or Ref.None when no inner name turned out to be needed.
fn visitClass(p: *P, name_scope_loc: logger.Loc, class: *G.Class, default_name_ref: Ref) Ref {
    if (only_scan_imports_and_do_not_visit) {
        @compileError("only_scan_imports_and_do_not_visit must not run this.");
    }

    class.ts_decorators = p.visitTSDecorators(class.ts_decorators);

    if (class.class_name) |name| {
        p.recordDeclaredSymbol(name.ref.?) catch unreachable;
    }

    p.pushScopeForVisitPass(.class_name, name_scope_loc) catch unreachable;
    const old_enclosing_class_keyword = p.enclosing_class_keyword;
    p.enclosing_class_keyword = class.class_keyword;
    // Class bodies are always strict mode.
    p.current_scope.recursiveSetStrictMode(.implicit_strict_mode_class);
    var shadow_ref = Ref.None;

    // Insert a shadowing name that spans the whole class, which matches
    // JavaScript's semantics. The class body (and extends clause) "captures" the
    // original value of the name. This matters for class statements because the
    // symbol can be re-assigned to something else later. The captured values
    // must be the original value of the name, not the re-assigned value.
    // Use "const" for this symbol to match JavaScript run-time semantics. You
    // are not allowed to assign to this symbol (it throws a TypeError).
    if (class.class_name) |name| {
        shadow_ref = name.ref.?;
        p.current_scope.members.put(
            p.allocator,
            p.symbols.items[shadow_ref.innerIndex()].original_name,
            Scope.Member{ .ref = name.ref orelse Ref.None, .loc = name.loc },
        ) catch unreachable;
    } else {
        // Anonymous class: reserve a synthetic inner name in case the body
        // ends up needing one (static "this", default export naming).
        const name_str: []const u8 = if (default_name_ref.isNull()) "_this" else "_default";
        shadow_ref = p.newSymbol(.constant, name_str) catch unreachable;
    }

    p.recordDeclaredSymbol(shadow_ref) catch unreachable;

    if (class.extends) |extends| {
        class.extends = p.visitExpr(extends);
    }

    {
        p.pushScopeForVisitPass(.class_body, class.body_loc) catch unreachable;
        defer {
            p.popScope();
            p.enclosing_class_keyword = old_enclosing_class_keyword;
        }

        var constructor_function: ?*E.Function = null;
        for (class.properties) |*property| {
            if (property.kind == .class_static_block) {
                // Static blocks run with "this" bound to the class and need
                // their own function-like visit state, saved and restored here.
                const old_fn_or_arrow_data = p.fn_or_arrow_data_visit;
                const old_fn_only_data = p.fn_only_data_visit;
                p.fn_or_arrow_data_visit = .{};
                p.fn_only_data_visit = .{
                    .is_this_nested = true,
                    .is_new_target_allowed = true,
                    .class_name_ref = &shadow_ref,

                    // TODO: down transpilation
                    .should_replace_this_with_class_name_ref = false,
                };
                p.pushScopeForVisitPass(.class_static_init, property.class_static_block.?.loc) catch unreachable;

                // Make it an error to use "arguments" in a static class block
                p.current_scope.forbid_arguments = true;

                var list = property.class_static_block.?.stmts.listManaged(p.allocator);
                p.visitStmts(&list, .fn_body) catch unreachable;
                property.class_static_block.?.stmts = js_ast.BabyList(Stmt).fromList(list);
                p.popScope();

                p.fn_or_arrow_data_visit = old_fn_or_arrow_data;
                p.fn_only_data_visit = old_fn_only_data;

                continue;
            }
            property.ts_decorators = p.visitTSDecorators(property.ts_decorators);
            const is_private = if (property.key != null) @as(Expr.Tag, property.key.?.data) == .e_private_identifier else false;

            // Special-case EPrivateIdentifier to allow it here

            if (is_private) {
                p.recordDeclaredSymbol(property.key.?.data.e_private_identifier.ref) catch unreachable;
            } else if (property.key) |key| {
                property.key = p.visitExpr(key);
            }

            // Make it an error to use "arguments" in a class body
            p.current_scope.forbid_arguments = true;
            defer p.current_scope.forbid_arguments = false;

            // The value of "this" is shadowed inside property values
            const old_is_this_captured = p.fn_only_data_visit.is_this_nested;
            const old_class_name_ref = p.fn_only_data_visit.class_name_ref;
            p.fn_only_data_visit.is_this_nested = true;
            p.fn_only_data_visit.is_new_target_allowed = true;
            p.fn_only_data_visit.class_name_ref = &shadow_ref;
            defer p.fn_only_data_visit.is_this_nested = old_is_this_captured;
            defer p.fn_only_data_visit.class_name_ref = old_class_name_ref;

            // We need to explicitly assign the name to the property initializer if it
            // will be transformed such that it is no longer an inline initializer.

            var constructor_function_: ?*E.Function = null;

            var name_to_keep: ?string = null;
            if (is_private) {} else if (!property.flags.contains(.is_method) and !property.flags.contains(.is_computed)) {
                // Plain (non-computed) field: its string key names anonymous
                // function/class values assigned to it.
                if (property.key) |key| {
                    if (@as(Expr.Tag, key.data) == .e_string) {
                        name_to_keep = key.data.e_string.string(p.allocator) catch unreachable;
                    }
                }
            } else if (property.flags.contains(.is_method)) {
                // Remember the constructor for the TS parameter-property
                // lowering performed after the property loop.
                if (comptime is_typescript_enabled) {
                    if (property.value.?.data == .e_function and property.key.?.data == .e_string and
                        property.key.?.data.e_string.eqlComptime("constructor"))
                    {
                        constructor_function_ = property.value.?.data.e_function;
                        constructor_function = constructor_function_;
                    }
                }
            }

            if (property.value) |val| {
                if (name_to_keep) |name| {
                    const was_anon = val.isAnonymousNamed();
                    property.value = p.maybeKeepExprSymbolName(p.visitExpr(val), name, was_anon);
                } else {
                    property.value = p.visitExpr(val);
                }

                if (comptime is_typescript_enabled) {
                    // Re-capture the constructor: visiting may have replaced
                    // the E.Function node.
                    if (constructor_function_ != null and property.value != null and property.value.?.data == .e_function) {
                        constructor_function = property.value.?.data.e_function;
                    }
                }
            }

            if (property.initializer) |val| {
                // if (property.flags.is_static and )
                if (name_to_keep) |name| {
                    const was_anon = val.isAnonymousNamed();
                    property.initializer = p.maybeKeepExprSymbolName(p.visitExpr(val), name, was_anon);
                } else {
                    property.initializer = p.visitExpr(val);
                }
            }
        }

        // note: our version assumes useDefineForClassFields is true
        if (comptime is_typescript_enabled) {
            if (constructor_function) |constructor| {
                // Count TypeScript parameter properties
                // (`constructor(public x)`) that need `this.x = x;` inserted.
                var to_add: usize = 0;
                for (constructor.func.args) |arg| {
                    to_add += @intFromBool(arg.is_typescript_ctor_field and arg.binding.data == .b_identifier);
                }

                // if this is an expression, we can move statements after super() because there will be 0 decorators
                var super_index: ?usize = null;
                if (class.extends != null) {
                    for (constructor.func.body.stmts, 0..) |stmt, index| {
                        if (stmt.data != .s_expr or stmt.data.s_expr.value.data != .e_call or stmt.data.s_expr.value.data.e_call.target.data != .e_super) continue;
                        super_index = index;
                        break;
                    }
                }

                if (to_add > 0) {
                    // to match typescript behavior, we also must prepend to the class body
                    var stmts = std.ArrayList(Stmt).fromOwnedSlice(p.allocator, constructor.func.body.stmts);
                    stmts.ensureUnusedCapacity(to_add) catch unreachable;
                    var class_body = std.ArrayList(G.Property).fromOwnedSlice(p.allocator, class.properties);
                    class_body.ensureUnusedCapacity(to_add) catch unreachable;
                    var j: usize = 0;

                    for (constructor.func.args) |arg| {
                        if (arg.is_typescript_ctor_field) {
                            switch (arg.binding.data) {
                                .b_identifier => |id| {
                                    const arg_symbol = p.symbols.items[id.ref.innerIndex()];
                                    const name = arg_symbol.original_name;
                                    const arg_ident = p.newExpr(E.Identifier{ .ref = id.ref }, arg.binding.loc);

                                    // Insert `this.<name> = <name>;` after the
                                    // super() call, or at the top if none.
                                    stmts.insert(if (super_index) |k| j + k + 1 else j, Stmt.assign(
                                        p.newExpr(E.Dot{
                                            .target = p.newExpr(E.This{}, arg.binding.loc),
                                            .name = name,
                                            .name_loc = arg.binding.loc,
                                        }, arg.binding.loc),
                                        arg_ident,
                                    )) catch unreachable;
                                    // O(N)
                                    // Shift the class body right by one to make
                                    // room for the field declaration at index j.
                                    class_body.items.len += 1;
                                    bun.copy(G.Property, class_body.items[j + 1 ..], class_body.items[j .. class_body.items.len - 1]);
                                    // Copy the argument name symbol to prevent the class field declaration from being renamed
                                    // but not the constructor argument.
                                    const field_symbol_ref = p.declareSymbol(.other, arg.binding.loc, name) catch id.ref;
                                    field_symbol_ref.getSymbol(p.symbols.items).must_not_be_renamed = true;
                                    const field_ident = p.newExpr(E.Identifier{ .ref = field_symbol_ref }, arg.binding.loc);
                                    class_body.items[j] = G.Property{ .key = field_ident };
                                    j += 1;
                                },
                                else => {},
                            }
                        }
                    }

                    class.properties = class_body.items;
                    constructor.func.body.stmts = stmts.items;
                }
            }
        }
    }

    if (p.symbols.items[shadow_ref.innerIndex()].use_count_estimate == 0) {
        // Nothing inside the class body referenced the inner name: drop it.
        shadow_ref = Ref.None;
    } else if (class.class_name == null) {
        // If there was originally no class name but something inside needed one
        // (e.g. there was a static property initializer that referenced "this"),
        // store our generated name so the class expression ends up with a name.
        class.class_name = LocRef{
            .ref = shadow_ref,
            .loc = name_scope_loc,
        };
        p.recordDeclaredSymbol(shadow_ref) catch unreachable;
    }

    // class name scope
    p.popScope();

    return shadow_ref;
}
|
||
|
||
/// Placeholder for esbuild's "keep names" statement helper.
/// Not implemented: any code path that references this function is a
/// compile-time error, so it can never be reached at runtime.
fn keepStmtSymbolName(p: *P, loc: logger.Loc, ref: Ref, name: string) Stmt {
    _ = p;
    _ = loc;
    _ = ref;
    _ = name;
    // TODO:
    @compileError("not implemented");
}
|
||
|
||
/// Returns the symbol Ref for the runtime helper import `name`, creating and
/// registering the symbol on first use. Marks the file as depending on the
/// runtime.
fn runtimeIdentifierRef(p: *P, loc: logger.Loc, comptime name: string) Ref {
    p.has_called_runtime = true;

    // Fast path: the helper was already referenced earlier in this file.
    if (p.runtime_imports.contains(name)) {
        return p.runtime_imports.at(name).?;
    }

    if (!p.options.bundle) {
        // Transpile-only mode: use a generated symbol.
        const generated_symbol = p.declareGeneratedSymbol(.other, name) catch unreachable;
        p.runtime_imports.put(name, generated_symbol);
        return generated_symbol;
    }

    // Bundling mode: create a fresh symbol and track it in the module
    // scope's generated-symbol list so the linker can see it.
    const loc_ref = js_ast.LocRef{
        .loc = loc,
        .ref = p.newSymbol(.other, name) catch unreachable,
    };
    p.runtime_imports.put(
        name,
        loc_ref.ref.?,
    );
    p.module_scope.generated.push(p.allocator, loc_ref.ref.?) catch unreachable;
    return loc_ref.ref.?;
}
|
||
|
||
/// Builds an import-identifier expression referencing the runtime helper
/// `name`, recording a usage of its symbol.
fn runtimeIdentifier(p: *P, loc: logger.Loc, comptime name: string) Expr {
    const helper_ref = p.runtimeIdentifierRef(loc, name);
    p.recordUsage(helper_ref);
    return p.newExpr(
        E.ImportIdentifier{
            .ref = helper_ref,
            .was_originally_identifier = false,
        },
        loc,
    );
}
|
||
|
||
/// Builds a call expression invoking the runtime helper `name` with `args`,
/// e.g. `__legacyDecorateClassTS(...)`.
fn callRuntime(p: *P, loc: logger.Loc, comptime name: string, args: []Expr) Expr {
    const callee = p.runtimeIdentifier(loc, name);
    return p.newExpr(
        E.Call{
            .target = callee,
            .args = ExprNodeList.init(args),
        },
        loc,
    );
}
|
||
|
||
// Try separating the list for appending, so that it's not a pointer.
|
||
fn visitStmts(p: *P, stmts: *ListManaged(Stmt), kind: StmtsKind) anyerror!void {
|
||
if (only_scan_imports_and_do_not_visit) {
|
||
@compileError("only_scan_imports_and_do_not_visit must not run this.");
|
||
}
|
||
|
||
const initial_scope = if (comptime Environment.allow_assert) p.current_scope else {};
|
||
|
||
{
|
||
// Save the current control-flow liveness. This represents if we are
|
||
// currently inside an "if (false) { ... }" block.
|
||
const old_is_control_flow_dead = p.is_control_flow_dead;
|
||
defer p.is_control_flow_dead = old_is_control_flow_dead;
|
||
|
||
var before = ListManaged(Stmt).init(p.allocator);
|
||
defer before.deinit();
|
||
|
||
var after = ListManaged(Stmt).init(p.allocator);
|
||
defer after.deinit();
|
||
|
||
// Preprocess TypeScript enums to improve code generation. Otherwise
|
||
// uses of an enum before that enum has been declared won't be inlined:
|
||
//
|
||
// console.log(Foo.FOO) // We want "FOO" to be inlined here
|
||
// const enum Foo { FOO = 0 }
|
||
//
|
||
// The TypeScript compiler itself contains code with this pattern, so
|
||
// it's important to implement this optimization.
|
||
var preprocessed_enums: std.ArrayListUnmanaged([]Stmt) = .{};
|
||
defer preprocessed_enums.deinit(p.allocator);
|
||
if (p.scopes_in_order_for_enum.count() > 0) {
|
||
var found: usize = 0;
|
||
for (stmts.items) |*stmt| {
|
||
if (stmt.data == .s_enum) {
|
||
const old_scopes_in_order = p.scope_order_to_visit;
|
||
defer p.scope_order_to_visit = old_scopes_in_order;
|
||
|
||
p.scope_order_to_visit = p.scopes_in_order_for_enum.get(stmt.loc).?;
|
||
|
||
var temp = ListManaged(Stmt).init(p.allocator);
|
||
try p.visitAndAppendStmt(&temp, stmt);
|
||
try preprocessed_enums.append(p.allocator, temp.items);
|
||
found += 1;
|
||
}
|
||
}
|
||
}
|
||
|
||
if (p.current_scope == p.module_scope) {
|
||
p.macro.prepend_stmts = &before;
|
||
}
|
||
|
||
// visit all statements first
|
||
var visited = try ListManaged(Stmt).initCapacity(p.allocator, stmts.items.len);
|
||
defer visited.deinit();
|
||
|
||
const prev_nearest_stmt_list = p.nearest_stmt_list;
|
||
defer p.nearest_stmt_list = prev_nearest_stmt_list;
|
||
p.nearest_stmt_list = &before;
|
||
|
||
var preprocessed_enum_i: usize = 0;
|
||
|
||
for (stmts.items) |*stmt| {
|
||
const list = list_getter: {
|
||
switch (stmt.data) {
|
||
.s_export_equals => {
|
||
// TypeScript "export = value;" becomes "module.exports = value;". This
|
||
// must happen at the end after everything is parsed because TypeScript
|
||
// moves this statement to the end when it generates code.
|
||
break :list_getter &after;
|
||
},
|
||
.s_function => |data| {
|
||
if (
|
||
// Manually hoist block-level function declarations to preserve semantics.
|
||
// This is only done for function declarations that are not generators
|
||
// or async functions, since this is a backwards-compatibility hack from
|
||
// Annex B of the JavaScript standard.
|
||
!p.current_scope.kindStopsHoisting() and
|
||
p.symbols.items[data.func.name.?.ref.?.innerIndex()].kind == .hoisted_function)
|
||
{
|
||
break :list_getter &before;
|
||
}
|
||
},
|
||
.s_enum => {
|
||
const enum_stmts = preprocessed_enums.items[preprocessed_enum_i];
|
||
preprocessed_enum_i += 1;
|
||
try visited.appendSlice(enum_stmts);
|
||
p.scope_order_to_visit = p.scope_order_to_visit[1..];
|
||
continue;
|
||
},
|
||
else => {},
|
||
}
|
||
break :list_getter &visited;
|
||
};
|
||
try p.visitAndAppendStmt(list, stmt);
|
||
}
|
||
|
||
// Transform block-level function declarations into variable declarations
|
||
if (before.items.len > 0) {
|
||
var let_decls = ListManaged(G.Decl).init(p.allocator);
|
||
var var_decls = ListManaged(G.Decl).init(p.allocator);
|
||
var non_fn_stmts = ListManaged(Stmt).init(p.allocator);
|
||
var fn_stmts = std.AutoHashMap(Ref, u32).init(p.allocator);
|
||
|
||
defer {
|
||
non_fn_stmts.deinit();
|
||
fn_stmts.deinit();
|
||
}
|
||
|
||
for (before.items) |stmt| {
|
||
switch (stmt.data) {
|
||
.s_function => |data| {
|
||
// This transformation of function declarations in nested scopes is
|
||
// intended to preserve the hoisting semantics of the original code. In
|
||
// JavaScript, function hoisting works differently in strict mode vs.
|
||
// sloppy mode code. We want the code we generate to use the semantics of
|
||
// the original environment, not the generated environment. However, if
|
||
// direct "eval" is present then it's not possible to preserve the
|
||
// semantics because we need two identifiers to do that and direct "eval"
|
||
// means neither identifier can be renamed to something else. So in that
|
||
// case we give up and do not preserve the semantics of the original code.
|
||
const name_ref = data.func.name.?.ref.?;
|
||
if (p.current_scope.contains_direct_eval) {
|
||
if (p.hoisted_ref_for_sloppy_mode_block_fn.get(name_ref)) |hoisted_ref| {
|
||
// Merge the two identifiers back into a single one
|
||
p.symbols.items[hoisted_ref.innerIndex()].link = name_ref;
|
||
}
|
||
non_fn_stmts.append(stmt) catch bun.outOfMemory();
|
||
continue;
|
||
}
|
||
|
||
const gpe = fn_stmts.getOrPut(name_ref) catch bun.outOfMemory();
|
||
var index = gpe.value_ptr.*;
|
||
if (!gpe.found_existing) {
|
||
index = @as(u32, @intCast(let_decls.items.len));
|
||
gpe.value_ptr.* = index;
|
||
let_decls.append(.{
|
||
.binding = p.b(B.Identifier{
|
||
.ref = name_ref,
|
||
}, data.func.name.?.loc),
|
||
}) catch unreachable;
|
||
|
||
// Also write the function to the hoisted sibling symbol if applicable
|
||
if (p.hoisted_ref_for_sloppy_mode_block_fn.get(name_ref)) |hoisted_ref| {
|
||
p.recordUsage(name_ref);
|
||
var_decls.append(.{
|
||
.binding = p.b(
|
||
B.Identifier{ .ref = hoisted_ref },
|
||
data.func.name.?.loc,
|
||
),
|
||
.value = p.newExpr(
|
||
E.Identifier{
|
||
.ref = name_ref,
|
||
},
|
||
data.func.name.?.loc,
|
||
),
|
||
}) catch bun.outOfMemory();
|
||
}
|
||
}
|
||
|
||
// The last function statement for a given symbol wins
|
||
data.func.name = null;
|
||
let_decls.items[index].value = p.newExpr(
|
||
E.Function{
|
||
.func = data.func,
|
||
},
|
||
stmt.loc,
|
||
);
|
||
},
|
||
else => {
|
||
non_fn_stmts.append(stmt) catch unreachable;
|
||
},
|
||
}
|
||
}
|
||
before.items.len = 0;
|
||
|
||
before.ensureUnusedCapacity(@as(usize, @intFromBool(let_decls.items.len > 0)) + @as(usize, @intFromBool(var_decls.items.len > 0)) + non_fn_stmts.items.len) catch unreachable;
|
||
|
||
if (let_decls.items.len > 0) {
|
||
before.appendAssumeCapacity(p.s(
|
||
S.Local{
|
||
.kind = .k_let,
|
||
.decls = Decl.List.fromList(let_decls),
|
||
},
|
||
let_decls.items[0].value.?.loc,
|
||
));
|
||
}
|
||
|
||
if (var_decls.items.len > 0) {
|
||
const relocated = p.maybeRelocateVarsToTopLevel(var_decls.items, .normal);
|
||
if (relocated.ok) {
|
||
if (relocated.stmt) |new| {
|
||
before.appendAssumeCapacity(new);
|
||
}
|
||
} else {
|
||
before.appendAssumeCapacity(p.s(
|
||
S.Local{
|
||
.kind = .k_var,
|
||
.decls = Decl.List.fromList(var_decls),
|
||
},
|
||
var_decls.items[0].value.?.loc,
|
||
));
|
||
}
|
||
}
|
||
|
||
before.appendSliceAssumeCapacity(non_fn_stmts.items);
|
||
}
|
||
|
||
var visited_count = visited.items.len;
|
||
if (p.is_control_flow_dead) {
|
||
var end: usize = 0;
|
||
for (visited.items) |item| {
|
||
if (!SideEffects.shouldKeepStmtInDeadControlFlow(p, item, p.allocator)) {
|
||
continue;
|
||
}
|
||
|
||
visited.items[end] = item;
|
||
end += 1;
|
||
}
|
||
visited_count = end;
|
||
}
|
||
|
||
const total_size = visited_count + before.items.len + after.items.len;
|
||
|
||
if (total_size != stmts.items.len) {
|
||
try stmts.resize(total_size);
|
||
}
|
||
|
||
var remain = stmts.items;
|
||
|
||
for (before.items) |item| {
|
||
remain[0] = item;
|
||
remain = remain[1..];
|
||
}
|
||
|
||
const visited_slice = visited.items[0..visited_count];
|
||
for (visited_slice) |item| {
|
||
remain[0] = item;
|
||
remain = remain[1..];
|
||
}
|
||
|
||
for (after.items) |item| {
|
||
remain[0] = item;
|
||
remain = remain[1..];
|
||
}
|
||
}
|
||
|
||
// Lower using declarations
|
||
if (kind != .switch_stmt and p.shouldLowerUsingDeclarations(stmts.items)) {
|
||
var ctx = try LowerUsingDeclarationsContext.init(p);
|
||
ctx.scanStmts(p, stmts.items);
|
||
stmts.* = ctx.finalize(p, stmts.items, p.current_scope.parent == null);
|
||
}
|
||
|
||
if (comptime Environment.allow_assert)
|
||
// if this fails it means that scope pushing/popping is not balanced
|
||
assert(p.current_scope == initial_scope);
|
||
|
||
if (!p.options.features.minify_syntax or !p.options.features.dead_code_elimination) {
|
||
return;
|
||
}
|
||
|
||
if (p.current_scope.parent != null and !p.current_scope.contains_direct_eval) {
|
||
// Remove inlined constants now that we know whether any of these statements
|
||
// contained a direct eval() or not. This can't be done earlier when we
|
||
// encounter the constant because we haven't encountered the eval() yet.
|
||
// Inlined constants are not removed if they are in a top-level scope or
|
||
// if they are exported (which could be in a nested TypeScript namespace).
|
||
if (p.const_values.count() > 0) {
|
||
const items: []Stmt = stmts.items;
|
||
for (items) |*stmt| {
|
||
switch (stmt.data) {
|
||
.s_empty, .s_comment, .s_directive, .s_debugger, .s_type_script => continue,
|
||
.s_local => |local| {
|
||
if (!local.is_export and !local.was_commonjs_export) {
|
||
var decls: []Decl = local.decls.slice();
|
||
var end: usize = 0;
|
||
var any_decl_in_const_values = local.kind == .k_const;
|
||
for (decls) |decl| {
|
||
if (decl.binding.data == .b_identifier) {
|
||
if (p.const_values.contains(decl.binding.data.b_identifier.ref)) {
|
||
any_decl_in_const_values = true;
|
||
const symbol = p.symbols.items[decl.binding.data.b_identifier.ref.innerIndex()];
|
||
if (symbol.use_count_estimate == 0) {
|
||
// Skip declarations that are constants with zero usage
|
||
continue;
|
||
}
|
||
}
|
||
}
|
||
decls[end] = decl;
|
||
end += 1;
|
||
}
|
||
local.decls.len = @as(u32, @truncate(end));
|
||
if (any_decl_in_const_values) {
|
||
if (end == 0) {
|
||
stmt.* = stmt.*.toEmpty();
|
||
}
|
||
continue;
|
||
}
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
|
||
// Break after processing relevant statements
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
|
||
var is_control_flow_dead = false;
|
||
|
||
var output = ListManaged(Stmt).initCapacity(p.allocator, stmts.items.len) catch unreachable;
|
||
|
||
for (stmts.items) |stmt| {
|
||
if (is_control_flow_dead and p.options.features.dead_code_elimination and
|
||
!SideEffects.shouldKeepStmtInDeadControlFlow(p, stmt, p.allocator))
|
||
{
|
||
// Strip unnecessary statements if the control flow is dead here
|
||
continue;
|
||
}
|
||
|
||
// Inline single-use variable declarations where possible:
|
||
//
|
||
// // Before
|
||
// let x = fn();
|
||
// return x.y();
|
||
//
|
||
// // After
|
||
// return fn().y();
|
||
//
|
||
// The declaration must not be exported. We can't just check for the
|
||
// "export" keyword because something might do "export {id};" later on.
|
||
// Instead we just ignore all top-level declarations for now. That means
|
||
// this optimization currently only applies in nested scopes.
|
||
//
|
||
// Ignore declarations if the scope is shadowed by a direct "eval" call.
|
||
// The eval'd code may indirectly reference this symbol and the actual
|
||
// use count may be greater than 1.
|
||
if (p.current_scope != p.module_scope and !p.current_scope.contains_direct_eval) {
|
||
// Keep inlining variables until a failure or until there are none left.
|
||
// That handles cases like this:
|
||
//
|
||
// // Before
|
||
// let x = fn();
|
||
// let y = x.prop;
|
||
// return y;
|
||
//
|
||
// // After
|
||
// return fn().prop;
|
||
//
|
||
inner: while (output.items.len > 0) {
|
||
// Ignore "var" declarations since those have function-level scope and
|
||
// we may not have visited all of their uses yet by this point. We
|
||
// should have visited all the uses of "let" and "const" declarations
|
||
// by now since they are scoped to this block which we just finished
|
||
// visiting.
|
||
const prev_statement = &output.items[output.items.len - 1];
|
||
switch (prev_statement.data) {
|
||
.s_local => {
|
||
var local = prev_statement.data.s_local;
|
||
if (local.decls.len == 0 or local.kind == .k_var or local.is_export) {
|
||
break;
|
||
}
|
||
|
||
const last: *Decl = local.decls.last().?;
|
||
// The variable must be initialized, since we will be substituting
|
||
// the value into the usage.
|
||
if (last.value == null)
|
||
break;
|
||
|
||
// The binding must be an identifier that is only used once.
|
||
// Ignore destructuring bindings since that's not the simple case.
|
||
// Destructuring bindings could potentially execute side-effecting
|
||
// code which would invalidate reordering.
|
||
|
||
switch (last.binding.data) {
|
||
.b_identifier => |ident| {
|
||
const id = ident.ref;
|
||
|
||
const symbol: *const Symbol = &p.symbols.items[id.innerIndex()];
|
||
|
||
// Try to substitute the identifier with the initializer. This will
|
||
// fail if something with side effects is in between the declaration
|
||
// and the usage.
|
||
if (symbol.use_count_estimate == 1) {
|
||
if (p.substituteSingleUseSymbolInStmt(stmt, id, last.value.?)) {
|
||
switch (local.decls.len) {
|
||
1 => {
|
||
local.decls.len = 0;
|
||
output.items.len -= 1;
|
||
continue :inner;
|
||
},
|
||
else => {
|
||
local.decls.len -= 1;
|
||
continue :inner;
|
||
},
|
||
}
|
||
}
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
},
|
||
else => {},
|
||
}
|
||
break;
|
||
}
|
||
}
|
||
|
||
// don't merge super calls to ensure they are called before "this" is accessed
|
||
if (stmt.isSuperCall()) {
|
||
output.append(stmt) catch unreachable;
|
||
continue;
|
||
}
|
||
|
||
// The following calls to `joinWithComma` are only enabled during bundling. We do this
|
||
// to avoid changing line numbers too much for source maps
|
||
|
||
switch (stmt.data) {
|
||
.s_empty => continue,
|
||
|
||
// skip directives for now
|
||
.s_directive => continue,
|
||
|
||
.s_local => |local| {
|
||
// Merge adjacent local statements
|
||
if (output.items.len > 0) {
|
||
var prev_stmt = &output.items[output.items.len - 1];
|
||
if (prev_stmt.data == .s_local and
|
||
local.canMergeWith(prev_stmt.data.s_local))
|
||
{
|
||
prev_stmt.data.s_local.decls.append(p.allocator, local.decls.slice()) catch unreachable;
|
||
continue;
|
||
}
|
||
}
|
||
},
|
||
|
||
.s_expr => |s_expr| {
|
||
// Merge adjacent expression statements
|
||
if (output.items.len > 0) {
|
||
var prev_stmt = &output.items[output.items.len - 1];
|
||
if (prev_stmt.data == .s_expr and !prev_stmt.isSuperCall() and p.options.runtimeMergeAdjacentExpressionStatements()) {
|
||
prev_stmt.data.s_expr.does_not_affect_tree_shaking = prev_stmt.data.s_expr.does_not_affect_tree_shaking and
|
||
s_expr.does_not_affect_tree_shaking;
|
||
prev_stmt.data.s_expr.value = prev_stmt.data.s_expr.value.joinWithComma(
|
||
s_expr.value,
|
||
p.allocator,
|
||
);
|
||
continue;
|
||
} else if
|
||
//
|
||
// Input:
|
||
// var f;
|
||
// f = 123;
|
||
// Output:
|
||
// var f = 123;
|
||
//
|
||
// This doesn't handle every case. Only the very simple one.
|
||
(prev_stmt.data == .s_local and
|
||
s_expr.value.data == .e_binary and
|
||
prev_stmt.data.s_local.decls.len == 1 and
|
||
s_expr.value.data.e_binary.op == .bin_assign and
|
||
// we can only do this with var because var is hoisted
|
||
// the statement we are merging into may use the statement before its defined.
|
||
prev_stmt.data.s_local.kind == .k_var)
|
||
{
|
||
var prev_local = prev_stmt.data.s_local;
|
||
const bin_assign = s_expr.value.data.e_binary;
|
||
|
||
if (bin_assign.left.data == .e_identifier) {
|
||
var decl = &prev_local.decls.slice()[0];
|
||
if (decl.binding.data == .b_identifier and
|
||
decl.binding.data.b_identifier.ref.eql(bin_assign.left.data.e_identifier.ref) and
|
||
// If the value was assigned, we shouldn't merge it incase it was used in the current statement
|
||
// https://github.com/oven-sh/bun/issues/2948
|
||
// We don't have a more granular way to check symbol usage so this is the best we can do
|
||
decl.value == null)
|
||
{
|
||
decl.value = bin_assign.right;
|
||
p.ignoreUsage(bin_assign.left.data.e_identifier.ref);
|
||
continue;
|
||
}
|
||
}
|
||
}
|
||
}
|
||
},
|
||
.s_switch => |s_switch| {
|
||
// Absorb a previous expression statement
|
||
if (output.items.len > 0 and p.options.runtimeMergeAdjacentExpressionStatements()) {
|
||
var prev_stmt = &output.items[output.items.len - 1];
|
||
if (prev_stmt.data == .s_expr and !prev_stmt.isSuperCall()) {
|
||
s_switch.test_ = prev_stmt.data.s_expr.value.joinWithComma(s_switch.test_, p.allocator);
|
||
output.items.len -= 1;
|
||
}
|
||
}
|
||
},
|
||
.s_if => |s_if| {
|
||
// Absorb a previous expression statement
|
||
if (output.items.len > 0 and p.options.runtimeMergeAdjacentExpressionStatements()) {
|
||
var prev_stmt = &output.items[output.items.len - 1];
|
||
if (prev_stmt.data == .s_expr and !prev_stmt.isSuperCall()) {
|
||
s_if.test_ = prev_stmt.data.s_expr.value.joinWithComma(s_if.test_, p.allocator);
|
||
output.items.len -= 1;
|
||
}
|
||
}
|
||
|
||
// TODO: optimize jump
|
||
},
|
||
|
||
.s_return => |ret| {
|
||
// Merge return statements with the previous expression statement
|
||
if (output.items.len > 0 and ret.value != null and p.options.runtimeMergeAdjacentExpressionStatements()) {
|
||
var prev_stmt = &output.items[output.items.len - 1];
|
||
if (prev_stmt.data == .s_expr and !prev_stmt.isSuperCall()) {
|
||
ret.value = prev_stmt.data.s_expr.value.joinWithComma(ret.value.?, p.allocator);
|
||
prev_stmt.* = stmt;
|
||
continue;
|
||
}
|
||
}
|
||
|
||
is_control_flow_dead = true;
|
||
},
|
||
|
||
.s_break, .s_continue => {
|
||
is_control_flow_dead = true;
|
||
},
|
||
|
||
.s_throw => {
|
||
// Merge throw statements with the previous expression statement
|
||
if (output.items.len > 0 and p.options.runtimeMergeAdjacentExpressionStatements()) {
|
||
var prev_stmt = &output.items[output.items.len - 1];
|
||
if (prev_stmt.data == .s_expr and !prev_stmt.isSuperCall()) {
|
||
prev_stmt.* = p.s(S.Throw{
|
||
.value = prev_stmt.data.s_expr.value.joinWithComma(
|
||
stmt.data.s_throw.value,
|
||
p.allocator,
|
||
),
|
||
}, stmt.loc);
|
||
continue;
|
||
}
|
||
}
|
||
|
||
is_control_flow_dead = true;
|
||
},
|
||
|
||
else => {},
|
||
}
|
||
|
||
output.append(stmt) catch unreachable;
|
||
}
|
||
|
||
stmts.deinit();
|
||
stmts.* = output;
|
||
}
|
||
|
||
/// Recursively walk a binding pattern and append one declaration (with no
/// initializer) per bound position to `decls`.
///
/// Holes in patterns (`b_missing`) bind nothing and are skipped. Array and
/// object patterns recurse into their element / property bindings. The only
/// error this can return is an allocation failure from `decls.append`.
fn extractDeclsForBinding(binding: Binding, decls: *ListManaged(G.Decl)) anyerror!void {
    switch (binding.data) {
        .b_missing => {},
        .b_identifier => {
            try decls.append(G.Decl{ .binding = binding });
        },
        .b_array => |arr| {
            for (arr.items) |item| {
                // Propagate allocation failure instead of `catch unreachable`:
                // the append inside the recursion can legitimately fail with
                // OOM, and the signature already allows returning errors.
                try extractDeclsForBinding(item.binding, decls);
            }
        },
        .b_object => |obj| {
            for (obj.properties) |prop| {
                try extractDeclsForBinding(prop.value, decls);
            }
        },
    }
}
|
||
|
||
/// Build the expression `module.exports` at `loc`, using the module wrapper's
/// `module` symbol as the target of the property access.
pub inline fn @"module.exports"(p: *P, loc: logger.Loc) Expr {
    const module_target = p.newExpr(E.Identifier{ .ref = p.module_ref }, loc);
    return p.newExpr(E.Dot{
        .name = exports_string_name,
        .name_loc = loc,
        .target = module_target,
    }, loc);
}
|
||
|
||
// This assumes that the open parenthesis has already been parsed by the caller
|
||
/// Parse the contents of a parenthesized construct after the caller has
/// already consumed the "(" token. The result is one of:
///   - an arrow function (the parens were its argument list),
///   - a call to an identifier named "async" (when `opts.is_async`),
///   - a comma-joined expression chain,
/// or a syntax error. Ambiguity is resolved by speculatively pushing a
/// function-args scope and converting the parsed expressions to bindings.
pub fn parseParenExpr(p: *P, loc: logger.Loc, level: Level, opts: ParenExprOpts) anyerror!Expr {
    var items_list = ListManaged(Expr).init(p.allocator);
    var errors = DeferredErrors{};
    var arrowArgErrors = DeferredArrowArgErrors{};
    var spread_range = logger.Range{};
    var type_colon_range = logger.Range{};
    var comma_after_spread: ?logger.Loc = null;

    // Push a scope assuming this is an arrow function. It may not be, in which
    // case we'll need to roll this change back. This has to be done ahead of
    // parsing the arguments instead of later on when we hit the "=>" token and
    // we know it's an arrow function because the arguments may have default
    // values that introduce new scopes and declare new symbols. If this is an
    // arrow function, then those new scopes will need to be parented under the
    // scope of the arrow function itself.
    const scope_index = try p.pushScopeForParsePass(.function_args, loc);

    // Allow "in" inside parentheses
    const oldAllowIn = p.allow_in;
    p.allow_in = true;

    // Forbid "await" and "yield", but only for arrow functions
    var old_fn_or_arrow_data = std.mem.toBytes(p.fn_or_arrow_data_parse);
    p.fn_or_arrow_data_parse.arrow_arg_errors = arrowArgErrors;
    p.fn_or_arrow_data_parse.track_arrow_arg_errors = true;

    // Scan over the comma-separated arguments or expressions
    while (p.lexer.token != .t_close_paren) {
        const is_spread = p.lexer.token == .t_dot_dot_dot;

        if (is_spread) {
            spread_range = p.lexer.range();
            // p.markSyntaxFeature()
            try p.lexer.next();
        }

        // We don't know yet whether these are arguments or expressions, so parse
        p.latest_arrow_arg_loc = p.lexer.loc();

        var item = try p.parseExprOrBindings(.comma, &errors);

        if (is_spread) {
            item = p.newExpr(E.Spread{ .value = item }, loc);
        }

        // Skip over types
        if (is_typescript_enabled and p.lexer.token == .t_colon) {
            type_colon_range = p.lexer.range();
            try p.lexer.next();
            try p.skipTypeScriptType(.lowest);
        }

        // There may be a "=" after the type (but not after an "as" cast)
        if (is_typescript_enabled and p.lexer.token == .t_equals and !p.forbid_suffix_after_as_loc.eql(p.lexer.loc())) {
            try p.lexer.next();
            item = Expr.assign(item, try p.parseExpr(.comma));
        }

        items_list.append(item) catch unreachable;

        if (p.lexer.token != .t_comma) {
            break;
        }

        // Spread arguments must come last. If there's a spread argument followed
        // by a comma, remember the comma so we can report it if this turns out
        // to be an arrow function argument list.
        if (is_spread) {
            comma_after_spread = p.lexer.loc();
        }

        // Eat the comma token
        try p.lexer.next();
    }
    var items = items_list.items;

    // The parenthetical construct must end with a close parenthesis
    try p.lexer.expect(.t_close_paren);

    // Restore "in" operator status before we parse the arrow function body
    p.allow_in = oldAllowIn;

    // Also restore "await" and "yield" expression errors
    p.fn_or_arrow_data_parse = std.mem.bytesToValue(@TypeOf(p.fn_or_arrow_data_parse), &old_fn_or_arrow_data);

    // Are these arguments to an arrow function?
    if (p.lexer.token == .t_equals_greater_than or opts.force_arrow_fn or (is_typescript_enabled and p.lexer.token == .t_colon)) {
        // Arrow functions are not allowed inside certain expressions
        if (level.gt(.assign)) {
            try p.lexer.unexpected();
            return error.SyntaxError;
        }

        var invalidLog = LocList.init(p.allocator);
        var args = ListManaged(G.Arg).init(p.allocator);

        if (opts.is_async) {
            // markl,oweredsyntaxpoksdpokasd
        }

        // First, try converting the expressions to bindings
        for (items, 0..) |_, i| {
            var is_spread = false;
            switch (items[i].data) {
                .e_spread => |v| {
                    is_spread = true;
                    items[i] = v.value;
                },
                else => {},
            }

            var item = items[i];
            const tuple = p.convertExprToBindingAndInitializer(&item, &invalidLog, is_spread);
            // double allocations
            args.append(G.Arg{
                .binding = tuple.binding orelse Binding{ .data = Prefill.Data.BMissing, .loc = item.loc },
                .default = tuple.expr,
            }) catch unreachable;
        }

        // Avoid parsing TypeScript code like "a ? (1 + 2) : (3 + 4)" as an arrow
        // function. The ":" after the ")" may be a return type annotation, so we
        // attempt to convert the expressions to bindings first before deciding
        // whether this is an arrow function, and only pick an arrow function if
        // there were no conversion errors.
        if (p.lexer.token == .t_equals_greater_than or ((comptime is_typescript_enabled) and
            invalidLog.items.len == 0 and
            p.trySkipTypeScriptArrowReturnTypeWithBacktracking()) or
            opts.force_arrow_fn)
        {
            p.maybeCommaSpreadError(comma_after_spread);
            p.logArrowArgErrors(&arrowArgErrors);

            // Now that we've decided we're an arrow function, report binding pattern
            // conversion errors
            if (invalidLog.items.len > 0) {
                for (invalidLog.items) |_loc| {
                    _loc.addError(
                        p.log,
                        p.source,
                    );
                }
            }
            var arrow_data = FnOrArrowDataParse{
                .allow_await = if (opts.is_async) AwaitOrYield.allow_expr else AwaitOrYield.allow_ident,
            };
            var arrow = try p.parseArrowBody(args.items, &arrow_data);
            arrow.is_async = opts.is_async;
            arrow.has_rest_arg = spread_range.len > 0;
            p.popScope();
            return p.newExpr(arrow, loc);
        }
    }

    // If we get here, it's not an arrow function so undo the pushing of the
    // scope we did earlier. This needs to flatten any child scopes into the
    // parent scope as if the scope was never pushed in the first place.
    p.popAndFlattenScope(scope_index);

    // If this isn't an arrow function, then types aren't allowed
    if (type_colon_range.len > 0) {
        try p.log.addRangeError(p.source, type_colon_range, "Unexpected \":\"");
        return error.SyntaxError;
    }

    // Are these arguments for a call to a function named "async"?
    if (opts.is_async) {
        p.logExprErrors(&errors);
        const async_expr = p.newExpr(E.Identifier{ .ref = try p.storeNameInRef("async") }, loc);
        return p.newExpr(E.Call{ .target = async_expr, .args = ExprNodeList.init(items) }, loc);
    }

    // Is this a chain of expressions and comma operators?
    if (items.len > 0) {
        p.logExprErrors(&errors);
        if (spread_range.len > 0) {
            // Report the error at the "..." itself (spread_range), not at the
            // unrelated type annotation range. A spread is not valid inside a
            // plain parenthesized comma expression.
            try p.log.addRangeError(p.source, spread_range, "Unexpected \"...\"");
            return error.SyntaxError;
        }

        var value = Expr.joinAllWithComma(items, p.allocator);
        p.markExprAsParenthesized(&value);
        return value;
    }

    // Indicate that we expected an arrow function
    try p.lexer.expected(.t_equals_greater_than);
    return error.SyntaxError;
}
|
||
|
||
// This code is tricky.
|
||
// - Doing it incorrectly will cause segfaults.
|
||
// - Doing it correctly drastically affects runtime performance while parsing larger files
|
||
// The key is in how we remove scopes from the list
|
||
// If we do an orderedRemove, it gets very slow.
|
||
// swapRemove is fast. But a little more dangerous.
|
||
// Instead, we just tombstone it.
|
||
/// Pop the current scope and merge its children directly into its parent, as
/// if the scope had never been pushed. Used when a speculative scope (e.g. a
/// possible-arrow-function argument scope) turns out to be unnecessary.
/// The scope's slot in `scopes_in_order` is tombstoned (set to null) rather
/// than removed, to avoid the O(n) shift of an ordered remove.
pub fn popAndFlattenScope(p: *P, scope_index: usize) void {
    // Move up to the parent scope
    var to_flatten = p.current_scope;
    var parent = to_flatten.parent.?;
    p.current_scope = parent;

    // Erase this scope from the order. This will shift over the indices of all
    // the scopes that were created after us. However, we shouldn't have to
    // worry about other code with outstanding scope indices for these scopes.
    // These scopes were all created in between this scope's push and pop
    // operations, so they should all be child scopes and should all be popped
    // by the time we get here.
    p.scopes_in_order.items[scope_index] = null;
    // Decrement the length so that in code with lots of scopes, we use
    // less memory and do less work. (Only possible when the tombstone is the
    // final entry; otherwise the null slot stays in the middle of the list.)
    p.scopes_in_order.items.len -= @as(usize, @intFromBool(p.scopes_in_order.items.len == scope_index + 1));

    // Remove the last child from the parent scope. The scope being flattened
    // must be the most recently pushed child of its parent.
    const last = parent.children.len - 1;
    if (comptime Environment.allow_assert) assert(parent.children.ptr[last] == to_flatten);
    parent.children.len -|= 1;

    // Re-parent the flattened scope's children onto the parent so lookups
    // through the scope tree still find them.
    for (to_flatten.children.slice()) |item| {
        item.parent = parent;
        parent.children.push(p.allocator, item) catch unreachable;
    }
}
|
||
|
||
/// Report "Unexpected ',' after rest pattern" at the recorded comma location,
/// if one was recorded. A null location or the -1 sentinel means no comma
/// followed a spread, so nothing is reported.
fn maybeCommaSpreadError(p: *P, _comma_after_spread: ?logger.Loc) void {
    if (_comma_after_spread) |comma_loc| {
        if (comma_loc.start == -1) return;

        p.log.addRangeError(
            p.source,
            logger.Range{ .loc = comma_loc, .len = 1 },
            "Unexpected \",\" after rest pattern",
        ) catch unreachable;
    }
}
|
||
|
||
/// When not transpiling we dont use the renamer, so our solution is to generate really
|
||
/// hard to collide with variables, instead of actually making things collision free
|
||
/// Convenience wrapper: generates the temp ref in the scope currently being
/// visited. See `generateTempRefWithScope` for the details.
pub fn generateTempRef(p: *P, default_name: ?string) Ref {
    return p.generateTempRefWithScope(default_name, p.current_scope);
}
|
||
|
||
/// Create a new temporary symbol, register it for later declaration, and
/// record it as generated in `scope`. When the renamer will not run, the
/// caller-provided name is ignored and a collision-resistant name is
/// synthesized from a monotonically increasing counter instead.
pub fn generateTempRefWithScope(p: *P, default_name: ?string, scope: *Scope) Ref {
    // Only honor the preferred name when the renamer can later resolve any
    // collisions it might cause.
    const preferred: ?string = if (p.willUseRenamer()) default_name else null;
    const name: string = preferred orelse generated: {
        p.temp_ref_count += 1;
        break :generated std.fmt.allocPrint(p.allocator, "__bun_temp_ref_{x}$", .{p.temp_ref_count}) catch bun.outOfMemory();
    };

    const ref = p.newSymbol(.other, name) catch bun.outOfMemory();

    // Queue the symbol so a declaration for it is emitted later.
    p.temp_refs_to_declare.append(p.allocator, .{ .ref = ref }) catch bun.outOfMemory();

    // Track the symbol as generated within the target scope.
    scope.generated.append(p.allocator, &.{ref}) catch bun.outOfMemory();

    return ref;
}
|
||
|
||
/// Build a map from each top-level TypeScript enum's symbol ref to a map of
/// its inlinable member values. Only number- and string-valued members are
/// included; other member kinds are skipped. Returns an empty map when hot
/// module reloading is enabled, since enum inlining is disabled there to
/// avoid complicating the HMR graph. The returned maps are allocated with
/// `allocator`; the caller owns them.
pub fn computeTsEnumsMap(p: *const P, allocator: Allocator) !js_ast.Ast.TsEnumsMap {
    // When hot module reloading is enabled, we disable enum inlining
    // to avoid making the HMR graph more complicated.
    if (p.options.features.hot_module_reloading)
        return .{};

    const InlinedEnumValue = js_ast.InlinedEnumValue;
    var map: js_ast.Ast.TsEnumsMap = .{};
    try map.ensureTotalCapacity(allocator, @intCast(p.top_level_enums.items.len));

    for (p.top_level_enums.items) |ref| {
        const entry = p.ref_to_ts_namespace_member.getEntry(ref).?;
        const namespace = entry.value_ptr.namespace;

        var inner_map: bun.StringHashMapUnmanaged(InlinedEnumValue) = .{};
        try inner_map.ensureTotalCapacity(allocator, @intCast(namespace.count()));

        for (namespace.keys(), namespace.values()) |key, val| {
            // Only numeric and string members can be inlined at use sites.
            const encoded: InlinedEnumValue = switch (val.data) {
                .enum_number => |num| InlinedEnumValue.encode(.{ .number = num }),
                .enum_string => |str| InlinedEnumValue.encode(.{ .string = str }),
                else => continue,
            };
            inner_map.putAssumeCapacityNoClobber(key, encoded);
        }

        map.putAssumeCapacity(entry.key_ptr.*, inner_map);
    }
    return map;
}
|
||
|
||
/// Decide whether this statement list needs the `using` / `await using`
/// lowering pass: true when any statement is a local declaration of kind
/// `using` (with the lower_using feature enabled) or `await using`.
fn shouldLowerUsingDeclarations(p: *const P, stmts: []Stmt) bool {
    // TODO: We do not support lowering await, but when we do this needs to point to that var
    const lower_await = false;

    // Fast path: neither lowering is requested, so no scan is needed.
    if (!p.options.features.lower_using and !lower_await) return false;

    for (stmts) |stmt| {
        const local = switch (stmt.data) {
            .s_local => |local| local,
            else => continue,
        };

        // Need to re-check lower_using for the k_using case in case lower_await is true
        const needs_lowering = switch (local.kind) {
            .k_using => p.options.features.lower_using,
            .k_await_using => true,
            else => false,
        };
        if (needs_lowering) return true;
    }

    return false;
}
|
||
|
||
const LowerUsingDeclarationsContext = struct {
|
||
first_using_loc: logger.Loc,
|
||
stack_ref: Ref,
|
||
has_await_using: bool,
|
||
|
||
/// Create a fresh lowering context. `stack_ref` names the temporary that
/// will hold the array of pending disposers; the first `using` location and
/// the await flag are filled in later by `scanStmts`.
pub fn init(p: *P) !LowerUsingDeclarationsContext {
    return .{
        .first_using_loc = logger.Loc.Empty,
        .stack_ref = p.generateTempRef("__stack"),
        .has_await_using = false,
    };
}
|
||
|
||
/// First pass of the `using` lowering: for every `using` / `await using`
/// local declaration in `stmts`, wrap each initializer in a `__using(stack,
/// value, isAwait)` runtime call that registers the value for disposal, and
/// demote the declaration kind to a plain `const` (or `var` at the module
/// entry point when the whole module will be wrapped in try/catch). Also
/// records the first `using` location and whether any `await using` exists,
/// for use by `finalize`.
pub fn scanStmts(ctx: *LowerUsingDeclarationsContext, p: *P, stmts: []Stmt) void {
    for (stmts) |stmt| {
        switch (stmt.data) {
            .s_local => |local| {
                // Only `using` / `await using` declarations need lowering.
                if (!local.kind.isUsing()) continue;

                // Remember where the first `using` appeared so synthesized
                // statements can carry a sensible source location.
                if (ctx.first_using_loc.isEmpty()) {
                    ctx.first_using_loc = stmt.loc;
                }
                if (local.kind == .k_await_using) {
                    ctx.has_await_using = true;
                }
                for (local.decls.slice()) |*decl| {
                    if (decl.value) |*decl_value| {
                        const value_loc = decl_value.loc;
                        p.recordUsage(ctx.stack_ref);
                        // Build the three arguments: the disposer stack, the
                        // original initializer, and the await flag.
                        const args = p.allocator.alloc(Expr, 3) catch bun.outOfMemory();
                        args[0] = Expr{
                            .data = .{ .e_identifier = .{ .ref = ctx.stack_ref } },
                            .loc = stmt.loc,
                        };
                        args[1] = decl_value.*;
                        // 1. always pass this param for hopefully better jit performance
                        // 2. pass 1 or 0 to be shorter than `true` or `false`
                        args[2] = Expr{
                            .data = .{ .e_number = .{ .value = if (local.kind == .k_await_using) 1 else 0 } },
                            .loc = stmt.loc,
                        };
                        // Replace the initializer with the wrapped call.
                        decl.value = p.callRuntime(value_loc, "__using", args);
                    }
                }
                // After lowering, the declaration is an ordinary binding:
                // `var` at the wrapped module entry point so it escapes the
                // generated try/catch scope, `const` everywhere else.
                if (p.will_wrap_module_in_try_catch_for_using and p.current_scope.kind == .entry) {
                    local.kind = .k_var;
                } else {
                    local.kind = .k_const;
                }
            },
            else => {},
        }
    }
}
|
||
|
||
pub fn finalize(ctx: *LowerUsingDeclarationsContext, p: *P, stmts: []Stmt, should_hoist_fns: bool) ListManaged(Stmt) {
|
||
var result = ListManaged(Stmt).init(p.allocator);
|
||
var exports = ListManaged(js_ast.ClauseItem).init(p.allocator);
|
||
var end: u32 = 0;
|
||
for (stmts) |stmt| {
|
||
switch (stmt.data) {
|
||
.s_directive, .s_import, .s_export_from, .s_export_star => {
|
||
// These can't go in a try/catch block
|
||
result.append(stmt) catch bun.outOfMemory();
|
||
continue;
|
||
},
|
||
|
||
.s_class => {
|
||
if (stmt.data.s_class.is_export) {
|
||
// can't go in try/catch; hoist out
|
||
result.append(stmt) catch bun.outOfMemory();
|
||
continue;
|
||
}
|
||
},
|
||
|
||
.s_export_default => {
|
||
continue; // this prevents re-exporting default since we already have it as an .s_export_clause
|
||
},
|
||
|
||
.s_export_clause => |data| {
|
||
// Merge export clauses together
|
||
exports.appendSlice(data.items) catch bun.outOfMemory();
|
||
continue;
|
||
},
|
||
|
||
.s_function => {
|
||
if (should_hoist_fns) {
|
||
// Hoist function declarations for cross-file ESM references
|
||
result.append(stmt) catch bun.outOfMemory();
|
||
continue;
|
||
}
|
||
},
|
||
|
||
.s_local => |local| {
|
||
// If any of these are exported, turn it into a "var" and add export clauses
|
||
if (local.is_export) {
|
||
local.is_export = false;
|
||
for (local.decls.slice()) |decl| {
|
||
if (decl.binding.data == .b_identifier) {
|
||
const identifier = decl.binding.data.b_identifier;
|
||
exports.append(js_ast.ClauseItem{
|
||
.name = .{
|
||
.loc = decl.binding.loc,
|
||
.ref = identifier.ref,
|
||
},
|
||
.alias = p.symbols.items[identifier.ref.inner_index].original_name,
|
||
.alias_loc = decl.binding.loc,
|
||
}) catch bun.outOfMemory();
|
||
local.kind = .k_var;
|
||
}
|
||
}
|
||
}
|
||
},
|
||
|
||
else => {},
|
||
}
|
||
|
||
stmts[end] = stmt;
|
||
end += 1;
|
||
}
|
||
|
||
const non_exported_statements = stmts[0..end];
|
||
|
||
const caught_ref = p.generateTempRef("_catch");
|
||
const err_ref = p.generateTempRef("_err");
|
||
const has_err_ref = p.generateTempRef("_hasErr");
|
||
|
||
var scope = p.current_scope;
|
||
while (!scope.kindStopsHoisting()) {
|
||
scope = scope.parent.?;
|
||
}
|
||
|
||
const is_top_level = scope == p.module_scope;
|
||
scope.generated.append(p.allocator, &.{
|
||
ctx.stack_ref,
|
||
caught_ref,
|
||
err_ref,
|
||
has_err_ref,
|
||
}) catch bun.outOfMemory();
|
||
p.declared_symbols.ensureUnusedCapacity(
|
||
p.allocator,
|
||
// 5 to include the _promise decl later on:
|
||
if (ctx.has_await_using) 5 else 4,
|
||
) catch bun.outOfMemory();
|
||
p.declared_symbols.appendAssumeCapacity(.{ .is_top_level = is_top_level, .ref = ctx.stack_ref });
|
||
p.declared_symbols.appendAssumeCapacity(.{ .is_top_level = is_top_level, .ref = caught_ref });
|
||
p.declared_symbols.appendAssumeCapacity(.{ .is_top_level = is_top_level, .ref = err_ref });
|
||
p.declared_symbols.appendAssumeCapacity(.{ .is_top_level = is_top_level, .ref = has_err_ref });
|
||
|
||
const loc = ctx.first_using_loc;
|
||
const call_dispose = call_dispose: {
|
||
p.recordUsage(ctx.stack_ref);
|
||
p.recordUsage(err_ref);
|
||
p.recordUsage(has_err_ref);
|
||
const args = p.allocator.alloc(Expr, 3) catch bun.outOfMemory();
|
||
args[0] = Expr{
|
||
.data = .{ .e_identifier = .{ .ref = ctx.stack_ref } },
|
||
.loc = loc,
|
||
};
|
||
args[1] = Expr{
|
||
.data = .{ .e_identifier = .{ .ref = err_ref } },
|
||
.loc = loc,
|
||
};
|
||
args[2] = Expr{
|
||
.data = .{ .e_identifier = .{ .ref = has_err_ref } },
|
||
.loc = loc,
|
||
};
|
||
break :call_dispose p.callRuntime(loc, "__callDispose", args);
|
||
};
|
||
|
||
const finally_stmts = finally: {
|
||
if (ctx.has_await_using) {
|
||
const promise_ref = p.generateTempRef("_promise");
|
||
scope.generated.append(p.allocator, &.{promise_ref}) catch bun.outOfMemory();
|
||
p.declared_symbols.appendAssumeCapacity(.{ .is_top_level = is_top_level, .ref = promise_ref });
|
||
|
||
const promise_ref_expr = p.newExpr(E.Identifier{ .ref = promise_ref }, loc);
|
||
|
||
const await_expr = p.newExpr(E.Await{
|
||
.value = promise_ref_expr,
|
||
}, loc);
|
||
p.recordUsage(promise_ref);
|
||
|
||
const statements = p.allocator.alloc(Stmt, 2) catch bun.outOfMemory();
|
||
statements[0] = p.s(S.Local{
|
||
.decls = decls: {
|
||
const decls = p.allocator.alloc(Decl, 1) catch bun.outOfMemory();
|
||
decls[0] = .{
|
||
.binding = p.b(B.Identifier{ .ref = promise_ref }, loc),
|
||
.value = call_dispose,
|
||
};
|
||
break :decls G.Decl.List.init(decls);
|
||
},
|
||
}, loc);
|
||
|
||
// The "await" must not happen if an error was thrown before the
|
||
// "await using", so we conditionally await here:
|
||
//
|
||
// var promise = __callDispose(stack, error, hasError);
|
||
// promise && await promise;
|
||
//
|
||
statements[1] = p.s(S.SExpr{
|
||
.value = p.newExpr(E.Binary{
|
||
.op = .bin_logical_and,
|
||
.left = promise_ref_expr,
|
||
.right = await_expr,
|
||
}, loc),
|
||
}, loc);
|
||
|
||
break :finally statements;
|
||
} else {
|
||
const single = p.allocator.alloc(Stmt, 1) catch bun.outOfMemory();
|
||
single[0] = p.s(S.SExpr{ .value = call_dispose }, call_dispose.loc);
|
||
break :finally single;
|
||
}
|
||
};
|
||
|
||
// Wrap everything in a try/catch/finally block
|
||
p.recordUsage(caught_ref);
|
||
result.ensureUnusedCapacity(2 + @as(usize, @intFromBool(exports.items.len > 0))) catch bun.outOfMemory();
|
||
result.appendAssumeCapacity(p.s(S.Local{
|
||
.decls = decls: {
|
||
const decls = p.allocator.alloc(Decl, 1) catch bun.outOfMemory();
|
||
decls[0] = .{
|
||
.binding = p.b(B.Identifier{ .ref = ctx.stack_ref }, loc),
|
||
.value = p.newExpr(E.Array{}, loc),
|
||
};
|
||
break :decls G.Decl.List.init(decls);
|
||
},
|
||
.kind = .k_let,
|
||
}, loc));
|
||
result.appendAssumeCapacity(p.s(S.Try{
|
||
.body = non_exported_statements,
|
||
.body_loc = loc,
|
||
.catch_ = .{
|
||
.binding = p.b(B.Identifier{ .ref = caught_ref }, loc),
|
||
.body = catch_body: {
|
||
const statements = p.allocator.alloc(Stmt, 1) catch bun.outOfMemory();
|
||
statements[0] = p.s(S.Local{
|
||
.decls = decls: {
|
||
const decls = p.allocator.alloc(Decl, 2) catch bun.outOfMemory();
|
||
decls[0] = .{
|
||
.binding = p.b(B.Identifier{ .ref = err_ref }, loc),
|
||
.value = p.newExpr(E.Identifier{ .ref = caught_ref }, loc),
|
||
};
|
||
decls[1] = .{
|
||
.binding = p.b(B.Identifier{ .ref = has_err_ref }, loc),
|
||
.value = p.newExpr(E.Number{ .value = 1 }, loc),
|
||
};
|
||
break :decls G.Decl.List.init(decls);
|
||
},
|
||
}, loc);
|
||
break :catch_body statements;
|
||
},
|
||
.body_loc = loc,
|
||
.loc = loc,
|
||
},
|
||
.finally = .{
|
||
.loc = loc,
|
||
.stmts = finally_stmts,
|
||
},
|
||
}, loc));
|
||
|
||
if (exports.items.len > 0) {
|
||
result.appendAssumeCapacity(p.s(S.ExportClause{
|
||
.items = exports.items,
|
||
}, loc));
|
||
}
|
||
|
||
return result;
|
||
}
|
||
};
|
||
|
||
/// Emits `$RefreshReg$(component, "file.ts:OriginalName")` at module scope
/// for identifiers that look like React components. No-op for names that are
/// not component-ish (React Fast Refresh only tracks those).
pub fn handleReactRefreshRegister(p: *P, stmts: *ListManaged(Stmt), original_name: []const u8, ref: Ref) !void {
    bun.assert(p.options.features.react_fast_refresh);
    bun.assert(p.current_scope == p.module_scope);

    if (!ReactRefresh.isComponentishName(original_name)) return;

    const loc = logger.Loc.Empty;

    // "file.ts:Original Name" — the registration key is the pretty source
    // path joined with the component's original name.
    const registration_key = try bun.strings.concat(p.allocator, &.{
        p.source.path.pretty,
        ":",
        original_name,
    });

    // $RefreshReg$(component, "file.ts:Original Name")
    const register_call = p.newExpr(E.Call{
        .target = Expr.initIdentifier(p.react_refresh.register_ref, loc),
        .args = try ExprNodeList.fromSlice(p.allocator, &.{
            Expr.initIdentifier(ref, loc),
            p.newExpr(E.String{ .data = registration_key }, loc),
        }),
    }, loc);
    try stmts.append(p.s(S.SExpr{ .value = register_call }, loc));

    p.react_refresh.register_used = true;
}
|
||
|
||
/// Wraps an exported value in a `registerClientReference(value, modulePath,
/// exportName)` call for React Server Components. Server-reference wrapping
/// is not yet implemented and panics.
pub fn wrapValueForServerComponentReference(p: *P, val: Expr, original_name: []const u8) Expr {
    bun.assert(p.options.features.server_components.wrapsExports());
    bun.assert(p.current_scope == p.module_scope);

    if (p.options.features.server_components == .wrap_exports_for_server_reference)
        bun.todoPanic(@src(), "registerServerReference", .{});

    const loc = logger.Loc.Empty;

    // In development the module is identified by its pretty path; a stable
    // unique key for production builds is not implemented yet.
    const module_path = p.newExpr(E.String{
        .data = if (p.options.jsx.development)
            p.source.path.pretty
        else
            bun.todoPanic(@src(), "TODO: unique_key here", .{}),
    }, loc);

    const export_name = p.newExpr(E.String{ .data = original_name }, loc);

    // registerClientReference(
    //     Comp,
    //     "src/filepath.tsx",
    //     "Comp"
    // );
    return p.newExpr(E.Call{
        .target = Expr.initIdentifier(p.server_components_wrap_ref, loc),
        .args = js_ast.ExprNodeList.fromSlice(p.allocator, &.{
            val,
            module_path,
            export_name,
        }) catch bun.outOfMemory(),
    }, loc);
}
|
||
|
||
/// Records one hook call (`useXyz(...)`) into the enclosing function's React
/// Fast Refresh signature. Lazily initializes the per-function hook context
/// (hash state + `_s` signature callback ref) on the first hook seen, then
/// folds the hook name — and for some built-in hooks, the relevant argument —
/// into the signature hash. Custom (user-defined) hooks are additionally
/// collected so they can be passed to `_s(...)` later.
pub fn handleReactRefreshHookCall(p: *P, hook_call: *E.Call, original_name: []const u8) void {
    bun.assert(p.options.features.react_fast_refresh);
    bun.assert(ReactRefresh.isHookName(original_name));
    const ctx_storage = p.react_refresh.hook_ctx_storage orelse
        return; // not in a function, ignore this hook call.

    // if this function has no hooks recorded, initialize a hook context
    // every function visit provides stack storage, which it will inspect at visit finish.
    const ctx: *ReactRefresh.HookContext = if (ctx_storage.*) |*ctx| ctx else init: {
        p.react_refresh.signature_used = true;

        // Walk up to the nearest scope that can hold the generated `_s`
        // temporary (function body, block, or the file's entry scope).
        var scope = p.current_scope;
        while (scope.kind != .function_body and scope.kind != .block and scope.kind != .entry) {
            scope = scope.parent orelse break;
        }

        ctx_storage.* = .{
            .hasher = std.hash.Wyhash.init(0),
            .signature_cb = p.generateTempRefWithScope("_s", scope),
            .user_hooks = .{},
        };

        break :init &(ctx_storage.*.?);
    };

    // Every hook call contributes its name to the signature hash.
    ctx.hasher.update(original_name);

    if (ReactRefresh.built_in_hooks.get(original_name)) |built_in_hook| hash_arg: {
        // For stateful built-in hooks, the initial-state argument also
        // affects the signature, so edits to it trigger a state reset.
        const arg_index: usize = switch (built_in_hook) {
            // useState first argument is initial state.
            .useState => 0,
            // useReducer second argument is initial state.
            .useReducer => 1,
            else => break :hash_arg,
        };
        if (hook_call.args.len <= arg_index) break :hash_arg;
        const arg = hook_call.args.slice()[arg_index];
        arg.data.writeToHasher(&ctx.hasher, p.symbols.items);
    } else switch (hook_call.target.data) {
        // Not a built-in hook: if the callee is a plain identifier, remember
        // it (deduplicated by ref) as a user hook for the `() => [hooks]`
        // argument of `_s(...)`.
        inline .e_identifier,
        .e_import_identifier,
        .e_commonjs_export_identifier,
        => |id| {
            const gop = ctx.user_hooks.getOrPut(p.allocator, id.ref) catch bun.outOfMemory();
            if (!gop.found_existing) {
                gop.value_ptr.* = Expr.initIdentifier(id.ref, logger.Loc.Empty);
            }
        },
        else => {},
    }

    // Separator so consecutive hook entries cannot collide in the hash.
    ctx.hasher.update("\x00");
}
|
||
|
||
/// Prepends a bare `_s();` call to a function body that was found to contain
/// hook calls. Runs after the body has been fully visited, so the statement
/// list will not be appended to afterwards — the growth strategy below
/// exploits that.
pub fn handleReactRefreshPostVisitFunctionBody(p: *P, stmts: *ListManaged(Stmt), hook: *ReactRefresh.HookContext) void {
    bun.assert(p.options.features.react_fast_refresh);

    // We need to prepend `_s();` as a statement.
    if (stmts.items.len == stmts.capacity) {
        // If the ArrayList does not have enough capacity, it is
        // re-allocated entirely to fit. Only one slot of new capacity
        // is used since we know this statement list is not going to be
        // appended to afterwards; This function is a post-visit handler.
        const new_stmts = p.allocator.alloc(Stmt, stmts.items.len + 1) catch bun.outOfMemory();
        // Copy into slot 1 onward, leaving slot 0 free for `_s();`.
        @memcpy(new_stmts[1..], stmts.items);
        stmts.deinit();
        stmts.* = ListManaged(Stmt).fromOwnedSlice(p.allocator, new_stmts);
    } else {
        // The array has enough capacity, so there is no possibility of
        // allocation failure. We just move all of the statements over
        // by one, and increase the length using `addOneAssumeCapacity`
        _ = stmts.addOneAssumeCapacity();
        bun.copy(Stmt, stmts.items[1..], stmts.items[0 .. stmts.items.len - 1]);
    }

    const loc = logger.Loc.Empty;
    // `_s()` — zero-argument call to this function's signature callback.
    const prepended_stmt = p.s(S.SExpr{ .value = p.newExpr(E.Call{
        .target = Expr.initIdentifier(hook.signature_cb, loc),
    }, loc) }, loc);
    stmts.items[0] = prepended_stmt;
}
|
||
|
||
/// Builds the declaration statement `var _s = $RefreshSig$();` that
/// introduces a function's React Fast Refresh signature callback.
pub fn getReactRefreshHookSignalDecl(p: *P, signal_cb_ref: Ref) Stmt {
    const loc = logger.Loc.Empty;

    // The initializer: a zero-argument call to $RefreshSig$.
    const create_signature_call = p.newExpr(E.Call{
        .target = Expr.initIdentifier(p.react_refresh.create_signature_ref, loc),
    }, loc);

    // var _s = $RefreshSig$();
    const decls = G.Decl.List.fromSlice(p.allocator, &.{.{
        .binding = p.b(B.Identifier{ .ref = signal_cb_ref }, loc),
        .value = create_signature_call,
    }}) catch bun.outOfMemory();

    return p.s(S.Local{ .decls = decls }, loc);
}
|
||
|
||
/// Builds the signature-initialization call
/// `_s(func, "<hash>", force, () => [useCustom1, ...])`.
/// Trailing arguments are optional: `force` is emitted when custom hooks are
/// present or a force-reset was requested, and the custom-hooks thunk only
/// when custom hooks were recorded. Note the invariant: `have_custom_hooks`
/// implies `have_force_arg`, so `args[3]` is only written when `args[2]`
/// exists.
pub fn getReactRefreshHookSignalInit(p: *P, ctx: *ReactRefresh.HookContext, function_with_hook_calls: Expr) Expr {
    const loc = logger.Loc.Empty;

    // Finalize the accumulated hook hash and base64-encode it into the
    // string literal passed as the second argument.
    const final = ctx.hasher.final();
    const hash_data = p.allocator.alloc(u8, comptime bun.base64.encodeLenFromSize(@sizeOf(@TypeOf(final)))) catch bun.outOfMemory();
    bun.assert(bun.base64.encode(hash_data, std.mem.asBytes(&final)) == hash_data.len);

    const have_custom_hooks = ctx.user_hooks.count() > 0;
    const have_force_arg = have_custom_hooks or p.react_refresh.force_reset;

    // 2 mandatory args plus one slot for each optional trailing arg.
    const args = p.allocator.alloc(
        Expr,
        2 +
            @as(usize, @intFromBool(have_force_arg)) +
            @as(usize, @intFromBool(have_custom_hooks)),
    ) catch bun.outOfMemory();

    args[0] = function_with_hook_calls;
    args[1] = p.newExpr(E.String{ .data = hash_data }, loc);

    if (have_force_arg) args[2] = p.newExpr(E.Boolean{ .value = p.react_refresh.force_reset }, loc);

    if (have_custom_hooks) {
        // () => [useCustom1, useCustom2]
        args[3] = p.newExpr(E.Arrow{
            .body = .{
                .stmts = p.allocator.dupe(Stmt, &.{
                    p.s(S.Return{ .value = p.newExpr(E.Array{
                        .items = ExprNodeList.init(ctx.user_hooks.values()),
                    }, loc) }, loc),
                }) catch bun.outOfMemory(),
                .loc = loc,
            },
            .prefer_expr = true,
        }, loc);
    }

    // _s(func, "<hash>", force, () => [useCustom])
    return p.newExpr(E.Call{
        .target = Expr.initIdentifier(ctx.signature_cb, loc),
        .args = ExprNodeList.init(args),
    }, loc);
}
|
||
|
||
/// Final assembly step of the parser: runs the import scanner over all
/// parts (with an HMR-specific single-part path when hot module reloading
/// is enabled), optionally wraps the module body in a CommonJS function
/// wrapper, computes the top-level-symbol-to-part map for the bundler,
/// and packages everything into the `js_ast.Ast` result.
pub fn toAST(
    p: *P,
    input_parts: []js_ast.Part,
    exports_kind: js_ast.ExportsKind,
    wrap_mode: WrapMode,
    hashbang: []const u8,
) !js_ast.Ast {
    const allocator = p.allocator;
    var parts = input_parts;

    // if (p.options.tree_shaking and p.options.features.trim_unused_imports) {
    //     p.treeShake(&parts, false);
    // }

    const bundling = p.options.bundle;
    // When bundling, part 0 is reserved for the namespace export.
    var parts_end: usize = @as(usize, @intFromBool(bundling));

    // When bundling with HMR, we need every module to be just a
    // single part, as we later wrap each module into a function,
    // which requires a single part. Otherwise, you'll end up with
    // multiple instances of a module, each with different parts of
    // the file. That is also why tree-shaking is disabled.
    if (p.options.features.hot_module_reloading) {
        bun.assert(!p.options.tree_shaking);
        bun.assert(p.options.features.hot_module_reloading);

        var hmr_transform_ctx = ConvertESMExportsForHmr{ .last_part = &parts[parts.len - 1] };
        try hmr_transform_ctx.stmts.ensureTotalCapacity(p.allocator, prealloc_count: {
            // get an estimate on how many statements there are going to be
            var count: usize = 0;
            for (parts) |part| count += part.stmts.len;
            break :prealloc_count count + 2;
        });

        for (parts) |part| {
            // Bake does not care about 'import =', as it handles it on its own
            _ = try ImportScanner.scan(P, p, part.stmts, wrap_mode != .none, true, &hmr_transform_ctx);
        }

        parts = try hmr_transform_ctx.finalize(p, parts);
    } else {
        // Handle import paths after the whole file has been visited because we need
        // symbol usage counts to be able to remove unused type-only imports in
        // TypeScript code.
        while (true) {
            var kept_import_equals = false;
            var removed_import_equals = false;

            const begin = parts_end;
            // Potentially remove some statements, then filter out parts to remove any
            // with no statements
            for (parts[begin..]) |part_| {
                var part = part_;
                p.import_records_for_current_part.clearRetainingCapacity();
                p.declared_symbols.clearRetainingCapacity();

                const result = try ImportScanner.scan(P, p, part.stmts, wrap_mode != .none, false, {});
                kept_import_equals = kept_import_equals or result.kept_import_equals;
                removed_import_equals = removed_import_equals or result.removed_import_equals;

                part.stmts = result.stmts;
                if (part.stmts.len > 0) {
                    if (p.module_scope.contains_direct_eval and part.declared_symbols.len() > 0) {
                        // If this file contains a direct call to "eval()", all parts that
                        // declare top-level symbols must be kept since the eval'd code may
                        // reference those symbols.
                        part.can_be_removed_if_unused = false;
                    }
                    // Merge the symbols/import records collected during this
                    // scan into the surviving part.
                    if (part.declared_symbols.len() == 0) {
                        part.declared_symbols = p.declared_symbols.clone(p.allocator) catch unreachable;
                    } else {
                        part.declared_symbols.appendList(p.allocator, p.declared_symbols) catch unreachable;
                    }

                    if (part.import_record_indices.len == 0) {
                        part.import_record_indices = @TypeOf(part.import_record_indices).init(
                            (p.import_records_for_current_part.clone(p.allocator) catch unreachable).items,
                        );
                    } else {
                        part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items) catch unreachable;
                    }

                    // Compact surviving parts toward the front of the slice.
                    parts[parts_end] = part;
                    parts_end += 1;
                }
            }

            // We need to iterate multiple times if an import-equals statement was
            // removed and there are more import-equals statements that may be removed
            if (!kept_import_equals or !removed_import_equals) {
                break;
            }
        }

        // leave the first part in there for namespace export when bundling
        parts = parts[0..parts_end];

        // Do a second pass for exported items now that imported items are filled out.
        // This isn't done for HMR because it already deletes all `.s_export_clause`s
        for (parts) |part| {
            for (part.stmts) |stmt| {
                switch (stmt.data) {
                    .s_export_clause => |clause| {
                        for (clause.items) |item| {
                            if (p.named_imports.getEntry(item.name.ref.?)) |_import| {
                                _import.value_ptr.is_exported = true;
                            }
                        }
                    },
                    else => {},
                }
            }
        }
    }

    if (wrap_mode == .bun_commonjs and !p.options.features.remove_cjs_module_wrapper) {
        // This transforms the user's code into:
        //
        //    (function (exports, require, module, __filename, __dirname) {
        //         ...
        //    })
        //
        // which is then called in `evaluateCommonJSModuleOnce`
        var args = allocator.alloc(Arg, 5 + @as(usize, @intFromBool(p.has_import_meta))) catch bun.outOfMemory();
        args[0..5].* = .{
            Arg{ .binding = p.b(B.Identifier{ .ref = p.exports_ref }, logger.Loc.Empty) },
            Arg{ .binding = p.b(B.Identifier{ .ref = p.require_ref }, logger.Loc.Empty) },
            Arg{ .binding = p.b(B.Identifier{ .ref = p.module_ref }, logger.Loc.Empty) },
            Arg{ .binding = p.b(B.Identifier{ .ref = p.filename_ref }, logger.Loc.Empty) },
            Arg{ .binding = p.b(B.Identifier{ .ref = p.dirname_ref }, logger.Loc.Empty) },
        };
        if (p.has_import_meta) {
            // `import.meta` inside CJS is routed through an extra hidden
            // wrapper parameter.
            p.import_meta_ref = p.newSymbol(.other, "$Bun_import_meta") catch bun.outOfMemory();
            args[5] = Arg{ .binding = p.b(B.Identifier{ .ref = p.import_meta_ref }, logger.Loc.Empty) };
        }

        var total_stmts_count: usize = 0;
        for (parts) |part| {
            total_stmts_count += part.stmts.len;
        }

        // Re-emit "use strict" inside the wrapper unless the first statement
        // is already a directive.
        const preserve_strict_mode = p.module_scope.strict_mode == .explicit_strict_mode and
            !(parts.len > 0 and
                parts[0].stmts.len > 0 and
                parts[0].stmts[0].data == .s_directive);

        total_stmts_count += @as(usize, @intCast(@intFromBool(preserve_strict_mode)));

        // Flatten every part's statements into one contiguous slice that
        // becomes the wrapper function's body.
        const stmts_to_copy = allocator.alloc(Stmt, total_stmts_count) catch bun.outOfMemory();
        {
            var remaining_stmts = stmts_to_copy;
            if (preserve_strict_mode) {
                remaining_stmts[0] = p.s(
                    S.Directive{
                        .value = "use strict",
                    },
                    p.module_scope_directive_loc,
                );
                remaining_stmts = remaining_stmts[1..];
            }

            for (parts) |part| {
                for (part.stmts, remaining_stmts[0..part.stmts.len]) |src, *dest| {
                    dest.* = src;
                }
                remaining_stmts = remaining_stmts[part.stmts.len..];
            }
        }

        const wrapper = p.newExpr(
            E.Function{
                .func = G.Fn{
                    .name = null,
                    .open_parens_loc = logger.Loc.Empty,
                    .args = args,
                    .body = .{ .loc = logger.Loc.Empty, .stmts = stmts_to_copy },
                    .flags = Flags.Function.init(.{ .is_export = false }),
                },
            },
            logger.Loc.Empty,
        );

        // The whole module collapses to a single part containing one
        // expression statement: the wrapper function.
        var top_level_stmts = p.allocator.alloc(Stmt, 1) catch bun.outOfMemory();
        parts[0].stmts = top_level_stmts;
        top_level_stmts[0] = p.s(
            S.SExpr{
                .value = wrapper,
            },
            logger.Loc.Empty,
        );
        parts.len = 1;
    }

    var top_level_symbols_to_parts = js_ast.Ast.TopLevelSymbolToParts{};
    var top_level = &top_level_symbols_to_parts;

    if (p.options.bundle) {
        const Ctx = struct {
            allocator: std.mem.Allocator,
            top_level_symbols_to_parts: *js_ast.Ast.TopLevelSymbolToParts,
            symbols: []const js_ast.Symbol,
            part_index: u32,

            // Records that `input` (a top-level symbol) is declared by part
            // `part_index`.
            pub fn next(ctx: @This(), input: Ref) void {
                // If this symbol was merged, use the symbol at the end of the
                // linked list in the map. This is the case for multiple "var"
                // declarations with the same name, for example.
                var ref = input;
                var symbol_ref = &ctx.symbols[ref.innerIndex()];
                while (symbol_ref.hasLink()) : (symbol_ref = &ctx.symbols[ref.innerIndex()]) {
                    ref = symbol_ref.link;
                }

                var entry = ctx.top_level_symbols_to_parts.getOrPut(ctx.allocator, ref) catch unreachable;
                if (!entry.found_existing) {
                    entry.value_ptr.* = .{};
                }

                entry.value_ptr.push(ctx.allocator, @as(u32, @truncate(ctx.part_index))) catch unreachable;
            }
        };

        // Each part tracks the other parts it depends on within this file
        for (parts, 0..) |*part, part_index| {
            const decls = &part.declared_symbols;
            const ctx = Ctx{
                .allocator = p.allocator,
                .top_level_symbols_to_parts = top_level,
                .symbols = p.symbols.items,
                .part_index = @as(u32, @truncate(part_index)),
            };

            DeclaredSymbol.forEachTopLevelSymbol(decls, ctx, Ctx.next);
        }

        // Pulling in the exports of this module always pulls in the export part

        {
            var entry = top_level.getOrPut(p.allocator, p.exports_ref) catch unreachable;

            if (!entry.found_existing) {
                entry.value_ptr.* = .{};
            }

            entry.value_ptr.push(p.allocator, js_ast.namespace_export_part_index) catch unreachable;
        }
    }

    // Pre-allocate the `require_<file>` wrapper symbol only when the linker
    // will actually need it (see `needsWrapperRef`).
    const wrapper_ref: Ref = brk: {
        if (p.options.bundle and p.needsWrapperRef(parts)) {
            break :brk p.newSymbol(
                .other,
                std.fmt.allocPrint(
                    p.allocator,
                    "require_{any}",
                    .{p.source.fmtIdentifier()},
                ) catch bun.outOfMemory(),
            ) catch bun.outOfMemory();
        }

        break :brk Ref.None;
    };

    var parts_list = bun.BabyList(js_ast.Part).init(parts);
    // `parts` may have been compacted; capacity is still the original length.
    parts_list.cap = @intCast(input_parts.len);

    return .{
        .runtime_imports = p.runtime_imports,
        .parts = parts_list,
        .module_scope = p.module_scope.*,
        .symbols = js_ast.Symbol.List.fromList(p.symbols),
        .exports_ref = p.exports_ref,
        .wrapper_ref = wrapper_ref,
        .module_ref = p.module_ref,
        .import_records = ImportRecord.List.fromList(p.import_records),
        .export_star_import_records = p.export_star_import_records.items,
        .approximate_newline_count = p.lexer.approximate_newline_count,
        .exports_kind = exports_kind,
        .named_imports = p.named_imports,
        .named_exports = p.named_exports,
        .import_keyword = p.esm_import_keyword,
        .export_keyword = p.esm_export_keyword,
        .top_level_symbols_to_parts = top_level_symbols_to_parts,
        .char_freq = p.computeCharacterFrequency(),
        .directive = if (p.module_scope.strict_mode == .explicit_strict_mode) "use strict" else null,

        // Assign slots to symbols in nested scopes. This is some precomputation for
        // the symbol renaming pass that will happen later in the linker. It's done
        // now in the parser because we want it to be done in parallel per file and
        // we're already executing code in parallel here
        .nested_scope_slot_counts = if (p.options.features.minify_identifiers)
            renamer.assignNestedScopeSlots(p.allocator, p.module_scope, p.symbols.items)
        else
            js_ast.SlotCounts{},

        .require_ref = if (p.runtime_imports.__require != null)
            p.runtime_imports.__require.?
        else
            p.require_ref,

        .force_cjs_to_esm = p.unwrap_all_requires or exports_kind == .esm_with_dynamic_fallback_from_cjs,
        .uses_module_ref = p.symbols.items[p.module_ref.inner_index].use_count_estimate > 0,
        .uses_exports_ref = p.symbols.items[p.exports_ref.inner_index].use_count_estimate > 0,
        .uses_require_ref = p.runtime_imports.__require != null and
            p.symbols.items[p.runtime_imports.__require.?.inner_index].use_count_estimate > 0,
        .commonjs_module_exports_assigned_deoptimized = p.commonjs_module_exports_assigned_deoptimized,
        .top_level_await_keyword = p.top_level_await_keyword,
        .commonjs_named_exports = p.commonjs_named_exports,
        .has_commonjs_export_names = p.has_commonjs_export_names,

        .hashbang = hashbang,

        // TODO: cross-module constant inlining
        // .const_values = p.const_values,
        .ts_enums = try p.computeTsEnumsMap(allocator),

        .import_meta_ref = p.import_meta_ref,
    };
}
|
||
|
||
/// The bundler will generate wrappers to contain top-level side effects using
/// the '__esm' helper. Example:
///
///     var init_foo = __esm(() => {
///       someExport = Math.random();
///     });
///
/// This wrapper can be removed if all of the constructs get moved
/// outside of the file. Due to paralleization, we can't retroactively
/// delete the `init_foo` symbol, but instead it must be known far in
/// advance if the symbol is needed or not.
///
/// The logic in this function must be in sync with the hoisting
/// logic in `LinkerContext.generateCodeForFileInChunkJS`
fn needsWrapperRef(p: *const P, parts: []const js_ast.Part) bool {
    bun.assert(p.options.bundle);
    for (parts) |part| {
        for (part.stmts) |stmt| switch (stmt.data) {
            // Function declarations are always hoistable.
            .s_function => {},
            .s_class => |class| if (!class.class.canBeMoved()) return true,
            .s_local => |local| {
                // Locals are only candidates for hoisting when they were a
                // CommonJS export or the file has no named CommonJS exports.
                if (!(local.was_commonjs_export or p.commonjs_named_exports.count() == 0))
                    return true;
                // Every initializer must itself be movable.
                for (local.decls.slice()) |decl| {
                    const value = decl.value orelse continue;
                    if (value.data != .e_missing and !value.canBeMoved())
                        return true;
                }
            },
            .s_export_default => |ed| if (!ed.canBeMoved()) return true,
            .s_export_equals => |e| if (!e.value.canBeMoved()) return true,
            // Any other statement kind is a top-level side effect.
            else => return true,
        };
    }
    return false;
}
|
||
|
||
/// Initializes a parser instance in place. Creates the module (entry) scope,
/// fills in `this.*` with defaults, and wires up the self-referential
/// transposer/wrapper helpers (which must be set after `this.*` is assigned,
/// since they capture `this`).
pub fn init(
    allocator: Allocator,
    log: *logger.Log,
    source: *const logger.Source,
    define: *Define,
    lexer: js_lexer.Lexer,
    opts: Parser.Options,
    this: *P,
) anyerror!void {
    var scope_order = try ScopeOrderList.initCapacity(allocator, 1);
    const scope = try allocator.create(Scope);
    // The root (module) scope; `.entry` kind, no parent.
    scope.* = Scope{
        .members = .{},
        .children = .{},
        .generated = .{},
        .kind = .entry,
        .label_ref = null,
        .parent = null,
    };

    scope_order.appendAssumeCapacity(ScopeOrder{ .loc = locModuleScope, .scope = scope });
    this.* = P{
        .legacy_cjs_import_stmts = @TypeOf(this.legacy_cjs_import_stmts).init(allocator),
        // This must default to true or else parsing "in" won't work right.
        // It will fail for the case in the "in-keyword.js" file
        .allow_in = true,

        .call_target = nullExprData,
        .delete_target = nullExprData,
        .stmt_expr_value = nullExprData,
        .loop_body = nullStmtData,
        .define = define,
        // `undefined` fields are assigned below (or never, in scan-only mode).
        .import_records = undefined,
        .named_imports = undefined,
        .named_exports = .{},
        .log = log,
        .allocator = allocator,
        .options = opts,
        .then_catch_chain = ThenCatchChain{ .next_target = nullExprData },
        .to_expr_wrapper_namespace = undefined,
        .to_expr_wrapper_hoisted = undefined,
        .import_transposer = undefined,
        .require_transposer = undefined,
        .require_resolve_transposer = undefined,
        .source = source,
        .macro = MacroState.init(allocator),
        .current_scope = scope,
        .module_scope = scope,
        .scopes_in_order = scope_order,
        .needs_jsx_import = if (comptime only_scan_imports_and_do_not_visit) false else NeedsJSXType{},
        .lexer = lexer,

        // Only enable during bundling, when not bundling CJS
        .commonjs_named_exports_deoptimized = if (opts.bundle) opts.output_format == .cjs else true,
    };
    this.lexer.track_comments = opts.features.minify_identifiers;

    // Decide whether `require()` calls in this file should be unwrapped
    // into direct ESM references.
    this.unwrap_all_requires = brk: {
        if (opts.bundle and opts.output_format != .cjs) {
            if (source.path.packageName()) |pkg| {
                if (opts.features.shouldUnwrapRequire(pkg)) {
                    if (strings.eqlComptime(pkg, "react") or strings.eqlComptime(pkg, "react-dom")) {
                        // Character-wise version check: skip unwrapping for
                        // react/react-dom versions whose string starts with
                        // '0' or '1' followed by a digit below '8' (i.e.
                        // pre-18 releases).
                        const version = opts.package_version;
                        if (version.len > 2 and (version[0] == '0' or (version[0] == '1' and version[1] < '8'))) {
                            break :brk false;
                        }
                    }

                    break :brk true;
                }
            }
        }

        break :brk false;
    };

    this.symbols = std.ArrayList(Symbol).init(allocator);

    if (comptime !only_scan_imports_and_do_not_visit) {
        this.import_records = @TypeOf(this.import_records).init(allocator);
        this.named_imports = .{};
    }

    // These capture `this`, so they must be created after `this.*` is set.
    this.to_expr_wrapper_namespace = Binding2ExprWrapper.Namespace.init(this);
    this.to_expr_wrapper_hoisted = Binding2ExprWrapper.Hoisted.init(this);
    this.import_transposer = @TypeOf(this.import_transposer).init(this);
    this.require_transposer = @TypeOf(this.require_transposer).init(this);
    this.require_resolve_transposer = @TypeOf(this.require_resolve_transposer).init(this);

    if (opts.features.top_level_await or comptime only_scan_imports_and_do_not_visit) {
        this.fn_or_arrow_data_parse.allow_await = .allow_expr;
        this.fn_or_arrow_data_parse.is_top_level = true;
    }

    if (comptime !is_typescript_enabled) {
        // This is so it doesn't impact runtime transpiler caching when not in use
        this.options.features.emit_decorator_metadata = false;
    }
}
|
||
};
|
||
}
|
||
|
||
// Doing this seems to yield a 1% performance improvement parsing larger files
|
||
// ❯ hyperfine "../../build/macos-x86_64/bun node_modules/react-dom/cjs/react-dom.development.js --resolve=disable" "../../bun.before-comptime-js-parser node_modules/react-dom/cjs/react-dom.development.js --resolve=disable" --min-runs=500
|
||
// Benchmark #1: ../../build/macos-x86_64/bun node_modules/react-dom/cjs/react-dom.development.js --resolve=disable
|
||
// Time (mean ± σ): 25.1 ms ± 1.1 ms [User: 20.4 ms, System: 3.1 ms]
|
||
// Range (min … max): 23.5 ms … 31.7 ms 500 runs
|
||
|
||
// Benchmark #2: ../../bun.before-comptime-js-parser node_modules/react-dom/cjs/react-dom.development.js --resolve=disable
|
||
// Time (mean ± σ): 25.6 ms ± 1.3 ms [User: 20.9 ms, System: 3.1 ms]
|
||
// Range (min … max): 24.1 ms … 39.7 ms 500 runs
|
||
// '../../build/macos-x86_64/bun node_modules/react-dom/cjs/react-dom.development.js --resolve=disable' ran
|
||
// 1.02 ± 0.07 times faster than '../../bun.before-comptime-js-parser node_modules/react-dom/cjs/react-dom.development.js --resolve=disable'
|
||
// Comptime parser specializations. In fast debug builds every variant is
// aliased to TSXParser (whose configuration — JSX + TypeScript — is a
// superset of the others).
const JavaScriptParser = if (bun.fast_debug_build_mode)
    TSXParser
else
    NewParser(.{});
const JSXParser = if (bun.fast_debug_build_mode)
    TSXParser
else
    NewParser(.{ .jsx = .react });
pub const TSXParser = NewParser(.{ .jsx = .react, .typescript = true });
const TypeScriptParser = NewParser(.{ .typescript = true });
// `.scan_only` variants collect imports without visiting/transforming.
const JavaScriptImportScanner = if (bun.fast_debug_build_mode) TSXImportScanner else NewParser(.{ .scan_only = true });
const JSXImportScanner = if (bun.fast_debug_build_mode) TSXImportScanner else NewParser(.{ .jsx = .react, .scan_only = true });
const TSXImportScanner = NewParser(.{ .jsx = .react, .typescript = true, .scan_only = true });
const TypeScriptImportScanner = if (bun.fast_debug_build_mode) TSXImportScanner else NewParser(.{ .typescript = true, .scan_only = true });
|
||
|
||
// The "await" and "yield" expressions are never allowed in argument lists but
|
||
// may or may not be allowed otherwise depending on the details of the enclosing
|
||
// function or module. This needs to be handled when parsing an arrow function
|
||
// argument list because we don't know if these expressions are not allowed until
|
||
// we reach the "=>" token (or discover the absence of one).
|
||
//
|
||
// Specifically, for await:
|
||
//
|
||
// // This is ok
|
||
// async function foo() { (x = await y) }
|
||
//
|
||
// // This is an error
|
||
// async function foo() { (x = await y) => {} }
|
||
//
|
||
// And for yield:
|
||
//
|
||
// // This is ok
|
||
// function* foo() { (x = yield y) }
|
||
//
|
||
// // This is an error
|
||
// function* foo() { (x = yield y) => {} }
|
||
//
|
||
/// Errors for "await"/"yield" seen inside a parenthesized expression that may
/// turn out to be an arrow-function argument list (see the comment above).
/// The ranges stay `None` until such an expression is encountered; they are
/// only reported if the "=>" token is actually found.
const DeferredArrowArgErrors = struct {
    invalid_expr_await: logger.Range = logger.Range.None,
    invalid_expr_yield: logger.Range = logger.Range.None,
};
|
||
|
||
/// Builds a "lazy export" AST around a pre-constructed expression (used for
/// non-JS inputs that are exposed as a module). Diagnostics are collected in
/// a temporary log and copied into `log_to_copy_into`; returns `null` on
/// failure instead of propagating the error.
pub fn newLazyExportAST(
    allocator: std.mem.Allocator,
    define: *Define,
    opts: Parser.Options,
    log_to_copy_into: *logger.Log,
    expr: Expr,
    source: *const logger.Source,
    comptime runtime_api_call: []const u8,
) anyerror!?js_ast.Ast {
    var temp_log = logger.Log.init(allocator);
    const log = &temp_log;
    var parser = Parser{
        .options = opts,
        .allocator = allocator,
        .lexer = js_lexer.Lexer.initWithoutReading(log, source.*, allocator),
        .define = define,
        .source = source,
        .log = log,
    };
    var result = parser.toLazyExportAST(
        expr,
        runtime_api_call,
    ) catch |err| {
        // Make sure the failure is visible even if nothing was logged.
        if (temp_log.errors == 0) {
            log_to_copy_into.addRangeError(source, parser.lexer.range(), @errorName(err)) catch unreachable;
        }

        temp_log.appendToMaybeRecycled(log_to_copy_into, source) catch {};
        return null;
    };

    temp_log.appendToMaybeRecycled(log_to_copy_into, source) catch {};
    result.ast.has_lazy_export = true;
    return result.ast;
}
|
||
|
||
/// Selects how (or whether) the parser wraps a module's statements for output.
const WrapMode = enum {
    // No wrapping; statements are emitted as-is.
    none,
    // Wrap using Bun's CommonJS handling.
    bun_commonjs,
};
|
||
|
||
/// Rewrites a module's ES-module export statements for hot-module reloading:
/// `export` keywords are stripped from top-level statements and every exported
/// value is collected into `export_props`, which `finalize` turns into a single
/// `module.exports = { ... }` assignment. Live bindings (reassignable exports
/// and re-exported imports) become getter properties so reads observe the
/// current value.
pub const ConvertESMExportsForHmr = struct {
    // The part all statements/symbols are merged into by `finalize`.
    last_part: *js_ast.Part,
    // Import record indices already emitted, used to de-duplicate imports.
    imports_seen: std.AutoArrayHashMapUnmanaged(u32, void) = .{},
    // Accumulated properties for the final exports object literal.
    export_props: std.ArrayListUnmanaged(G.Property) = .{},
    // The rewritten top-level statements, in order.
    stmts: std.ArrayListUnmanaged(Stmt) = .{},

    /// Rewrites one top-level statement and appends the result to `ctx.stmts`.
    /// Export-related statements are transformed (or dropped entirely when the
    /// exported value can be moved into the exports object); everything else
    /// passes through unchanged.
    fn convertStmt(ctx: *ConvertESMExportsForHmr, p: anytype, stmt: Stmt) !void {
        const new_stmt = switch (stmt.data) {
            else => stmt,
            .s_local => |st| stmt: {
                if (!st.is_export) break :stmt stmt;

                st.is_export = false;

                if (st.kind.isReassignable()) {
                    // var/let can be reassigned later, so every binding must be
                    // exported as a live binding (getter).
                    for (st.decls.slice()) |decl| {
                        try ctx.visitBindingToExport(p, decl.binding, true);
                    }
                } else {
                    // TODO: remove this dupe
                    var dupe_decls = try std.ArrayListUnmanaged(G.Decl).initCapacity(p.allocator, st.decls.len);

                    for (st.decls.slice()) |decl| {
                        bun.assert(decl.value != null); // const must be initialized

                        switch (decl.binding.data) {
                            .b_missing => {},

                            .b_identifier => |id| {
                                const symbol = p.symbols.items[id.ref.inner_index];

                                // if the symbol is not used, we don't need to preserve
                                // a binding in this scope. we can move it to the exports object.
                                if (symbol.use_count_estimate == 0 and decl.value.?.canBeMoved()) {
                                    try ctx.export_props.append(p.allocator, .{
                                        .key = Expr.init(E.String, .{ .data = symbol.original_name }, decl.binding.loc),
                                        .value = decl.value,
                                    });
                                } else {
                                    dupe_decls.appendAssumeCapacity(decl);
                                    try ctx.visitBindingToExport(p, decl.binding, false);
                                }
                            },

                            else => {
                                // Destructuring patterns keep their declaration and
                                // export each named binding individually.
                                dupe_decls.appendAssumeCapacity(decl);
                                try ctx.visitBindingToExport(p, decl.binding, false);
                            },
                        }
                    }

                    // All declarations were moved into the exports object;
                    // nothing remains to emit.
                    if (dupe_decls.items.len == 0) {
                        return;
                    }

                    st.decls = G.Decl.List.fromList(dupe_decls);
                }

                break :stmt stmt;
            },
            .s_export_default => |st| stmt: {
                // Simple case: we can move this to the default property of the exports object
                if (st.canBeMoved()) {
                    try ctx.export_props.append(p.allocator, .{
                        .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc),
                        .value = st.value.toExpr(),
                    });
                    // no statement emitted
                    return;
                }

                // Otherwise, we need a temporary
                const temp_id = p.generateTempRef("default_export");
                try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = temp_id, .is_top_level = true });
                try ctx.last_part.symbol_uses.putNoClobber(p.allocator, temp_id, .{ .count_estimate = 1 });
                try p.current_scope.generated.push(p.allocator, temp_id);

                try ctx.export_props.append(p.allocator, .{
                    .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc),
                    .value = Expr.initIdentifier(temp_id, stmt.loc),
                });

                // Replace `export default <value>` with `const <temp> = <value>`.
                break :stmt Stmt.alloc(S.Local, .{
                    .kind = .k_const,
                    .decls = try G.Decl.List.fromSlice(p.allocator, &.{
                        .{
                            .binding = Binding.alloc(p.allocator, B.Identifier{ .ref = temp_id }, stmt.loc),
                            .value = st.value.toExpr(),
                        },
                    }),
                }, stmt.loc);
            },
            .s_class => |st| stmt: {
                // Strip the "export" keyword
                if (!st.is_export) break :stmt stmt;

                // Export as CommonJS
                try ctx.export_props.append(p.allocator, .{
                    .key = Expr.init(E.String, .{
                        .data = p.symbols.items[st.class.class_name.?.ref.?.inner_index].original_name,
                    }, stmt.loc),
                    .value = Expr.initIdentifier(st.class.class_name.?.ref.?, stmt.loc),
                });

                st.is_export = false;

                break :stmt stmt;
            },
            .s_function => |st| stmt: {
                // Strip the "export" keyword
                if (!st.func.flags.contains(.is_export)) break :stmt stmt;

                st.func.flags.remove(.is_export);

                // Export as CommonJS
                try ctx.export_props.append(p.allocator, .{
                    .key = Expr.init(E.String, .{
                        .data = p.symbols.items[st.func.name.?.ref.?.inner_index].original_name,
                    }, stmt.loc),
                    .value = Expr.initIdentifier(st.func.name.?.ref.?, stmt.loc),
                });

                break :stmt stmt;
            },
            .s_export_clause => |st| {
                // `export { a, b as c }` — each item becomes an exports property.
                for (st.items) |item| {
                    const ref = item.name.ref.?;
                    try ctx.visitRefToExport(p, ref, item.alias, item.name.loc, false);
                }

                return; // do not emit a statement here
            },
            .s_export_from => |st| stmt: {
                // `export { a } from "mod"` — re-exports are rewritten as an
                // import plus live-binding exports.
                for (st.items) |*item| {
                    const ref = item.name.ref.?;
                    const symbol = &p.symbols.items[ref.innerIndex()];
                    if (symbol.namespace_alias == null) {
                        symbol.namespace_alias = .{
                            .namespace_ref = st.namespace_ref,
                            .alias = item.original_name,
                            .import_record_index = st.import_record_index,
                        };
                    }
                    try ctx.visitRefToExport(p, ref, item.alias, item.name.loc, true);

                    // imports and export statements have their alias +
                    // original_name swapped. this is likely a design bug in
                    // the parser but since everything uses these
                    // assumptions, this hack is simpler than making it
                    // proper
                    const alias = item.alias;
                    item.alias = item.original_name;
                    item.original_name = alias;
                }

                const gop = try ctx.imports_seen.getOrPut(p.allocator, st.import_record_index);
                if (gop.found_existing) return;
                break :stmt Stmt.alloc(S.Import, .{
                    .import_record_index = st.import_record_index,
                    .is_single_line = true,
                    .default_name = null,
                    .items = st.items,
                    .namespace_ref = st.namespace_ref,
                    .star_name_loc = null,
                }, stmt.loc);
            },
            .s_export_star => {
                bun.todoPanic(@src(), "hot-module-reloading instrumentation for 'export * from'", .{});
            },
            // De-duplicate import statements. It is okay to disregard
            // named/default imports here as we always rewrite them as
            // full qualified property accesses (need to so live-bindings)
            .s_import => |st| stmt: {
                const gop = try ctx.imports_seen.getOrPut(p.allocator, st.import_record_index);
                if (gop.found_existing) return;
                break :stmt stmt;
            },
        };

        try ctx.stmts.append(p.allocator, new_stmt);
    }

    /// Recursively walks a binding pattern (identifier, array, or object
    /// destructuring) and registers every named identifier as an export.
    fn visitBindingToExport(
        ctx: *ConvertESMExportsForHmr,
        p: anytype,
        binding: Binding,
        is_live_binding: bool,
    ) !void {
        switch (binding.data) {
            .b_missing => {},
            .b_identifier => |id| {
                try ctx.visitRefToExport(p, id.ref, null, binding.loc, is_live_binding);
            },
            .b_array => |array| {
                for (array.items) |item| {
                    try ctx.visitBindingToExport(p, item.binding, is_live_binding);
                }
            },
            .b_object => |object| {
                for (object.properties) |item| {
                    try ctx.visitBindingToExport(p, item.value, is_live_binding);
                }
            },
        }
    }

    /// Registers one symbol as a property of the exports object.
    /// `export_symbol_name` overrides the property name (used for aliased
    /// exports); otherwise the symbol's original name is used. Live bindings
    /// and imported symbols are exported through a getter so reads always see
    /// the current value; everything else is exported by value.
    fn visitRefToExport(
        ctx: *ConvertESMExportsForHmr,
        p: anytype,
        ref: Ref,
        export_symbol_name: ?[]const u8,
        loc: logger.Loc,
        is_live_binding_source: bool,
    ) !void {
        const symbol = p.symbols.items[ref.inner_index];
        // Imported symbols must be referenced through E.ImportIdentifier so the
        // printer rewrites them as namespace property accesses.
        const id = if (symbol.kind == .import)
            Expr.init(E.ImportIdentifier, .{ .ref = ref }, loc)
        else
            Expr.initIdentifier(ref, loc);
        if (is_live_binding_source or symbol.kind == .import) {
            // TODO: instead of requiring getters for live-bindings,
            // a callback propagation system should be considered.
            // mostly because here, these might not even be live
            // bindings, and re-exports are so, so common.
            const key = Expr.init(E.String, .{
                .data = export_symbol_name orelse symbol.original_name,
            }, loc);

            // This is technically incorrect in that we've marked this as a
            // top level symbol. but all we care about is preventing name
            // collisions, not necessarily the best minification (dev only)
            const arg1 = p.generateTempRef(symbol.original_name);
            try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = arg1, .is_top_level = true });
            try ctx.last_part.symbol_uses.putNoClobber(p.allocator, arg1, .{ .count_estimate = 1 });
            try p.current_scope.generated.push(p.allocator, arg1);

            // Live bindings need to update the value internally and externally.
            // 'get abc() { return abc }'
            try ctx.export_props.append(p.allocator, .{
                .kind = .get,
                .key = key,
                .value = Expr.init(E.Function, .{ .func = .{
                    .body = .{
                        .stmts = try p.allocator.dupe(Stmt, &.{
                            Stmt.alloc(S.Return, .{ .value = id }, loc),
                        }),
                        .loc = loc,
                    },
                } }, loc),
            });
            // no setter is added since live bindings are read-only
        } else {
            // 'abc,'
            try ctx.export_props.append(p.allocator, .{
                .key = Expr.init(E.String, .{
                    .data = export_symbol_name orelse symbol.original_name,
                }, loc),
                .value = id,
            });
        }
    }

    /// Emits the final `<module>.exports = { ... }` assignment (if any exports
    /// were collected), then merges every earlier part's statements, symbols,
    /// and import records into `last_part`, marking the drained parts dead.
    /// Returns the (mutated) `all_parts` slice.
    pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.Part) ![]js_ast.Part {
        if (ctx.export_props.items.len > 0) {
            try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{
                .value = Expr.assign(
                    Expr.init(E.Dot, .{
                        .target = Expr.initIdentifier(p.module_ref, logger.Loc.Empty),
                        .name = "exports",
                        .name_loc = logger.Loc.Empty,
                    }, logger.Loc.Empty),
                    Expr.init(E.Object, .{
                        .properties = G.Property.List.fromList(ctx.export_props),
                    }, logger.Loc.Empty),
                ),
            }, logger.Loc.Empty));

            // mark a dependency on module_ref so it is renamed
            try ctx.last_part.symbol_uses.put(p.allocator, p.module_ref, .{ .count_estimate = 1 });
            try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = p.module_ref, .is_top_level = true });
        }

        // TODO: this is a tiny mess. it is honestly trying too hard to merge all parts into one
        for (all_parts[0 .. all_parts.len - 1]) |*part| {
            try ctx.last_part.declared_symbols.appendList(p.allocator, part.declared_symbols);
            try ctx.last_part.import_record_indices.append(p.allocator, part.import_record_indices.slice());
            for (part.symbol_uses.keys(), part.symbol_uses.values()) |k, v| {
                // Merge use counts; first sighting copies, later ones accumulate.
                const gop = try ctx.last_part.symbol_uses.getOrPut(p.allocator, k);
                if (!gop.found_existing) {
                    gop.value_ptr.* = v;
                } else {
                    gop.value_ptr.count_estimate += v.count_estimate;
                }
            }
            // Drain the part and point its dependencies at the surviving
            // (last) part so downstream bookkeeping stays consistent.
            part.stmts = &.{};
            part.declared_symbols.entries.len = 0;
            part.tag = .dead_due_to_inlining;
            part.dependencies.clearRetainingCapacity();
            try part.dependencies.push(p.allocator, .{
                .part_index = @intCast(all_parts.len - 1),
                .source_index = p.source.index,
            });
        }

        try ctx.last_part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items);
        try ctx.last_part.declared_symbols.appendList(p.allocator, p.declared_symbols);

        ctx.last_part.stmts = ctx.stmts.items;
        ctx.last_part.tag = .none;

        return all_parts;
    }
};
|
||
|
||
/// ECMAScript ToInt32 conversion (equivalent of esbuild's
/// js_ast_helpers.ToInt32): truncate toward zero, reduce modulo 2^32, and
/// reinterpret the low 32 bits as a signed integer.
fn floatToInt32(f: f64) i32 {
    // NaN and ±infinity convert to zero per the spec.
    if (!std.math.isFinite(f)) return 0;

    // Reduce the magnitude modulo 2^32; @intFromFloat then truncates the
    // fractional part, and the result is guaranteed to fit in a u32.
    const modulus: f64 = std.math.maxInt(u32) + 1;
    const low_bits: u32 = @intFromFloat(@mod(@abs(f), modulus));
    const as_signed: i32 = @bitCast(low_bits);

    // Restore the sign with wrapping negation so that values mapping to
    // i32 min (e.g. 2^31) round-trip without tripping overflow checks.
    return if (f < 0) -%as_signed else as_signed;
}
|