Mirror of https://github.com/oven-sh/bun, synced 2026-02-02 15:08:46 +00:00
at least it builds
src/ast/base.zig (new file, 8 lines)
@@ -0,0 +1,8 @@
pub const JavascriptString = []u16;

pub const NodeIndex = u32;
pub const NodeIndexNone = 4294967293;

pub const BindingNodeIndex = NodeIndex;
pub const StmtNodeIndex = NodeIndex;
pub const ExprNodeIndex = NodeIndex;
@@ -40,7 +40,7 @@ pub const PathName = struct {
pub fn nonUniqueNameString(self: *PathName, allocator: *std.mem.Allocator) !string {
if (strings.eql("index", self.base)) {
if (self.dir.len > 0) {
return MutableString.ensureValidIdentifier(PathName.init(self.dir), allocator);
return MutableString.ensureValidIdentifier(PathName.init(self.dir).dir, allocator);
}
}

@@ -84,13 +84,13 @@ pub const PathName = struct {
};

pub const Path = struct {
pretty_path: string,
pretty: string,
text: string,
namespace: string,
name: PathName,

pub fn init(text: string) Path {
return Path{ .pretty_path = text, .text = text, .namespace = "file", .name = PathName.init(text) };
return Path{ .pretty = text, .text = text, .namespace = "file", .name = PathName.init(text) };
}

pub fn isBefore(a: *Path, b: Path) bool {

@@ -1,7 +1,7 @@
const fs = @import("fs.zig");
const logger = @import("logger.zig");

export const ImportKind = enum(u8) {
pub const ImportKind = enum(u8) {

// An entry point provided by the user
entry_point,
src/js_ast.zig (300 changed lines)
@@ -2,20 +2,9 @@ const std = @import("std");
const logger = @import("logger.zig");

usingnamespace @import("strings.zig");
usingnamespace @import("ast/base.zig");

const ast = @import("import_record.zig");

pub const JavascriptStringValue = []const u16;

pub const NodeIndex = u32;
pub const NodeIndexNone = 4294967293;

pub const DataIndex = u16;
pub const DataIndexNone = 65533;

pub const BindingNodeIndex = NodeIndex;
pub const StmtNodeIndex = Stmt;
pub const ExprNodeIndex = Expr;
const ImportRecord = @import("import_record.zig").ImportRecord;

// TODO: figure out if we actually need this
// -- original comment --
@@ -49,8 +38,6 @@ pub const ImportItemStatus = enum(u8) {

pub const LocRef = struct { loc: logger.Loc, ref: ?Ref };

pub const Comment = struct { text: string };

pub const FnBody = struct {
loc: logger.Loc,
stmts: []StmtNodeIndex,
@@ -73,8 +60,15 @@ pub const Fn = struct {
};

pub const Binding = struct {
type_: Type = Type.b_missing,
data: B,
};

pub const B = union(enum) {
identifier: B.Identifier,
array: B.Array,
property: B.Property,
object: B.Object,
missing: B.Missing,

pub const Type = enum {
b_missing,
@@ -87,8 +81,6 @@ pub const Binding = struct {
ref: Ref,
};

pub const Object = struct { properties: []Property };

pub const Property = struct {
pub const Kind = enum {
normal,
@@ -97,47 +89,32 @@ pub const Binding = struct {
spread,
};

key: NodeIndex,
value: NodeIndex = NodeIndexNone,
initializer: Kind = Kind.normal,
key: ExprNodeIndex,
value: ?BindingNodeIndex,
kind: Kind = Kind.normal,
initializer: ?ExprNodeIndex,
is_computed: bool = false,
is_method: bool = false,
is_static: bool = false,
was_shorthand: bool = false,
};

pub const Array = struct {
binding: B,
};
};
pub const Object = struct { properties: []Property };

pub const B = union(enum) {
identifier: Binding.Identifier,
array: Binding.Array,
property: Binding.Property,
object: Binding.Object,
missing: Binding.Missing,
pub const Array = struct { binding: BindingNodeIndex, default_value: ?Expr };

pub const Missing = struct {};
};

pub const Arg = struct {
ts_decorators: ?[]ExprNodeIndex,
binding: B,
default: ?ExprNodeIndex,
ts_decorators: ?[]Expr = null,
binding: Binding,
default: ?Expr = null,

// "constructor(public x: boolean) {}"
is_typescript_ctor_field: bool = false,
};

pub const Class = struct {
class_keyword: logger.Range,
ts_decorators: ?[]ExprNodeIndex,
name: logger.Loc,
extends: ?ExprNodeIndex,
body_loc: logger.Loc,
properties: ?[]Property,
};
const _Class = Class;

pub const ClauseItem = struct {
alias: string,
alias_loc: logger.Loc,
@@ -154,9 +131,52 @@ pub const ClauseItem = struct {
original_name: string,
};

pub const Decl = struct {
binding: Binding,
value: ?ExprNodeIndex,
pub const G = struct {
pub const Decl = struct {
binding: BindingNodeIndex,
value: ?ExprNodeIndex = null,
};

pub const NamespaceAlias = struct {
namespace_ref: Ref,
alias: string,
};

pub const Class = struct {
class_keyword: logger.Range,
ts_decorators: ?[]ExprNodeIndex = null,
name: logger.Loc,
extends: ?ExprNodeIndex = null,
body_loc: logger.Loc,
properties: ?[]Property = null,
};

// invalid shadowing if left as Comment
pub const Comment = struct { loc: logger.Loc, text: string };

pub const Property = struct {
ts_decorators: []ExprNodeIndex,
key: ExprNodeIndex,

// This is omitted for class fields
value: ?Expr,

// This is used when parsing a pattern that uses default values:
//
// [a = 1] = [];
// ({a = 1} = {});
//
// It's also used for class fields:
//
// class Foo { a = 1 }
//
initializer: ?ExprNodeIndex = null,
kind: B.Property.Kind,
is_computed: bool = false,
is_method: bool = false,
is_static: bool = false,
was_shorthand: bool = false,
};
};

pub const Symbol = struct {
@@ -176,7 +196,7 @@ pub const Symbol = struct {
// mode, re-exported symbols are collapsed using MergeSymbols() and renamed
// symbols from other files that end up at this symbol must be able to tell
// if it has a namespace alias.
namespace_alias: *NamespaceAlias,
namespace_alias: *G.NamespaceAlias,

// Used by the parser for single pass parsing. Symbols that have been merged
// form a linked-list where the last link is the symbol to use. This link is
@@ -401,6 +421,10 @@ pub const Symbol = struct {
return kind == Symbol.Kind.hoisted or kind == Symbol.Kind.hoisted_function;
}

pub fn isHoisted(self: *Symbol) bool {
return Symbol.isKindHoisted(self.kind);
}

pub fn isKindHoistedOrFunction(kind: Symbol.Kind) bool {
return isKindHoisted(kind) or kind == Symbol.Kind.generator_or_async_function;
}
@@ -429,7 +453,7 @@ pub const E = struct {

pub const Unary = struct {
op: Op.Code,
value: Expr,
value: ExprNodeIndex,
};

pub const Binary = struct {
@@ -518,7 +542,6 @@ pub const E = struct {
};

pub const Function = Fn;
pub const Class = _Class;

pub const Identifier = struct {
ref: Ref = Ref.None,
@@ -577,8 +600,8 @@ pub const E = struct {

pub const JSXElement = struct {
tag: ?ExprNodeIndex,
properties: []Property,
children: []Expr,
properties: []G.Property,
children: []ExprNodeIndex,
};

pub const Missing = struct {};
@@ -590,46 +613,48 @@ pub const E = struct {
};

pub const Object = struct {
properties: []Property,
properties: []G.Property,
comma_after_spread: logger.Loc,
is_single_line: bool,
is_parenthesized: bool,
};

pub const Spread = struct { value: Expr };
pub const Spread = struct { value: ExprNodeIndex };

pub const String = struct {
value: JavascriptStringValue,
value: JavascriptString,
legacy_octal_loc: logger.Loc,
prefer_template: bool,
};

// value is in the Node
pub const TemplatePart = struct {
value: Expr,
value: ExprNodeIndex,
tail_loc: logger.Loc,
tail: JavascriptStringValue,
tail: JavascriptString,
tail_raw: string,
};

pub const Template = struct { tag: ?ExprNodeIndex, head: JavascriptStringValue, head_raw: string, // This is only filled out for tagged template literals
pub const Template = struct { tag: ?ExprNodeIndex, head: JavascriptString, head_raw: string, // This is only filled out for tagged template literals
parts: ?[]TemplatePart, legacy_octal_loc: logger.Loc };

pub const RegExp = struct {
value: string,
};

pub const Await = struct { value: Expr };
pub const Class = G.Class;

pub const Await = struct { value: ExprNodeIndex };

pub const Yield = struct {
value: ?Expr,
value: ?ExprNodeIndex,
is_star: bool,
};

pub const If = struct {
test_: Expr,
yes: Expr,
no: Expr,
test_: ExprNodeIndex,
yes: ExprNodeIndex,
no: ExprNodeIndex,
};

pub const RequireOrRequireResolve = struct {
@@ -637,7 +662,7 @@ pub const E = struct {
};

pub const Import = struct {
expr: Expr,
expr: ExprNodeIndex,
import_record_index: u32,

// Comments inside "import()" expressions have special meaning for Webpack.
@@ -647,7 +672,7 @@ pub const E = struct {
// because esbuild is not Webpack. But we do preserve them since doing so is
// harmless, easy to maintain, and useful to people. See the Webpack docs for
// more info: https://webpack.js.org/api/module-methods/#magic-comments.
leading_interior_comments: []Comment,
leading_interior_comments: []G.Comment,
};
};
@@ -685,6 +710,21 @@ pub const Stmt = struct {
s_break: S.Break,
s_continue: S.Continue,
};

pub fn caresAboutScope(self: *Stmt) bool {
return switch (self.data) {
.s_block, .s_empty, .s_debugger, .s_expr, .s_if, .s_for, .s_for_in, .s_for_of, .s_do_while, .s_while, .s_with, .s_try, .s_switch, .s_return, .s_throw, .s_break, .s_continue, .s_directive => {
return false;
},

.s_local => |local| {
return local.kind != Kind.k_var;
},
else => {
return true;
},
};
}
};

pub const Expr = struct {
@@ -761,7 +801,7 @@ pub const Expr = struct {
pub const EnumValue = struct {
loc: logger.Loc,
ref: Ref,
name: []u16,
name: JavascriptString,
value: ?ExprNodeIndex,
};

@@ -770,7 +810,7 @@ pub const S = struct {

pub const Comment = struct { text: string };

pub const Directive = struct { value: JavascriptStringValue, legacy_octal_loc: logger.Loc };
pub const Directive = struct { value: JavascriptString, legacy_octal_loc: logger.Loc };

pub const ExportClause = struct { items: []ClauseItem };

@@ -812,14 +852,14 @@ pub const S = struct {
};

pub const Class = struct {
class: _Class,
class: G.Class,
is_export: bool,
};

pub const If = struct {
test_: ExprNodeIndex,
yes: StmtNodeIndex,
no: StmtNodeIndex = NodeIndexNone,
no: ?StmtNodeIndex,
};

pub const For = struct {
@@ -877,14 +917,14 @@ pub const S = struct {
// Otherwise: This is an auto-generated Ref for the namespace representing
// the imported file. In this case StarLoc is nil. The NamespaceRef is used
// when converting this module to a CommonJS module.
namespace_ref: Ref, default_name: *LocRef, items: *[]ClauseItem, star_name_loc: *logger.Loc, import_record_index: uint32, is_single_line: bool };
namespace_ref: Ref, default_name: *LocRef, items: *[]ClauseItem, star_name_loc: *logger.Loc, import_record_index: u32, is_single_line: bool };

pub const Return = struct {};
pub const Throw = struct {};

pub const Local = struct {
kind: Kind = Kind.k_var,
decls: []Decl,
decls: []G.Decl,
is_export: bool = false,
// The TypeScript compiler doesn't generate code for "import foo = bar"
// statements where the import is never used.
@@ -908,7 +948,7 @@ pub const S = struct {

pub const Catch = struct {
loc: logger.Loc,
binding: *B,
binding: ?BindingNodeIndex,
body: []StmtNodeIndex,
};

@@ -1014,9 +1054,9 @@ pub const Op = struct {
member,
};

text: []const u8,
text: string,
level: Level,
is_keyword: bool,
is_keyword: bool = false,

const Table = []Op{
// Prefix
@@ -1087,13 +1127,13 @@ pub const Op = struct {
};

pub const ArrayBinding = struct {
binding: Binding,
binding: BindingNodeIndex,
default_value: ?ExprNodeIndex,
};

pub const Ast = struct {
approximate_line_count: i32 = 0,
has_lazy_export = false,
has_lazy_export: bool = false,

// This is a list of CommonJS features. When a file uses CommonJS features,
// it's not a candidate for "flat bundling" and must be wrapped in its own
@@ -1105,19 +1145,19 @@ pub const Ast = struct {

// This is a list of ES6 features. They are ranges instead of booleans so
// that they can be used in log messages. Check to see if "Len > 0".
import_keyword: logger.Range = logger.Range.Empty, // Does not include TypeScript-specific syntax or "import()"
export_keyword: logger.Range = logger.Range.Empty, // Does not include TypeScript-specific syntax
top_level_await_keyword: logger.Range = logger.Range.Empty,
import_keyword: ?logger.Range = null, // Does not include TypeScript-specific syntax or "import()"
export_keyword: ?logger.Range = null, // Does not include TypeScript-specific syntax
top_level_await_keyword: ?logger.Range = null,

// These are stored at the AST level instead of on individual AST nodes so
// they can be manipulated efficiently without a full AST traversal
import_records: []ast.ImportRecord,
import_records: ?[]ImportRecord = null,

hashbang: ?string,
directive: ?string,
url_for_css: ?string,
parts: std.ArrayList([]Part),
symbols: std.ArrayList([]Symbol),
hashbang: ?string = null,
directive: ?string = null,
url_for_css: ?string = null,
parts: std.ArrayList(Part),
symbols: std.ArrayList(Symbol),
module_scope: ?Scope,
// char_freq: *CharFreq,
exports_ref: ?Ref,
@@ -1128,9 +1168,9 @@ pub const Ast = struct {
// since we already have to traverse the AST then anyway and the parser pass
// is conveniently fully parallelized.
named_imports: std.AutoHashMap(Ref, NamedImport),
named_exports: std.AutoHashMap(string, NamedExport),
named_exports: std.StringHashMap(NamedExport),
top_level_symbol_to_parts: std.AutoHashMap(Ref, []u32),
export_star_import_records: std.ArrayList([]u32),
export_star_import_records: std.ArrayList(u32),
};
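Editor's note: the comment in the hunk above still says to check "Len > 0", but the keyword fields now become optionals that default to null. A minimal, hedged sketch of how a caller might test them after this change; `AstSketch`, `Range`, and `usesEsmSyntax` are illustrative stand-ins, not names from the commit:

```zig
const std = @import("std");

// Stripped-down stand-in for the optional keyword-range fields above.
const Range = struct { loc: i32 = 0, len: i32 = 0 };
const AstSketch = struct {
    import_keyword: ?Range = null,
    export_keyword: ?Range = null,
    top_level_await_keyword: ?Range = null,
};

// With `?Range`, presence is tested by comparing against null (or unwrapping
// with `if (x) |range| ...`) instead of checking `.len > 0`.
fn usesEsmSyntax(ast: AstSketch) bool {
    return ast.import_keyword != null or ast.export_keyword != null or ast.top_level_await_keyword != null;
}

test "optional keyword ranges" {
    var ast = AstSketch{};
    std.debug.assert(!usesEsmSyntax(ast));
    ast.import_keyword = Range{ .loc = 0, .len = 6 };
    std.debug.assert(usesEsmSyntax(ast));
}
```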
pub const Span = struct {
@@ -1185,7 +1225,8 @@ pub const Dependency = struct {
// shaking and can be assigned to separate chunks (i.e. output files) by code
// splitting.
pub const Part = struct {
stmts: []StmtNodeIndex,
stmts: []Stmt,
expr: []Expr,
scopes: []*Scope,

// Each is an index into the file-level import record list
@@ -1217,11 +1258,35 @@ pub const Part = struct {
// This is true if this file has been marked as live by the tree shaking
// algorithm.
is_live: bool = false,

pub fn stmtAt(self: *Part, index: StmtNodeIndex) ?Stmt {
if (std.builtin.mode == std.builtin.Mode.ReleaseFast) {
return self.stmts[@intCast(usize, index)];
} else {
if (self.stmts.len > index) {
return self.stmts[@intCast(usize, index)];
}

return null;
}
}

pub fn exprAt(self: *Part, index: ExprNodeIndex) ?Expr {
if (std.builtin.mode == std.builtin.Mode.ReleaseFast) {
return self.expr[@intCast(usize, index)];
} else {
if (self.expr.len > index) {
return self.expr[@intCast(usize, index)];
}

return null;
}
}
};

pub const StmtOrExpr = union(enum) {
stmt: Stmt,
expr: Expr,
stmt: StmtNodeIndex,
expr: ExprNodeIndex,
};

pub const NamedImport = struct {
@@ -1260,13 +1325,13 @@ pub const StrictModeKind = enum {

pub const Scope = struct {
kind: Kind = Kind.block,
parent: ?Scope,
parent: ?*Scope,
children: []*Scope,
members: std.AutoHashMap(string, Member),
generated: ?[]Ref,
members: std.StringHashMap(Member),
generated: ?[]Ref = null,

// This is used to store the ref of the label symbol for ScopeLabel scopes.
label_ref: ?Ref,
label_ref: ?Ref = null,
label_stmt_is_loop: bool = false,

// If a scope contains a direct eval() expression, then none of the symbols
@@ -1277,7 +1342,7 @@ pub const Scope = struct {
// This is to help forbid "arguments" inside class body scopes
forbid_arguments: bool = false,

strict_mode: StrictModeKind = StrictModeKind.explicit_strict_mode,
strict_mode: StrictModeKind = StrictModeKind.sloppy_mode,

pub const Member = struct { ref: Ref, loc: logger.Loc };
pub const Kind = enum(u8) {
@@ -1292,30 +1357,27 @@ pub const Scope = struct {
function_args,
function_body,
};
};

pub fn ensureValidIdentifier(base: string, allocator: *std.mem.Allocator) string {
// Convert it to an ASCII identifier. Note: If you change this to a non-ASCII
// identifier, you're going to potentially cause trouble with non-BMP code
// points in target environments that don't support bracketed Unicode escapes.
var needsGap = false;
var str = MutableString.initCopy(allocator: *std.mem.Allocator, str: anytype)
for (base) |c| {
if (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (len(bytes) > 0 && c >= '0' && c <= '9') {
if needsGap {
bytes = append(bytes, '_')
needsGap = false
}
bytes = append(bytes, byte(c))
} else if len(bytes) > 0 {
needsGap = true
}
pub fn recursiveSetStrictMode(s: *Scope, kind: StrictModeKind) void {
if (s.strict_mode == .sloppy_mode) {
s.strict_mode = kind;
for (s.children) |child| {
child.recursiveSetStrictMode(kind);
}
}
}

// Make sure the name isn't empty
if len(bytes) == 0 {
return "_"
}
return string(bytes)
}
pub fn kindStopsHoisting(s: *Scope) bool {
return @enumToInt(s.kind) > @enumToInt(Kind.entry);
}

pub fn initPtr(allocator: *std.mem.Allocator) !*Scope {
var scope = try allocator.create(Scope);
scope.members = @TypeOf(scope.members).init(allocator);
return scope;
}
};
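Editor's note: the new Scope helpers (recursiveSetStrictMode, kindStopsHoisting, initPtr) replace the Go-flavored ensureValidIdentifier block removed in the hunk above. A self-contained sketch of how the strict-mode propagation behaves, using the same older-Zig style as this diff; `ScopeSketch` and `StrictMode` are trimmed-down stand-ins, not the real js_ast types:

```zig
const std = @import("std");

const StrictMode = enum { sloppy_mode, explicit_strict_mode, implicit_strict_mode_import };

// Stand-in for js_ast.Scope: just a strict-mode flag and child scopes.
const ScopeSketch = struct {
    strict_mode: StrictMode = .sloppy_mode,
    children: []const *ScopeSketch,

    // Mirrors the shape of recursiveSetStrictMode above: only sloppy scopes are
    // updated, and the mode is pushed down into every child scope.
    pub fn recursiveSetStrictMode(s: *ScopeSketch, kind: StrictMode) void {
        if (s.strict_mode == .sloppy_mode) {
            s.strict_mode = kind;
            for (s.children) |child| {
                child.recursiveSetStrictMode(kind);
            }
        }
    }
};

test "strict mode propagates to child scopes" {
    var inner = ScopeSketch{ .children = &[_]*ScopeSketch{} };
    var kids = [_]*ScopeSketch{&inner};
    var module = ScopeSketch{ .children = kids[0..] };
    module.recursiveSetStrictMode(.implicit_strict_mode_import);
    std.debug.assert(inner.strict_mode == .implicit_strict_mode_import);
}
```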
// test "ast" {
// const ast = Ast{};
// }

@@ -3,7 +3,9 @@ const logger = @import("logger.zig");
const tables = @import("js_lexer_tables.zig");
const alloc = @import("alloc.zig");
const build_options = @import("build_options");
const js_ast = @import("js_ast.zig");

usingnamespace @import("ast/base.zig");
usingnamespace @import("strings.zig");

const _f = @import("./test/fixtures.zig");
@@ -33,25 +35,25 @@ pub const Lexer = struct {
start: usize = 0,
end: usize = 0,
approximate_newline_count: i32 = 0,
legacy_octal_loc: logger.Loc = 0,
previous_backslash_quote_in_jsx: logger.Range = logger.Range{},
legacy_octal_loc: logger.Loc = logger.Loc.Empty,
previous_backslash_quote_in_jsx: logger.Range = logger.Range.None,
token: T = T.t_end_of_file,
has_newline_before: bool = false,
has_pure_comment_before: bool = false,
preserve_all_comments_before: bool = false,
is_legacy_octal_literal: bool = false,
// comments_to_preserve_before: []js_ast.Comment,
// all_original_comments: []js_ast.Comment,
comments_to_preserve_before: ?[]js_ast.G.Comment = null,
all_original_comments: ?[]js_ast.G.Comment = null,
code_point: CodePoint = -1,
string_literal: []u16,
string_literal: JavascriptString,
identifier: []const u8 = "",
// jsx_factory_pragma_comment: js_ast.Span,
// jsx_fragment_pragma_comment: js_ast.Span,
// source_mapping_url: js_ast.Span,
jsx_factory_pragma_comment: ?js_ast.Span = null,
jsx_fragment_pragma_comment: ?js_ast.Span = null,
source_mapping_url: ?js_ast.Span = null,
number: f64 = 0.0,
rescan_close_brace_as_template_token: bool = false,
for_global_name: bool = false,
prev_error_loc: i32 = -1,
prev_error_loc: logger.Loc = logger.Loc.Empty,
allocator: *std.mem.Allocator,

fn nextCodepointSlice(it: *Lexer) callconv(.Inline) ?[]const u8 {
@@ -76,7 +78,7 @@ pub const Lexer = struct {

pub fn addError(self: *Lexer, _loc: usize, comptime format: []const u8, args: anytype, panic: bool) void {
const loc = logger.usize2Loc(_loc);
if (loc == self.prev_error_loc) {
if (eql(loc, self.prev_error_loc)) {
return;
}

@@ -90,7 +92,7 @@ pub const Lexer = struct {
}

pub fn addRangeError(self: *Lexer, range: logger.Range, comptime format: []const u8, args: anytype, panic: bool) void {
if (loc == self.prev_error_loc) {
if (eql(loc, self.prev_error_loc)) {
return;
}

@@ -753,12 +755,12 @@ pub const Lexer = struct {
}

pub fn init(log: logger.Log, source: logger.Source, allocator: *std.mem.Allocator) !Lexer {
var empty_string_literal: []u16 = undefined;
var empty_string_literal: JavascriptString = undefined;
var lex = Lexer{
.log = log,
.source = source,
.string_literal = empty_string_literal,
.prev_error_loc = -1,
.prev_error_loc = logger.Loc.Empty,
.allocator = allocator,
};
lex.step();
@@ -1,10 +1,15 @@
const std = @import("std");
const logger = @import("logger.zig");
const lexer = @import("js_lexer.zig");
const ast = @import("js_ast.zig");
const js_lexer = @import("js_lexer.zig");
const importRecord = @import("import_record.zig");
const js_ast = @import("js_ast.zig");
const options = @import("options.zig");
const alloc = @import("alloc.zig");
usingnamespace @import("strings.zig");

const Comment = js_ast._Comment;
const locModuleScope = logger.Loc.Empty;

const TempRef = struct {
ref: js_ast.Ref,
value: *js_ast.Expr,
@@ -12,13 +17,13 @@ const TempRef = struct {

const ImportNamespaceCallOrConstruct = struct {
ref: js_ast.Ref,
is_construct: bool,
is_construct: bool = false,
};

const ThenCatchChain = struct {
next_target: js_ast.E,
has_multiple_args: bool,
has_catch: bool,
has_multiple_args: bool = false,
has_catch: bool = false,
};

const Map = std.AutoHashMap;
@@ -30,7 +35,7 @@ const StringRefMap = std.StringHashMap(js_ast.Ref);
const StringBoolMap = std.StringHashMap(bool);
const RefBoolMap = Map(js_ast.Ref, bool);
const RefRefMap = Map(js_ast.Ref, js_ast.Ref);
const ImportRecord = @import("import_record.zig").ImportRecord;
const ImportRecord = importRecord.ImportRecord;
const ScopeOrder = struct {
loc: logger.Loc,
scope: *js_ast.Scope,
@@ -130,36 +135,41 @@ const PropertyOpts = struct {

pub const Parser = struct {
options: Options,
lexer: lexer.Lexer,
lexer: js_lexer.Lexer,
log: logger.Log,
source: logger.Source,
allocator: *std.mem.Allocator,
p: ?*P,

pub const Result = struct { ast: ast.Ast, ok: bool = false };
pub const Result = struct { ast: js_ast.Ast, ok: bool = false };

const Options = struct {
pub const Options = struct {
jsx: options.JSX,
asciiOnly: bool = true,
keepNames: bool = true,
mangleSyntax: bool = false,
mangeIdentifiers: bool = false,
omitRuntimeForTests: bool = false,
ignoreDCEAnnotations: bool = true,
preserveUnusedImportsTS: bool = false,
useDefineForClassFields: bool = false,
suppressWarningsAboutWeirdCode = true,
ascii_only: bool = true,
keep_names: bool = true,
mangle_syntax: bool = false,
mange_identifiers: bool = false,
omit_runtime_for_tests: bool = false,
ignore_dce_annotations: bool = true,
preserve_unused_imports_ts: bool = false,
use_define_for_class_fields: bool = false,
suppress_warnings_about_weird_code: bool = true,
moduleType: ModuleType = ModuleType.esm,
};

pub fn parse(self: *Parser) !Result {
if (self.p == null) {
self.p = try P.init(allocator, self.log, self.source, self.lexer, &self.options);
self.p = try P.init(self.allocator, self.log, self.source, self.lexer, self.options);
}

var result: Result = undefined;

if (self.p) |p| {}
if (self.p) |p| {
// Parse the file in the first pass, but do not bind symbols
var opts = ParseStatementOptions{ .is_module_scope = true };
const stmts = try p.parseStmtsUpTo(js_lexer.T.t_end_of_file, &opts);
try p.prepareForVisitPass();
}

return result;
}
@@ -167,7 +177,7 @@ pub const Parser = struct {
pub fn init(transform: options.TransformOptions, allocator: *std.mem.Allocator) !Parser {
const log = logger.Log{ .msgs = List(logger.Msg).init(allocator) };
const source = logger.Source.initFile(transform.entry_point, allocator);
const lexer = try lexer.Lexer.init(log, source, allocator);
const lexer = try js_lexer.Lexer.init(log, source, allocator);
return Parser{
.options = Options{
.jsx = options.JSX{
@@ -176,7 +186,7 @@ pub const Parser = struct {
.fragment = transform.jsx_fragment,
},
},

.allocator = allocator,
.lexer = lexer,
.source = source,
.log = log,
@@ -194,19 +204,19 @@ scopeIndex: usize };
const LexicalDecl = enum(u8) { forbid, allow_all, allow_fn_inside_if, allow_fn_inside_label };

const ParseStatementOptions = struct {
ts_decorators: *DeferredTsDecorators,
ts_decorators: ?DeferredTsDecorators = null,
lexical_decl: LexicalDecl = .forbid,
is_module_scope: bool = false,
is_namespace_scope: bool = false,
is_export: bool = false,
is_name_optional: bool = false, // For "export default" pseudo-statements,
is_type_script_declare: bool = false1,
is_typescript_declare: bool = false,
};
// P is for Parser!
const P = struct {
allocator: *std.mem.Allocator,
options: Options,
options: Parser.Options,
log: logger.Log,
source: logger.Source,
lexer: js_lexer.Lexer,
@@ -221,8 +231,8 @@ const P = struct {
fn_or_arrow_data_visit: FnOrArrowDataVisit,
fn_only_data_visit: FnOnlyDataVisit,
allocated_names: List(string),
latest_arrow_arg_loc: logger.Loc = -1,
forbid_suffix_after_as_loc: logger.Loc = -1,
latest_arrow_arg_loc: logger.Loc = logger.Loc.Empty,
forbid_suffix_after_as_loc: logger.Loc = logger.Loc.Empty,
current_scope: *js_ast.Scope,
scopes_for_current_part: List(*js_ast.Scope),
symbols: List(js_ast.Symbol),
@@ -280,10 +290,10 @@ const P = struct {
export_star_import_records: List(u32),

// These are for handling ES6 imports and exports
es6_import_keyword: logger.Range = logger.Range.Empty,
es6_export_keyword: logger.Range = logger.Range.Empty,
enclosing_class_keyword: logger.Range = logger.Range.Empty,
import_items_for_namespace: Map(js_ast.Ref, map(string, js_ast.LocRef)),
es6_import_keyword: logger.Range = logger.Range.None,
es6_export_keyword: logger.Range = logger.Range.None,
enclosing_class_keyword: logger.Range = logger.Range.None,
import_items_for_namespace: Map(js_ast.Ref, std.StringHashMap(js_ast.LocRef)),
is_import_item: RefBoolMap,
named_imports: Map(js_ast.Ref, js_ast.NamedImport),
named_exports: std.StringHashMap(js_ast.NamedExport),
@@ -314,7 +324,7 @@ const P = struct {
call_target: js_ast.E,
delete_target: js_ast.E,
loop_body: js_ast.S,
module_scope: ?js_ast.Scope = null,
module_scope: *js_ast.Scope = undefined,
is_control_flow_dead: bool = false,

// Inside a TypeScript namespace, an "export declare" statement can be used
@@ -408,7 +418,7 @@ const P = struct {
// AssignmentExpression
// Expression , AssignmentExpression
//
after_arrow_body_loc: logger.Loc = -1,
after_arrow_body_loc: logger.Loc = logger.Loc.Empty,

pub fn deinit(parser: *P) void {
parser.allocated_names.deinit();
@@ -483,12 +493,68 @@ const P = struct {
}
}

pub fn prepareForVisitPass(p: *P) !void {
try p.pushScopeForVisitPass(js_ast.Scope.Kind.entry, locModuleScope);
p.fn_or_arrow_data_visit.is_outside_fn_or_arrow = true;
p.module_scope = p.current_scope;
p.has_es_module_syntax = p.es6_import_keyword.len > 0 or p.es6_export_keyword.len > 0 or p.top_level_await_keyword.len > 0;

// ECMAScript modules are always interpreted as strict mode. This has to be
// done before "hoistSymbols" because strict mode can alter hoisting (!).
if (p.es6_import_keyword.len > 0) {
p.module_scope.recursiveSetStrictMode(js_ast.StrictModeKind.implicit_strict_mode_import);
} else if (p.es6_export_keyword.len > 0) {
p.module_scope.recursiveSetStrictMode(js_ast.StrictModeKind.implicit_strict_mode_export);
} else if (p.top_level_await_keyword.len > 0) {
p.module_scope.recursiveSetStrictMode(js_ast.StrictModeKind.implicit_strict_mode_top_level_await);
}

p.hoistSymbols(p.module_scope);
}

pub fn hoistSymbols(p: *P, scope: *js_ast.Scope) void {
if (!scope.kindStopsHoisting()) {
var iter = scope.members.iterator();
nextMember: while (iter.next()) |res| {
var symbol = p.symbols.items[res.value.ref.inner_index];
if (!symbol.isHoisted()) {
continue :nextMember;
}
}
}
}

pub fn unshiftScopeOrder(self: *P) !ScopeOrder {
if (self.scopes_in_order.items.len == 0) {
var scope = try js_ast.Scope.initPtr(self.allocator);
return ScopeOrder{
.scope = scope,
.loc = logger.Loc.Empty,
};
} else {
return self.scopes_in_order.orderedRemove(0);
}
}

pub fn pushScopeForVisitPass(p: *P, kind: js_ast.Scope.Kind, loc: logger.Loc) !void {
const order = try p.unshiftScopeOrder();

// Sanity-check that the scopes generated by the first and second passes match
if (nql(order.loc, loc) or nql(order.scope.kind, kind)) {
std.debug.panic("Expected scope ({s}, {d}) in {s}, found scope ({s}, {d})", .{ kind, loc.start, p.source.path.pretty, order.scope.kind, order.loc.start });
}

p.current_scope = order.scope;

try p.scopes_for_current_part.append(order.scope);
}

pub fn pushScopeForParsePass(p: *P, kind: js_ast.Scope.Kind, loc: logger.Loc) !int {
var parent = p.current_scope;
var scope = try p.allocator.create(js_ast.Scope);
var scope = js_ast.Scope.initPtr(p.allocator);
scope.kind = kind;
scope.parent = parent;
scope.members = scope.members.init(p.allocator);

scope.label_ref = null;

if (parent) |_parent| {
@@ -506,47 +572,73 @@ const P = struct {
}
}

// // Copy down function arguments into the function body scope. That way we get
// // errors if a statement in the function body tries to re-declare any of the
// // arguments.
// if kind == js_ast.ScopeFunctionBody {
// if scope.Parent.Kind != js_ast.ScopeFunctionArgs {
// panic("Internal error")
// }
// for name, member := range scope.Parent.Members {
// // Don't copy down the optional function expression name. Re-declaring
// // the name of a function expression is allowed.
// kind := p.symbols[member.Ref.InnerIndex].Kind
// if kind != js_ast.SymbolHoistedFunction {
// scope.Members[name] = member
// }
// }
// }
// Copy down function arguments into the function body scope. That way we get
// errors if a statement in the function body tries to re-declare any of the
// arguments.
if (kind == js_ast.ScopeFunctionBody) {
if (scope.parent.kind != js_ast.ScopeFunctionArgs) {
std.debug.panic("Internal error");
}

// for name, member := range scope.parent.members {
// // Don't copy down the optional function expression name. Re-declaring
// // the name of a function expression is allowed.
// kind := p.symbols[member.Ref.InnerIndex].Kind
// if kind != js_ast.SymbolHoistedFunction {
// scope.Members[name] = member
// }
// }
}
}

pub fn init(allocator: *std.mem.Allocator, log: logger.Log, source: logger.Source, lexer: js_lexer.Lexer, options: *Options) !*Parser {
pub fn parseStmtsUpTo(p: *P, eend: js_lexer.T, opts: *ParseStatementOptions) ![]js_ast.Stmt {
var stmts = List(js_ast.Stmt).init(p.allocator);
try stmts.ensureCapacity(1);

var returnWithoutSemicolonStart: i32 = -1;
opts.lexical_decl = .allow_all;
var isDirectivePrologue = true;

while (true) {
// var comments = p.lexer
}

return stmts.toOwnedSlice();
}

pub fn init(allocator: *std.mem.Allocator, log: logger.Log, source: logger.Source, lexer: js_lexer.Lexer, opts: Parser.Options) !*P {
var parser = try allocator.create(P);
parser.allocated_names = List(string).init(allocator);
parser.scopes_for_current_part = List(*js_ast.Scope).init(allocator);
parser.symbols = List(js_ast.Symbol).init(allocator);
parser.ts_use_counts = List(u32).init(allocator);
parser.declared_symbols = List(js_ast.DeclaredSymbol).init(allocator);
parser.known_enum_values = Map(js_ast.Ref, std.StringHashMap(f64)).init(allocator);
parser.import_records = List(ImportRecord).init(allocator);
parser.import_records_for_current_part = List(u32).init(allocator);
parser.export_star_import_records = List(u32).init(allocator);
parser.import_items_for_namespace = Map(js_ast.Ref, Map(string, js_ast.LocRef)).init(allocator);
parser.named_imports = Map(js_ast.Ref, js_ast.NamedImport).init(allocator);
parser.top_level_symbol_to_parts = Map(js_ast.Ref, List(u32)).init(allocator);
parser.import_namespace_cc_map = Map(ImportNamespaceCallOrConstruct, bool).init(allocator);
parser.scopes_in_order = List(ScopeOrder).init(allocator);
parser.temp_refs_to_declare = List(TempRef).init(allocator);
parser.relocated_top_level_vars = List(js_ast.LocRef).init(allocator);
parser.allocated_names = @TypeOf(parser.allocated_names).init(allocator);
parser.scopes_for_current_part = @TypeOf(parser.scopes_for_current_part).init(allocator);
parser.symbols = @TypeOf(parser.symbols).init(allocator);
parser.ts_use_counts = @TypeOf(parser.ts_use_counts).init(allocator);
parser.declared_symbols = @TypeOf(parser.declared_symbols).init(allocator);
parser.known_enum_values = @TypeOf(parser.known_enum_values).init(allocator);
parser.import_records = @TypeOf(parser.import_records).init(allocator);
parser.import_records_for_current_part = @TypeOf(parser.import_records_for_current_part).init(allocator);
parser.export_star_import_records = @TypeOf(parser.export_star_import_records).init(allocator);
parser.import_items_for_namespace = @TypeOf(parser.import_items_for_namespace).init(allocator);
parser.named_imports = @TypeOf(parser.named_imports).init(allocator);
parser.top_level_symbol_to_parts = @TypeOf(parser.top_level_symbol_to_parts).init(allocator);
parser.import_namespace_cc_map = @TypeOf(parser.import_namespace_cc_map).init(allocator);
parser.scopes_in_order = @TypeOf(parser.scopes_in_order).init(allocator);
parser.temp_refs_to_declare = @TypeOf(parser.temp_refs_to_declare).init(allocator);
parser.relocated_top_level_vars = @TypeOf(parser.relocated_top_level_vars).init(allocator);
parser.log = log;
parser.allocator = allocator;
parser.options = opts;
parser.source = source;
parser.lexer = lexer;

return parser;
}
};
test "js_parser.init" {
try alloc.setup(std.heap.page_allocator);

const entryPointName = "/bacon/hello.js";
const code = "for (let i = 0; i < 100; i++) { console.log(\"hi\");\n}";
var parser = try Parser.init(try options.TransformOptions.initUncached(alloc.dynamic, entryPointName, code), alloc.dynamic);
const res = try parser.parse();
}
@@ -25,7 +25,15 @@ pub const Kind = enum {
}
};

pub const Loc = i32;
pub const Loc = struct {
start: i32 = -1,

pub const Empty = Loc{ .start = -1 };

pub fn eql(loc: *Loc, other: Loc) bool {
return loc.start == other.start;
}
};

pub const Location = struct {
file: string,
@@ -52,10 +60,10 @@ pub const Location = struct {
if (_source) |source| {
var data = source.initErrorPosition(r.loc);
return Location{
.file = source.path.pretty_path,
.file = source.path.pretty,
.namespace = source.path.namespace,
.line = usize2Loc(data.line_count),
.column = usize2Loc(data.column_count),
.line = usize2Loc(data.line_count).start,
.column = usize2Loc(data.column_count).start,
.length = source.contents.len,
.line_text = source.contents[data.line_start..data.line_end],
};
@@ -87,9 +95,9 @@ pub const Msg = struct {
};

pub const Range = struct {
loc: Loc = 0,
loc: Loc = Loc.Empty,
len: i32 = 0,
const Empty = Range{ .loc = 0, .len = 0 };
pub const None = Range{ .loc = Loc.Empty, .len = 0 };
};

pub const Log = struct {
@@ -174,10 +182,10 @@ pub const Log = struct {
};

pub fn usize2Loc(loc: usize) Loc {
if (loc > std.math.maxInt(Loc)) {
return 9999;
if (loc > std.math.maxInt(i32)) {
return Loc.Empty;
} else {
return @intCast(Loc, loc);
return Loc{ .start = @intCast(i32, loc) };
}
}
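Editor's note: Loc changes here from a bare i32 into a one-field struct, which is why call sites elsewhere in this commit switch from `==` to `eql(...)` and why usize2Loc now returns `Loc{ .start = ... }`. A small self-contained sketch of the new shape, written in the same older-Zig builtin style as the diff; `LocSketch` is a stand-in, not the real logger.Loc:

```zig
const std = @import("std");

// Stand-in for the new logger.Loc: a wrapped byte offset instead of a raw i32.
const LocSketch = struct {
    start: i32 = -1,

    pub const Empty = LocSketch{ .start = -1 };

    pub fn eql(loc: LocSketch, other: LocSketch) bool {
        return loc.start == other.start;
    }
};

fn usize2Loc(offset: usize) LocSketch {
    // Offsets that overflow i32 fall back to Empty instead of a magic 9999.
    if (offset > std.math.maxInt(i32)) return LocSketch.Empty;
    return LocSketch{ .start = @intCast(i32, offset) };
}

test "usize2Loc wraps byte offsets" {
    std.debug.assert(usize2Loc(42).eql(LocSketch{ .start = 42 }));
    std.debug.assert(usize2Loc(std.math.maxInt(usize)).eql(LocSketch.Empty));
}
```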
@@ -195,17 +203,23 @@ pub const Source = struct {

pub fn initFile(file: fs.File, allocator: *std.mem.Allocator) Source {
std.debug.assert(file.contents != null);
return Source{ .path = path, .identifier_name = file.path.name.nonUniqueNameString(allocator) catch unreachable, .contents = file.contents };
var name = file.path.name;
var identifier_name = name.nonUniqueNameString(allocator) catch unreachable;
if (file.contents) |contents| {
return Source{ .path = file.path, .identifier_name = identifier_name, .contents = contents };
} else {
std.debug.panic("Expected file.contents to not be null. {s}", .{file});
}
}

pub fn initPathString(pathString: string, contents: string) Source {
const path = fs.Path.init(pathString);
var path = fs.Path.init(pathString);
return Source{ .path = path, .identifier_name = path.name.base, .contents = contents };
}

pub fn initErrorPosition(self: *const Source, _offset: Loc) ErrorPosition {
var prev_code_point: u21 = 0;
var offset: usize = if (_offset < 0) 0 else @intCast(usize, _offset);
var offset: usize = if (_offset.start < 0) 0 else @intCast(usize, _offset.start);

const contents = self.contents;
@@ -1,6 +1,7 @@
const std = @import("std");
const log = @import("logger.zig");
const fs = @import("fs.zig");
const alloc = @import("alloc.zig");

usingnamespace @import("strings.zig");

@@ -47,19 +48,17 @@ pub const TransformOptions = struct {
inject: ?[]string = null,
public_url: string = "/",
filesystem_cache: std.StringHashMap(fs.File),
entry_point: *fs.File,
entry_point: fs.File,
resolve_paths: bool = false,

pub fn initUncached(allocator: *std.mem.Allocator, entryPointName: string, code: string) !TransformOptions {
assert(entryPointName.len > 0);

const filesystemCache = std.StringHashMap(string).init(allocator);
var filesystemCache = std.StringHashMap(fs.File).init(allocator);

var entryPoint = !allocator.Create(fs.file);
entryPoint.path = fs.Path.init(entryPointName, allocator);
entryPoint.contents = code;
var entryPoint = fs.File{ .path = fs.Path.init(entryPointName), .contents = code, .mtime = null };

const define = std.StringHashMap(string).init(allocator);
var define = std.StringHashMap(string).init(allocator);
try define.ensureCapacity(1);

define.putAssumeCapacity("process.env.NODE_ENV", "development");
@@ -89,3 +88,12 @@ pub const OutputFile = struct {
};

pub const TransformResult = struct { errors: []log.Msg, warnings: []log.Msg, output_files: []OutputFile };

test "TransformOptions.initUncached" {
try alloc.setup(std.heap.page_allocator);
const opts = try TransformOptions.initUncached(alloc.dynamic, "lol.jsx", "<Hi />");

std.testing.expectEqualStrings("lol", opts.entry_point.path.name.base);
std.testing.expectEqualStrings(".jsx", opts.entry_point.path.name.ext);
std.testing.expect(Loader.jsx == opts.loader);
}
@@ -1,7 +1,14 @@
const mutable = @import("string_mutable.zig");
const std = @import("std");

pub usingnamespace @import("string_types.zig");

pub const strings = @import("string_immutable.zig");

pub const MutableString = mutable.MutableString;

pub const eql = std.meta.eql;

pub fn nql(a: anytype, b: @TypeOf(a)) bool {
return !eql(a, b);
}
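Editor's note: `eql` here is just `std.meta.eql`, a field-wise comparison, and via `usingnamespace @import("strings.zig")` it appears to be what the lexer's `eql(loc, self.prev_error_loc)` calls above resolve to now that Loc is a struct. A quick illustration; the local `Loc` type in the test is a stand-in:

```zig
const std = @import("std");

pub const eql = std.meta.eql;

pub fn nql(a: anytype, b: @TypeOf(a)) bool {
    return !eql(a, b);
}

test "std.meta.eql compares structs field by field" {
    const Loc = struct { start: i32 = -1 };
    std.debug.assert(eql(Loc{ .start = 3 }, Loc{ .start = 3 }));
    std.debug.assert(nql(Loc{ .start = 3 }, Loc{ .start = 4 }));
}
```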