mirror of
https://github.com/oven-sh/bun
synced 2026-02-02 15:08:46 +00:00
wip wip
.gitignore (vendored, 1 line added)
@@ -1,3 +1,4 @@
.DS_Store
zig-cache
*.wasm
src/alloc.zig (new file, 27 lines)
@@ -0,0 +1,27 @@
const std = @import("std");

const STATIC_MEMORY_SIZE = 256000;
pub var static_manager: ?std.heap.FixedBufferAllocator = null;
pub var dynamic_manager: ?std.heap.ArenaAllocator = null;
pub var root_manager: ?std.heap.ArenaAllocator = null;
pub var static: *std.mem.Allocator = undefined;
pub var dynamic: *std.mem.Allocator = undefined;

pub fn setup(root: *std.mem.Allocator) !void {
    root_manager = std.heap.ArenaAllocator.init(root);

    // Reserve a fixed block up front; "static" allocations are served from it.
    var buf = try root_manager.?.child_allocator.alloc(u8, STATIC_MEMORY_SIZE);
    static_manager = std.heap.FixedBufferAllocator.init(buf);
    static = &static_manager.?.allocator;

    // "dynamic" allocations go through an arena backed by the root allocator.
    dynamic_manager = std.heap.ArenaAllocator.init(root);
    dynamic = &dynamic_manager.?.allocator;
}

test "GlobalAllocator" {
    try setup(std.heap.page_allocator);
    var testType = try static.alloc(u8, 10);
    testType[1] = 1;
}
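A minimal usage sketch, not part of the commit itself: assuming setup() runs once at startup and that callers import this file as alloc, other modules would allocate through the two globals like this.

const std = @import("std");
const alloc = @import("alloc.zig");

pub fn main() !void {
    // One-time setup with the process-wide page allocator.
    try alloc.setup(std.heap.page_allocator);

    // Scratch memory comes out of the fixed "static" block...
    const scratch = try alloc.static.alloc(u8, 64);
    std.mem.set(u8, scratch, 0);

    // ...while longer-lived data goes through the arena-backed "dynamic" allocator.
    const name = try alloc.dynamic.alloc(u8, 16);
    std.mem.copy(u8, name, "hello, allocator");
}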
src/bundler.zig (new file, 10 lines)
@@ -0,0 +1,10 @@
const std = @import("std");
const options = @import("options.zig");

pub const Bundler = struct {
    options: options.TransformOptions,

    // TODO: bundling is not implemented yet in this commit.
    // pub fn bundle(...) ...
};
src/fs.zig (new file, 76 lines)
@@ -0,0 +1,76 @@
const std = @import("std");
const strings = @import("strings.zig");
const expect = std.testing.expect;

// Keyed by path text; the draft's single-parameter AutoHashMap would not
// compile, so a string-keyed map is assumed here.
pub const FileSystem = struct { tree: std.StringHashMap(FileSystemEntry) };

pub const FileSystemEntry = union(enum) {
    file: File,
    directory: Directory,
};

pub const File = struct { path: Path, mtime: ?usize, contents: ?[]u8 };
pub const Directory = struct { path: Path, mtime: ?usize, contents: []FileSystemEntry };

pub const PathName = struct {
    base: []u8,
    dir: []u8,
    ext: []u8,

    pub fn init(_path: []u8) PathName {
        var path = _path;
        var base: []u8 = path;
        var dir: []u8 = path;
        var ext: []u8 = path;

        var _i = strings.lastIndexOfChar(path, '/');
        while (_i) |i| {
            // Stop if we found a non-trailing slash
            if (i + 1 != path.len) {
                base = path[i + 1 ..];
                dir = path[0..i];
                break;
            }

            // Ignore trailing slashes
            path = path[0..i];

            _i = strings.lastIndexOfChar(path, '/');
        }

        // Strip off the extension
        var _dot = strings.lastIndexOfChar(base, '.');
        if (_dot) |dot| {
            ext = base[dot..];
            base = base[0..dot];
        }

        return PathName{
            .dir = dir,
            .base = base,
            .ext = ext,
        };
    }
};

pub const Path = struct {
    pretty_path: []u8,
    text: []u8,
    namespace: []u8,
    path_disabled: []u8,

    // Zig slices cannot be ordered with "<"/">", so compare bytewise with
    // std.mem.order: namespace first (descending, as in the draft), then text.
    pub fn isBefore(a: *Path, b: Path) bool {
        const ns = std.mem.order(u8, a.namespace, b.namespace);
        if (ns != .eq) return ns == .gt;
        return std.mem.order(u8, a.text, b.text) == .lt;
    }
};

test "PathName.init" {
    var file = "/root/directory/file.ext".*;
    const res = PathName.init(&file);
    std.testing.expectEqualStrings(res.dir, "/root/directory");
    std.testing.expectEqualStrings(res.base, "file");
    std.testing.expectEqualStrings(res.ext, ".ext");
}
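A short sketch, not from the commit, of walking the FileSystemEntry union; totalSize is a hypothetical helper that sums cached file contents.

const fs = @import("fs.zig");

fn totalSize(entry: fs.FileSystemEntry) usize {
    return switch (entry) {
        // Files report the byte length of their cached contents, if any.
        .file => |f| if (f.contents) |c| c.len else 0,
        // Directories sum their children recursively.
        .directory => |d| blk: {
            var sum: usize = 0;
            for (d.contents) |child| sum += totalSize(child);
            break :blk sum;
        },
    };
}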
src/import_record.zig (new file, 64 lines)
@@ -0,0 +1,64 @@
|
||||
const fs = @import("fs.zig");
|
||||
const logger = @import("logger.zig");
|
||||
|
||||
pub const ImportKind = enum(u8) {
|
||||
|
||||
// An entry point provided by the user
|
||||
entry_point,
|
||||
|
||||
// An ES6 import or re-export statement
|
||||
stmt,
|
||||
|
||||
// A call to "require()"
|
||||
require,
|
||||
|
||||
// An "import()" expression with a string argument
|
||||
dynamic,
|
||||
|
||||
// A call to "require.resolve()"
|
||||
require_resolve,
|
||||
|
||||
// A CSS "@import" rule
|
||||
at,
|
||||
|
||||
// A CSS "@import" rule with import conditions
|
||||
at_conditional,
|
||||
|
||||
// A CSS "url(...)" token
|
||||
url,
|
||||
};
|
||||
|
||||
pub const ImportRecord = struct {
|
||||
range: logger.Range,
|
||||
path: fs.Path,
|
||||
|
||||
// Sometimes the parser creates an import record and decides it isn't needed.
|
||||
// For example, TypeScript code may have import statements that later turn
|
||||
// out to be type-only imports after analyzing the whole file.
|
||||
is_unused: bool,
|
||||
|
||||
// If this is true, the import contains syntax like "* as ns". This is used
|
||||
// to determine whether modules that have no exports need to be wrapped in a
|
||||
// CommonJS wrapper or not.
|
||||
contains_import_star: bool,
|
||||
|
||||
// If this is true, the import contains an import for the alias "default",
|
||||
// either via the "import x from" or "import {default as x} from" syntax.
|
||||
contains_default_alias: bool,
|
||||
|
||||
// If true, this "export * from 'path'" statement is evaluated at run-time by
|
||||
// calling the "__reExport()" helper function
|
||||
calls_run_time_re_export_fn: bool,
|
||||
|
||||
// Tell the printer to wrap this call to "require()" in "__toModule(...)"
|
||||
wrap_with_to_module: bool,
|
||||
|
||||
// True for require calls like this: "try { require() } catch {}". In this
|
||||
// case we shouldn't generate an error if the path could not be resolved.
|
||||
is_inside_try_body: bool,
|
||||
|
||||
// If true, this was originally written as a bare "import 'file'" statement
|
||||
was_originally_bare_import: bool,
|
||||
|
||||
kind: ImportKind,
|
||||
};
|
||||
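For orientation, a sketch of filling out an ImportRecord; recordBareImport is a hypothetical helper, not an API from this commit.

const fs = @import("fs.zig");
const logger = @import("logger.zig");
const import_record = @import("import_record.zig");

// Record a bare `import "file"` statement at the given source range.
fn recordBareImport(range: logger.Range, path: fs.Path) import_record.ImportRecord {
    return import_record.ImportRecord{
        .range = range,
        .path = path,
        .kind = .stmt,
        .is_unused = false,
        .contains_import_star = false,
        .contains_default_alias = false,
        .calls_run_time_re_export_fn = false,
        .wrap_with_to_module = false,
        .is_inside_try_body = false,
        .was_originally_bare_import = true,
    };
}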
src/js_ast.zig (new file, 390 lines)
@@ -0,0 +1,390 @@
|
||||
const std = @import("std");
|
||||
const logger = @import("logger.zig");
|
||||
|
||||
pub const NodeIndex = u32;
|
||||
pub const NodeIndexNone = 4294967293;
|
||||
|
||||
pub const DataIndex = u16;
|
||||
pub const DataIndexNone = 65533;
|
||||
|
||||
pub const BindingNodeIndex = NodeIndex;
|
||||
pub const StmtNodeIndex = NodeIndex;
|
||||
pub const ExprNodeIndex = NodeIndex;
|
||||
|
||||
pub const Comment = struct { text: []u8 };
|
||||
|
||||
pub const FnBody = struct {
|
||||
loc: logger.Loc,
|
||||
stmts: []StmtNodeIndex,
|
||||
};
|
||||
|
||||
pub const Fn = struct {
|
||||
name: NodeIndex = NodeIndexNone,
|
||||
open_parens_loc: logger.Loc,
|
||||
args: []Arg,
|
||||
body: FnBody,
|
||||
|
||||
is_async: bool,
|
||||
is_generator: bool,
|
||||
has_rest_arg: bool,
|
||||
has_if_scope: bool,
|
||||
|
||||
// This is true if the function is a method
|
||||
is_unique_formal_parameters: bool,
|
||||
};
|
||||
|
||||
pub const BindingType = enum {
|
||||
b_missing,
|
||||
b_identifier,
|
||||
b_array,
|
||||
b_object,
|
||||
};
|
||||
|
||||
pub const Property = struct {
|
||||
pub const Kind = enum {
|
||||
normal,
|
||||
get,
|
||||
set,
|
||||
spread,
|
||||
};
|
||||
|
||||
key: NodeIndex,
|
||||
value: NodeIndex = NodeIndexNone,
|
||||
kind: Kind = Kind.normal,
|
||||
is_computed: bool,
|
||||
is_method: bool,
|
||||
is_static: bool,
|
||||
was_shorthand: bool,
|
||||
};
|
||||
|
||||
pub const Arg = struct {
|
||||
ts_decorators: []NodeIndex,
|
||||
binding: Binding,
|
||||
default: NodeIndex = NodeIndexNone,
|
||||
|
||||
// "constructor(public x: boolean) {}"
|
||||
is_typescript_ctor_field: bool,
|
||||
};
|
||||
|
||||
pub const Try = struct {};
|
||||
pub const Binding = struct {};
|
||||
|
||||
pub const Class = struct {
|
||||
class_keyword: logger.Range,
|
||||
ts_decorators: []NodeIndex,
|
||||
name: logger.Loc,
|
||||
extends: NodeIndex = NodeIndexNone,
|
||||
body_loc: logger.Loc,
|
||||
properties: []Property,
|
||||
};
|
||||
|
||||
pub const Expr = struct {
|
||||
pub const Array = struct {
|
||||
items: []ExprNodeIndex,
|
||||
comma_after_spread: logger.Loc,
|
||||
is_parenthesized: bool,
|
||||
};
|
||||
|
||||
pub const Unary = struct {
|
||||
op: Op.Code,
|
||||
};
|
||||
|
||||
// TODO: THIS IS WHERE YOU LEFT OFF!
|
||||
// pub const Binary = {}
|
||||
};
|
||||
|
||||
pub const Op = struct {
|
||||
// If you add a new token, remember to add it to "OpTable" too
|
||||
const Code = enum {
|
||||
// Prefix
|
||||
un_pos,
|
||||
un_neg,
|
||||
un_cpl,
|
||||
un_not,
|
||||
un_void,
|
||||
un_typeof,
|
||||
un_delete,
|
||||
|
||||
// Prefix update
|
||||
un_pre_dec,
|
||||
un_pre_inc,
|
||||
|
||||
// Postfix update
|
||||
un_post_dec,
|
||||
un_post_inc,
|
||||
|
||||
// Left-associative
|
||||
bin_add,
|
||||
bin_sub,
|
||||
bin_mul,
|
||||
bin_div,
|
||||
bin_rem,
|
||||
bin_pow,
|
||||
bin_lt,
|
||||
bin_le,
|
||||
bin_gt,
|
||||
bin_ge,
|
||||
bin_in,
|
||||
bin_instanceof,
|
||||
bin_shl,
|
||||
bin_shr,
|
||||
bin_u_shr,
|
||||
bin_loose_eq,
|
||||
bin_loose_ne,
|
||||
bin_strict_eq,
|
||||
bin_strict_ne,
|
||||
bin_nullish_coalescing,
|
||||
bin_logical_or,
|
||||
bin_logical_and,
|
||||
bin_bitwise_or,
|
||||
bin_bitwise_and,
|
||||
bin_bitwise_xor,
|
||||
|
||||
// Non-associative
|
||||
bin_comma,
|
||||
|
||||
// Right-associative
|
||||
bin_assign,
|
||||
bin_add_assign,
|
||||
bin_sub_assign,
|
||||
bin_mul_assign,
|
||||
bin_div_assign,
|
||||
bin_rem_assign,
|
||||
bin_pow_assign,
|
||||
bin_shl_assign,
|
||||
bin_shr_assign,
|
||||
bin_u_shr_assign,
|
||||
bin_bitwise_or_assign,
|
||||
bin_bitwise_and_assign,
|
||||
bin_bitwise_xor_assign,
|
||||
bin_nullish_coalescing_assign,
|
||||
bin_logical_or_assign,
|
||||
bin_logical_and_assign,
|
||||
};
|
||||
|
||||
const Level = enum {
|
||||
lowest,
|
||||
comma,
|
||||
spread,
|
||||
yield,
|
||||
assign,
|
||||
conditional,
|
||||
nullish_coalescing,
|
||||
logical_or,
|
||||
logical_and,
|
||||
bitwise_or,
|
||||
bitwise_xor,
|
||||
bitwise_and,
|
||||
equals,
|
||||
compare,
|
||||
shift,
|
||||
add,
|
||||
multiply,
|
||||
exponentiation,
|
||||
prefix,
|
||||
postfix,
|
||||
new,
|
||||
call,
|
||||
member,
|
||||
};
|
||||
|
||||
text: []const u8,
|
||||
level: Level,
|
||||
is_keyword: bool,
|
||||
|
||||
const Table = [_]Op{
|
||||
// Prefix
|
||||
.{ "+", Level.prefix, false },
|
||||
.{ "-", Level.prefix, false },
|
||||
.{ "~", Level.prefix, false },
|
||||
.{ "!", Level.prefix, false },
|
||||
.{ "void", Level.prefix, true },
|
||||
.{ "typeof", Level.prefix, true },
|
||||
.{ "delete", Level.prefix, true },
|
||||
|
||||
// Prefix update
|
||||
.{ "--", Level.prefix, false },
|
||||
.{ "++", Level.prefix, false },
|
||||
|
||||
// Postfix update
|
||||
.{ "--", Level.postfix, false },
|
||||
.{ "++", Level.postfix, false },
|
||||
|
||||
// Left-associative
|
||||
.{ "+", Level.add, false },
|
||||
.{ "-", Level.add, false },
|
||||
.{ "*", Level.multiply, false },
|
||||
.{ "/", Level.multiply, false },
|
||||
.{ "%", Level.multiply, false },
|
||||
.{ "**", Level.exponentiation, false }, // Right-associative
|
||||
.{ "<", Level.compare, false },
|
||||
.{ "<=", Level.compare, false },
|
||||
.{ ">", Level.compare, false },
|
||||
.{ ">=", Level.compare, false },
|
||||
.{ "in", Level.compare, true },
|
||||
.{ "instanceof", Level.compare, true },
|
||||
.{ "<<", Level.shift, false },
|
||||
.{ ">>", Level.shift, false },
|
||||
.{ ">>>", Level.shift, false },
|
||||
.{ "==", Level.equals, false },
|
||||
.{ "!=", Level.equals, false },
|
||||
.{ "===", Level.equals, false },
|
||||
.{ "!==", Level.equals, false },
|
||||
.{ "??", Level.nullish_coalescing, false },
|
||||
.{ "||", Level.logical_or, false },
|
||||
.{ "&&", Level.logical_and, false },
|
||||
.{ "|", Level.bitwise_or, false },
|
||||
.{ "&", Level.bitwise_and, false },
|
||||
.{ "^", Level.bitwise_xor, false },
|
||||
|
||||
// Non-associative
|
||||
.{ ",", LComma, false },
|
||||
|
||||
// Right-associative
|
||||
.{ "=", Level.assign, false },
|
||||
.{ "+=", Level.assign, false },
|
||||
.{ "-=", Level.assign, false },
|
||||
.{ "*=", Level.assign, false },
|
||||
.{ "/=", Level.assign, false },
|
||||
.{ "%=", Level.assign, false },
|
||||
.{ "**=", Level.assign, false },
|
||||
.{ "<<=", Level.assign, false },
|
||||
.{ ">>=", Level.assign, false },
|
||||
.{ ">>>=", Level.assign, false },
|
||||
.{ "|=", Level.assign, false },
|
||||
.{ "&=", Level.assign, false },
|
||||
.{ "^=", Level.assign, false },
|
||||
.{ "??=", Level.assign, false },
|
||||
.{ "||=", Level.assign, false },
|
||||
.{ "&&=", Level.assign, false },
|
||||
};
|
||||
};
|
||||
|
||||
pub const ArrayBinding = struct {
|
||||
binding: BindingNodeIndex,
|
||||
default_value: ExprNodeIndex = NodeIndexNone,
|
||||
};
|
||||
|
||||
pub const Node = struct {
|
||||
pub const Tag = enum {
|
||||
s_block,
|
||||
s_comment,
|
||||
s_debugger,
|
||||
s_directive,
|
||||
s_empty,
|
||||
s_type_script,
|
||||
s_export_clause,
|
||||
s_export_from,
|
||||
s_export_default,
|
||||
s_export_star,
|
||||
s_export_equals,
|
||||
s_lazy_export,
|
||||
s_expr,
|
||||
s_enum,
|
||||
s_namespace,
|
||||
s_function,
|
||||
s_class,
|
||||
s_label,
|
||||
s_if,
|
||||
s_for,
|
||||
s_for_in,
|
||||
s_for_of,
|
||||
s_do_while,
|
||||
s_while,
|
||||
s_with,
|
||||
s_try,
|
||||
s_switch,
|
||||
s_import,
|
||||
s_return,
|
||||
s_throw,
|
||||
s_local,
|
||||
s_break,
|
||||
s_continue,
|
||||
|
||||
e_array,
|
||||
e_unary,
|
||||
e_binary,
|
||||
e_boolean,
|
||||
e_super,
|
||||
e_null,
|
||||
e_undefined,
|
||||
e_this,
|
||||
e_new,
|
||||
e_new_target,
|
||||
e_import_meta,
|
||||
e_call,
|
||||
e_dot,
|
||||
e_index,
|
||||
e_arrow,
|
||||
e_function,
|
||||
e_class,
|
||||
e_identifier,
|
||||
e_import_identifier,
|
||||
e_private_identifier,
|
||||
e_jsx_element,
|
||||
e_missing,
|
||||
e_number,
|
||||
e_big_int,
|
||||
e_object,
|
||||
e_spread,
|
||||
e_string,
|
||||
e_template,
|
||||
e_reg_exp,
|
||||
e_await,
|
||||
e_yield,
|
||||
e_if,
|
||||
e_require,
|
||||
e_require_resolve,
|
||||
e_import,
|
||||
};
|
||||
|
||||
// Source code location of the AST node.
|
||||
loc: logger.Loc,
|
||||
// this is relatively common.
|
||||
is_single_line: bool,
|
||||
|
||||
//
|
||||
child: NodeIndex = NodeIndexNone,
|
||||
extra_data: ?[]NodeIndex,
|
||||
data_index: u16,
|
||||
};
|
||||
|
||||
pub const AST = struct {
|
||||
node_tags: std.ArrayList(Node.Tag),
|
||||
};
|
||||
|
||||
pub const Span = struct {
|
||||
text: []u8,
|
||||
range: logger.Range,
|
||||
};
|
||||
|
||||
pub const ExportsKind = enum {
|
||||
// This file doesn't have any kind of export, so it's impossible to say what
|
||||
// kind of file this is. An empty file is in this category, for example.
|
||||
none,
|
||||
|
||||
// The exports are stored on "module" and/or "exports". Calling "require()"
|
||||
// on this module returns "module.exports". All imports to this module are
|
||||
// allowed but may return undefined.
|
||||
cjs,
|
||||
|
||||
// All export names are known explicitly. Calling "require()" on this module
|
||||
// generates an exports object (stored in "exports") with getters for the
|
||||
// export names. Named imports to this module are only allowed if they are
|
||||
// in the set of export names.
|
||||
esm,
|
||||
|
||||
// Some export names are known explicitly, but others fall back to a dynamic
|
||||
// run-time object. This is necessary when using the "export * from" syntax
|
||||
// with either a CommonJS module or an external module (i.e. a module whose
|
||||
// export names are not known at compile-time).
|
||||
//
|
||||
// Calling "require()" on this module generates an exports object (stored in
|
||||
// "exports") with getters for the export names. All named imports to this
|
||||
// module are allowed. Direct named imports reference the corresponding export
|
||||
// directly. Other imports go through property accesses on "exports".
|
||||
esm_with_dyn,
};
|
||||
|
||||
pub fn isDynamicExport(exp: ExportsKind) bool {
    return exp == .cjs or exp == .esm_with_dyn;
}
|
||||
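The AST above stores nodes in flat arrays and links them by NodeIndex (a u32), with NodeIndexNone as the null sentinel instead of a pointer. A sketch of how a consumer might follow such links; both helpers are hypothetical.

const js_ast = @import("js_ast.zig");

// "No child" is a sentinel index, not a null pointer.
fn hasChild(node: js_ast.Node) bool {
    return node.child != js_ast.NodeIndexNone;
}

// Tags live in a parallel ArrayList on the AST, indexed by NodeIndex.
fn childTag(tree: js_ast.AST, node: js_ast.Node) ?js_ast.Node.Tag {
    if (!hasChild(node)) return null;
    return tree.node_tags.items[node.child];
}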
src/js_lexer.zig (new file, 156 lines)
@@ -0,0 +1,156 @@
|
||||
const std = @import("std");
|
||||
const logger = @import("logger.zig");
|
||||
const tables = @import("js_lexer_tables.zig");
|
||||
const unicode = std.unicode;
|
||||
|
||||
const Source = logger.Source;
|
||||
pub const T = tables.T;
|
||||
pub const CodePoint = tables.CodePoint;
|
||||
pub const Keywords = tables.Keywords;
|
||||
pub const tokenToString = tables.tokenToString;
|
||||
pub const jsxEntity = tables.jsxEntity;
|
||||
|
||||
pub const Lexer = struct {
|
||||
log: logger.Log,
|
||||
source: logger.Source,
|
||||
current: usize = 0,
|
||||
start: usize = 0,
|
||||
end: usize = 0,
|
||||
approximate_newline_count: i32 = 0,
|
||||
legacy_octal_loc: logger.Loc = 0,
|
||||
previous_backslash_quote_in_jsx: logger.Range = logger.Range{},
|
||||
token: T = T.t_end_of_file,
|
||||
has_newline_before: bool = false,
|
||||
has_pure_comment_before: bool = false,
|
||||
preserve_all_comments_before: bool = false,
|
||||
is_legacy_octal_literal: bool = false,
|
||||
// comments_to_preserve_before: []js_ast.Comment,
|
||||
// all_original_comments: []js_ast.Comment,
|
||||
code_point: CodePoint = 0,
|
||||
string_literal: []u16,
|
||||
identifier: []u8 = "",
|
||||
// jsx_factory_pragma_comment: js_ast.Span,
|
||||
// jsx_fragment_pragma_comment: js_ast.Span,
|
||||
// source_mapping_url: js_ast.Span,
|
||||
number: f64 = 0.0,
|
||||
rescan_close_brace_as_template_token: bool = false,
|
||||
for_global_name: bool = false,
|
||||
prev_error_loc: i32 = -1,
|
||||
fn nextCodepointSlice(it: *Lexer) callconv(.Inline) ?[]const u8 {
|
||||
if (it.current >= it.source.contents.len) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const cp_len = unicode.utf8ByteSequenceLength(it.source.contents[it.current]) catch unreachable;
|
||||
it.end = it.current;
|
||||
it.current += cp_len;
|
||||
|
||||
return it.source.contents[it.current - cp_len .. it.current];
|
||||
}
|
||||
|
||||
pub fn addError(self: *Lexer, loc: logger.Loc, text: []u8) void {
|
||||
if (loc == self.prev_error_loc) {
|
||||
return;
|
||||
}
|
||||
|
||||
self.prev_error_loc = loc;
|
||||
}
|
||||
|
||||
pub fn codePointEql(self: *Lexer, a: u8) bool {
|
||||
return @intCast(CodePoint, a) == self.code_point;
|
||||
}
|
||||
|
||||
fn nextCodepoint(it: *Lexer) callconv(.Inline) CodePoint {
|
||||
const slice = it.nextCodepointSlice() orelse return @as(CodePoint, 0);
|
||||
|
||||
switch (slice.len) {
|
||||
1 => return @as(CodePoint, slice[0]),
|
||||
2 => return @as(CodePoint, unicode.utf8Decode2(slice) catch unreachable),
|
||||
3 => return @as(CodePoint, unicode.utf8Decode3(slice) catch unreachable),
|
||||
4 => return @as(CodePoint, unicode.utf8Decode4(slice) catch unreachable),
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
|
||||
/// Look ahead at the next n codepoints without advancing the iterator.
|
||||
/// If fewer than n codepoints are available, then return the remainder of the string.
|
||||
fn peek(it: *Lexer, n: usize) []const u8 {
|
||||
const original_i = it.current;
|
||||
defer it.current = original_i;
|
||||
|
||||
var end_ix = original_i;
|
||||
var found: usize = 0;
|
||||
while (found < n) : (found += 1) {
|
||||
const next_codepoint = it.nextCodepointSlice() orelse return it.source.contents[original_i..];
|
||||
end_ix += next_codepoint.len;
|
||||
}
|
||||
|
||||
return it.source.contents[original_i..end_ix];
|
||||
}
|
||||
|
||||
fn step(lexer: *Lexer) void {
|
||||
lexer.code_point = lexer.nextCodepoint();
|
||||
|
||||
// Track the approximate number of newlines in the file so we can preallocate
|
||||
// the line offset table in the printer for source maps. The line offset table
|
||||
// is the #1 highest allocation in the heap profile, so this is worth doing.
|
||||
// This count is approximate because it handles "\n" and "\r\n" (the common
|
||||
// cases) but not "\r" or "\u2028" or "\u2029". Getting this wrong is harmless
|
||||
// because it's only a preallocation. The array will just grow if it's too small.
|
||||
if (lexer.code_point == '\n') {
|
||||
lexer.approximate_newline_count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect(self: *Lexer, token: T) void {
    if (self.token != token) {
        self.expected(token);
    }

    self.next();
}
|
||||
|
||||
pub fn expectOrInsertSemicolon(lexer: *Lexer) void {
    if (lexer.token == T.t_semicolon or (!lexer.has_newline_before and
        lexer.token != T.t_close_brace and lexer.token != T.t_end_of_file))
    {
        lexer.expect(T.t_semicolon);
    }
}
|
||||
|
||||
pub fn next(self: *Lexer) void {}
|
||||
|
||||
pub fn init(log: logger.Log, source: logger.Source) Lexer {
|
||||
var string_literal = [1]u16{0};
|
||||
|
||||
var lex = Lexer{
|
||||
.log = log,
|
||||
.source = source,
|
||||
.string_literal = &string_literal,
|
||||
.prev_error_loc = -1,
|
||||
};
|
||||
lex.step();
|
||||
lex.next();
|
||||
|
||||
return lex;
|
||||
}
|
||||
};
|
||||
|
||||
test "Lexer.step()" {
|
||||
const msgs = std.ArrayList(logger.Msg).init(std.testing.allocator);
|
||||
const log = logger.Log{
|
||||
.msgs = msgs,
|
||||
};
|
||||
|
||||
var sourcefile = "for (let i = 0; i < 100; i++) { console.log('hi'); }".*;
|
||||
var identifier_name = "loop".*;
|
||||
defer std.testing.allocator.free(msgs.items);
|
||||
const source = logger.Source{ .index = 0, .contents = &sourcefile, .identifier_name = &identifier_name };
|
||||
|
||||
var lex = Lexer.init(log, source);
|
||||
std.testing.expect('f' == lex.code_point);
|
||||
lex.step();
|
||||
std.testing.expect('o' == lex.code_point);
|
||||
lex.step();
|
||||
std.testing.expect('r' == lex.code_point);
|
||||
}
|
||||
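A small test sketch, written as if it lived inside js_lexer.zig next to the test above (peek is file-private); it only exercises behavior already implemented in this commit.

test "Lexer.peek()" {
    const msgs = std.ArrayList(logger.Msg).init(std.testing.allocator);
    defer std.testing.allocator.free(msgs.items);
    const log = logger.Log{ .msgs = msgs };

    var sourcefile = "=> rest".*;
    var identifier_name = "peek".*;
    const source = logger.Source{ .index = 0, .contents = &sourcefile, .identifier_name = &identifier_name };

    var lex = Lexer.init(log, source);
    // init() already consumed the first codepoint, so the cursor sits on '='.
    std.testing.expect('=' == lex.code_point);
    // peek() looks ahead without consuming: the next two codepoints are "> ".
    std.testing.expectEqualStrings("> ", lex.peek(2));
    // Lexer state is unchanged afterwards; stepping now yields '>'.
    std.testing.expect('=' == lex.code_point);
    lex.step();
    std.testing.expect('>' == lex.code_point);
}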
src/js_lexer_tables.zig (new file, 714 lines)
@@ -0,0 +1,714 @@
|
||||
const std = @import("std");
|
||||
const expectString = std.testing.expectEqualStrings;
|
||||
const expect = std.testing.expect;
|
||||
const logger = @import("logger.zig");
|
||||
const unicode = std.unicode;
|
||||
const alloc = @import("alloc.zig");
|
||||
|
||||
pub const T = enum(u8) {
|
||||
t_end_of_file,
|
||||
t_syntax_error,
|
||||
|
||||
// "#!/usr/bin/env node"
|
||||
t_hashbang,
|
||||
|
||||
// literals
|
||||
t_no_substitution_template_literal, // contents are in lexer.string_literal ([]uint16)
|
||||
t_numeric_literal, // contents are in lexer.number (float64)
|
||||
t_string_literal, // contents are in lexer.string_literal ([]uint16)
|
||||
t_big_integer_literal, // contents are in lexer.identifier (string)
|
||||
|
||||
// pseudo-literals
|
||||
t_template_head, // contents are in lexer.string_literal ([]uint16)
|
||||
t_template_middle, // contents are in lexer.string_literal ([]uint16)
|
||||
t_template_tail, // contents are in lexer.string_literal ([]uint16)
|
||||
|
||||
// punctuation
|
||||
t_ampersand,
|
||||
t_ampersand_ampersand,
|
||||
t_asterisk,
|
||||
t_asterisk_asterisk,
|
||||
t_at,
|
||||
t_bar,
|
||||
t_bar_bar,
|
||||
t_caret,
|
||||
t_close_brace,
|
||||
t_close_bracket,
|
||||
t_close_paren,
|
||||
t_colon,
|
||||
t_comma,
|
||||
t_dot,
|
||||
t_dot_dot_dot,
|
||||
t_equals_equals,
|
||||
t_equals_equals_equals,
|
||||
t_equals_greater_than,
|
||||
t_exclamation,
|
||||
t_exclamation_equals,
|
||||
t_exclamation_equals_equals,
|
||||
t_greater_than,
|
||||
t_greater_than_equals,
|
||||
t_greater_than_greater_than,
|
||||
t_greater_than_greater_than_greater_than,
|
||||
t_less_than,
|
||||
t_less_than_equals,
|
||||
t_less_than_less_than,
|
||||
t_minus,
|
||||
t_minus_minus,
|
||||
t_open_brace,
|
||||
t_open_bracket,
|
||||
t_open_paren,
|
||||
t_percent,
|
||||
t_plus,
|
||||
t_plus_plus,
|
||||
t_question,
|
||||
t_question_dot,
|
||||
t_question_question,
|
||||
t_semicolon,
|
||||
t_slash,
|
||||
t_tilde,
|
||||
|
||||
// assignments (keep in sync with is_assign() below)
|
||||
t_ampersand_ampersand_equals,
|
||||
t_ampersand_equals,
|
||||
t_asterisk_asterisk_equals,
|
||||
t_asterisk_equals,
|
||||
t_bar_bar_equals,
|
||||
t_bar_equals,
|
||||
t_caret_equals,
|
||||
t_equals,
|
||||
t_greater_than_greater_than_equals,
|
||||
t_greater_than_greater_than_greater_than_equals,
|
||||
t_less_than_less_than_equals,
|
||||
t_minus_equals,
|
||||
t_percent_equals,
|
||||
t_plus_equals,
|
||||
t_question_question_equals,
|
||||
t_slash_equals,
|
||||
|
||||
// class-private fields and methods
|
||||
t_private_identifier,
|
||||
|
||||
// identifiers
|
||||
t_identifier, // contents are in lexer.identifier (string)
|
||||
t_escaped_keyword, // a keyword that has been escaped as an identifier
|
||||
|
||||
// reserved words
|
||||
t_break,
|
||||
t_case,
|
||||
t_catch,
|
||||
t_class,
|
||||
t_const,
|
||||
t_continue,
|
||||
t_debugger,
|
||||
t_default,
|
||||
t_delete,
|
||||
t_do,
|
||||
t_else,
|
||||
t_enum,
|
||||
t_export,
|
||||
t_extends,
|
||||
t_false,
|
||||
t_finally,
|
||||
t_for,
|
||||
t_function,
|
||||
t_if,
|
||||
t_import,
|
||||
t_in,
|
||||
t_instanceof,
|
||||
t_new,
|
||||
t_null,
|
||||
t_return,
|
||||
t_super,
|
||||
t_switch,
|
||||
t_this,
|
||||
t_throw,
|
||||
t_true,
|
||||
t_try,
|
||||
t_typeof,
|
||||
t_var,
|
||||
t_void,
|
||||
t_while,
|
||||
t_with,
|
||||
|
||||
pub fn isAssign(self: T) bool {
    return @enumToInt(self) >= @enumToInt(T.t_ampersand_ampersand_equals) and @enumToInt(self) <= @enumToInt(T.t_slash_equals);
}
|
||||
|
||||
pub fn isReservedWord(self: T) bool {
    return @enumToInt(self) >= @enumToInt(T.t_break) and @enumToInt(self) <= @enumToInt(T.t_with);
}
|
||||
};
|
||||
|
||||
pub const Keywords = std.ComptimeStringMap(T, .{
|
||||
.{ "break", .t_break },
|
||||
.{ "case", .t_case },
|
||||
.{ "catch", .t_catch },
|
||||
.{ "class", .t_class },
|
||||
.{ "const", .t_const },
|
||||
.{ "continue", .t_continue },
|
||||
.{ "debugger", .t_debugger },
|
||||
.{ "default", .t_default },
|
||||
.{ "delete", .t_delete },
|
||||
.{ "do", .t_do },
|
||||
.{ "else", .t_else },
|
||||
.{ "enum", .t_enum },
|
||||
.{ "export", .t_export },
|
||||
.{ "extends", .t_extends },
|
||||
.{ "false", .t_false },
|
||||
.{ "finally", .t_finally },
|
||||
.{ "for", .t_for },
|
||||
.{ "function", .t_function },
|
||||
.{ "if", .t_if },
|
||||
.{ "import", .t_import },
|
||||
.{ "in", .t_in },
|
||||
.{ "instanceof", .t_instanceof },
|
||||
.{ "new", .t_new },
|
||||
.{ "null", .t_null },
|
||||
.{ "return", .t_return },
|
||||
.{ "super", .t_super },
|
||||
.{ "switch", .t_switch },
|
||||
.{ "this", .t_this },
|
||||
.{ "throw", .t_throw },
|
||||
.{ "true", .t_true },
|
||||
.{ "try", .t_try },
|
||||
.{ "typeof", .t_typeof },
|
||||
.{ "var", .t_var },
|
||||
.{ "void", .t_void },
|
||||
.{ "while", .t_while },
|
||||
.{ "with", .t_with },
|
||||
});
|
||||
|
||||
pub const CodePoint = u21;
|
||||
|
||||
pub const TokenEnumType = std.EnumArray(T, []u8);
|
||||
|
||||
pub const tokenToString: TokenEnumType = comptime blk: {
|
||||
var TEndOfFile = "end of file".*;
|
||||
var TSyntaxError = "syntax error".*;
|
||||
var THashbang = "hashbang comment".*;
|
||||
|
||||
// Literals
|
||||
var TNoSubstitutionTemplateLiteral = "template literal".*;
|
||||
var TNumericLiteral = "number".*;
|
||||
var TStringLiteral = "string".*;
|
||||
var TBigIntegerLiteral = "bigint".*;
|
||||
|
||||
// Pseudo-literals
|
||||
var TTemplateHead = "template literal".*;
|
||||
var TTemplateMiddle = "template literal".*;
|
||||
var TTemplateTail = "template literal".*;
|
||||
|
||||
// Punctuation
|
||||
var TAmpersand = "\"&\"".*;
|
||||
var TAmpersandAmpersand = "\"&&\"".*;
|
||||
var TAsterisk = "\"*\"".*;
|
||||
var TAsteriskAsterisk = "\"**\"".*;
|
||||
var TAt = "\"@\"".*;
|
||||
var TBar = "\"|\"".*;
|
||||
var TBarBar = "\"||\"".*;
|
||||
var TCaret = "\"^\"".*;
|
||||
var TCloseBrace = "\"}\"".*;
|
||||
var TCloseBracket = "\"]\"".*;
|
||||
var TCloseParen = "\")\"".*;
|
||||
var TColon = "\":\"".*;
|
||||
var TComma = "\",\"".*;
|
||||
var TDot = "\".\"".*;
|
||||
var TDotDotDot = "\"...\"".*;
|
||||
var TEqualsEquals = "\"==\"".*;
|
||||
var TEqualsEqualsEquals = "\"===\"".*;
|
||||
var TEqualsGreaterThan = "\"=>\"".*;
|
||||
var TExclamation = "\"!\"".*;
|
||||
var TExclamationEquals = "\"!=\"".*;
|
||||
var TExclamationEqualsEquals = "\"!==\"".*;
|
||||
var TGreaterThan = "\">\"".*;
|
||||
var TGreaterThanEquals = "\">=\"".*;
|
||||
var TGreaterThanGreaterThan = "\">>\"".*;
|
||||
var TGreaterThanGreaterThanGreaterThan = "\">>>\"".*;
|
||||
var TLessThan = "\"<\"".*;
|
||||
var TLessThanEquals = "\"<=\"".*;
|
||||
var TLessThanLessThan = "\"<<\"".*;
|
||||
var TMinus = "\"-\"".*;
|
||||
var TMinusMinus = "\"--\"".*;
|
||||
var TOpenBrace = "\"{\"".*;
|
||||
var TOpenBracket = "\"[\"".*;
|
||||
var TOpenParen = "\"(\"".*;
|
||||
var TPercent = "\"%\"".*;
|
||||
var TPlus = "\"+\"".*;
|
||||
var TPlusPlus = "\"++\"".*;
|
||||
var TQuestion = "\"?\"".*;
|
||||
var TQuestionDot = "\"?.\"".*;
|
||||
var TQuestionQuestion = "\"??\"".*;
|
||||
var TSemicolon = "\";\"".*;
|
||||
var TSlash = "\"/\"".*;
|
||||
var TTilde = "\"~\"".*;
|
||||
|
||||
// Assignments
|
||||
var TAmpersandAmpersandEquals = "\"&&=\"".*;
|
||||
var TAmpersandEquals = "\"&=\"".*;
|
||||
var TAsteriskAsteriskEquals = "\"**=\"".*;
|
||||
var TAsteriskEquals = "\"*=\"".*;
|
||||
var TBarBarEquals = "\"||=\"".*;
|
||||
var TBarEquals = "\"|=\"".*;
|
||||
var TCaretEquals = "\"^=\"".*;
|
||||
var TEquals = "\"=\"".*;
|
||||
var TGreaterThanGreaterThanEquals = "\">>=\"".*;
|
||||
var TGreaterThanGreaterThanGreaterThanEquals = "\">>>=\"".*;
|
||||
var TLessThanLessThanEquals = "\"<<=\"".*;
|
||||
var TMinusEquals = "\"-=\"".*;
|
||||
var TPercentEquals = "\"%=\"".*;
|
||||
var TPlusEquals = "\"+=\"".*;
|
||||
var TQuestionQuestionEquals = "\"??=\"".*;
|
||||
var TSlashEquals = "\"/=\"".*;
|
||||
|
||||
// Class-private fields and methods
|
||||
var TPrivateIdentifier = "private identifier".*;
|
||||
|
||||
// Identifiers
|
||||
var TIdentifier = "identifier".*;
|
||||
var TEscapedKeyword = "escaped keyword".*;
|
||||
|
||||
// Reserved words
|
||||
var TBreak = "\"break\"".*;
|
||||
var TCase = "\"case\"".*;
|
||||
var TCatch = "\"catch\"".*;
|
||||
var TClass = "\"class\"".*;
|
||||
var TConst = "\"const\"".*;
|
||||
var TContinue = "\"continue\"".*;
|
||||
var TDebugger = "\"debugger\"".*;
|
||||
var TDefault = "\"default\"".*;
|
||||
var TDelete = "\"delete\"".*;
|
||||
var TDo = "\"do\"".*;
|
||||
var TElse = "\"else\"".*;
|
||||
var TEnum = "\"enum\"".*;
|
||||
var TExport = "\"export\"".*;
|
||||
var TExtends = "\"extends\"".*;
|
||||
var TFalse = "\"false\"".*;
|
||||
var TFinally = "\"finally\"".*;
|
||||
var TFor = "\"for\"".*;
|
||||
var TFunction = "\"function\"".*;
|
||||
var TIf = "\"if\"".*;
|
||||
var TImport = "\"import\"".*;
|
||||
var TIn = "\"in\"".*;
|
||||
var TInstanceof = "\"instanceof\"".*;
|
||||
var TNew = "\"new\"".*;
|
||||
var TNull = "\"null\"".*;
|
||||
var TReturn = "\"return\"".*;
|
||||
var TSuper = "\"super\"".*;
|
||||
var TSwitch = "\"switch\"".*;
|
||||
var TThis = "\"this\"".*;
|
||||
var TThrow = "\"throw\"".*;
|
||||
var TTrue = "\"true\"".*;
|
||||
var TTry = "\"try\"".*;
|
||||
var TTypeof = "\"typeof\"".*;
|
||||
var TVar = "\"var\"".*;
|
||||
var TVoid = "\"void\"".*;
|
||||
var TWhile = "\"while\"".*;
|
||||
var TWith = "\"with\"".*;
|
||||
|
||||
var tokenEnums = TokenEnumType.initUndefined();
|
||||
|
||||
var eof = "end of file";
|
||||
|
||||
tokenEnums.set(T.t_end_of_file, &TEndOfFile);
|
||||
tokenEnums.set(T.t_syntax_error, &TSyntaxError);
|
||||
tokenEnums.set(T.t_hashbang, &THashbang);
|
||||
|
||||
// Literals
|
||||
tokenEnums.set(T.t_no_substitution_template_literal, &TNoSubstitutionTemplateLiteral);
|
||||
tokenEnums.set(T.t_numeric_literal, &TNumericLiteral);
|
||||
tokenEnums.set(T.t_string_literal, &TStringLiteral);
|
||||
tokenEnums.set(T.t_big_integer_literal, &TBigIntegerLiteral);
|
||||
|
||||
// Pseudo-literals
|
||||
tokenEnums.set(T.t_template_head, &TTemplateHead);
|
||||
tokenEnums.set(T.t_template_middle, &TTemplateMiddle);
|
||||
tokenEnums.set(T.t_template_tail, &TTemplateTail);
|
||||
|
||||
// Punctuation
|
||||
tokenEnums.set(T.t_ampersand, &TAmpersand);
|
||||
tokenEnums.set(T.t_ampersand_ampersand, &TAmpersandAmpersand);
|
||||
tokenEnums.set(T.t_asterisk, &TAsterisk);
|
||||
tokenEnums.set(T.t_asterisk_asterisk, &TAsteriskAsterisk);
|
||||
tokenEnums.set(T.t_at, &TAt);
|
||||
tokenEnums.set(T.t_bar, &TBar);
|
||||
tokenEnums.set(T.t_bar_bar, &TBarBar);
|
||||
tokenEnums.set(T.t_caret, &TCaret);
|
||||
tokenEnums.set(T.t_close_brace, &TCloseBrace);
|
||||
tokenEnums.set(T.t_close_bracket, &TCloseBracket);
|
||||
tokenEnums.set(T.t_close_paren, &TCloseParen);
|
||||
tokenEnums.set(T.t_colon, &TColon);
|
||||
tokenEnums.set(T.t_comma, &TComma);
|
||||
tokenEnums.set(T.t_dot, &TDot);
|
||||
tokenEnums.set(T.t_dot_dot_dot, &TDotDotDot);
|
||||
tokenEnums.set(T.t_equals_equals, &TEqualsEquals);
|
||||
tokenEnums.set(T.t_equals_equals_equals, &TEqualsEqualsEquals);
|
||||
tokenEnums.set(T.t_equals_greater_than, &TEqualsGreaterThan);
|
||||
tokenEnums.set(T.t_exclamation, &TExclamation);
|
||||
tokenEnums.set(T.t_exclamation_equals, &TExclamationEquals);
|
||||
tokenEnums.set(T.t_exclamation_equals_equals, &TExclamationEqualsEquals);
|
||||
tokenEnums.set(T.t_greater_than, &TGreaterThan);
|
||||
tokenEnums.set(T.t_greater_than_equals, &TGreaterThanEquals);
|
||||
tokenEnums.set(T.t_greater_than_greater_than, &TGreaterThanGreaterThan);
|
||||
tokenEnums.set(T.t_greater_than_greater_than_greater_than, &TGreaterThanGreaterThanGreaterThan);
|
||||
tokenEnums.set(T.t_less_than, &TLessThan);
|
||||
tokenEnums.set(T.t_less_than_equals, &TLessThanEquals);
|
||||
tokenEnums.set(T.t_less_than_less_than, &TLessThanLessThan);
|
||||
tokenEnums.set(T.t_minus, &TMinus);
|
||||
tokenEnums.set(T.t_minus_minus, &TMinusMinus);
|
||||
tokenEnums.set(T.t_open_brace, &TOpenBrace);
|
||||
tokenEnums.set(T.t_open_bracket, &TOpenBracket);
|
||||
tokenEnums.set(T.t_open_paren, &TOpenParen);
|
||||
tokenEnums.set(T.t_percent, &TPercent);
|
||||
tokenEnums.set(T.t_plus, &TPlus);
|
||||
tokenEnums.set(T.t_plus_plus, &TPlusPlus);
|
||||
tokenEnums.set(T.t_question, &TQuestion);
|
||||
tokenEnums.set(T.t_question_dot, &TQuestionDot);
|
||||
tokenEnums.set(T.t_question_question, &TQuestionQuestion);
|
||||
tokenEnums.set(T.t_semicolon, &TSemicolon);
|
||||
tokenEnums.set(T.t_slash, &TSlash);
|
||||
tokenEnums.set(T.t_tilde, &TTilde);
|
||||
|
||||
// Assignments
|
||||
tokenEnums.set(T.t_ampersand_ampersand_equals, &TAmpersandAmpersandEquals);
|
||||
tokenEnums.set(T.t_ampersand_equals, &TAmpersandEquals);
|
||||
tokenEnums.set(T.t_asterisk_asterisk_equals, &TAsteriskAsteriskEquals);
|
||||
tokenEnums.set(T.t_asterisk_equals, &TAsteriskEquals);
|
||||
tokenEnums.set(T.t_bar_bar_equals, &TBarBarEquals);
|
||||
tokenEnums.set(T.t_bar_equals, &TBarEquals);
|
||||
tokenEnums.set(T.t_caret_equals, &TCaretEquals);
|
||||
tokenEnums.set(T.t_equals, &TEquals);
|
||||
tokenEnums.set(T.t_greater_than_greater_than_equals, &TGreaterThanGreaterThanEquals);
|
||||
tokenEnums.set(T.t_greater_than_greater_than_greater_than_equals, &TGreaterThanGreaterThanGreaterThanEquals);
|
||||
tokenEnums.set(T.t_less_than_less_than_equals, &TLessThanLessThanEquals);
|
||||
tokenEnums.set(T.t_minus_equals, &TMinusEquals);
|
||||
tokenEnums.set(T.t_percent_equals, &TPercentEquals);
|
||||
tokenEnums.set(T.t_plus_equals, &TPlusEquals);
|
||||
tokenEnums.set(T.t_question_question_equals, &TQuestionQuestionEquals);
|
||||
tokenEnums.set(T.t_slash_equals, &TSlashEquals);
|
||||
|
||||
// Class-private fields and methods
|
||||
tokenEnums.set(T.t_private_identifier, &TPrivateIdentifier);
|
||||
|
||||
// Identifiers
|
||||
tokenEnums.set(T.t_identifier, &TIdentifier);
|
||||
tokenEnums.set(T.t_escaped_keyword, &TEscapedKeyword);
|
||||
|
||||
// Reserved words
|
||||
tokenEnums.set(T.t_break, &TBreak);
|
||||
tokenEnums.set(T.t_case, &TCase);
|
||||
tokenEnums.set(T.t_catch, &TCatch);
|
||||
tokenEnums.set(T.t_class, &TClass);
|
||||
tokenEnums.set(T.t_const, &TConst);
|
||||
tokenEnums.set(T.t_continue, &TContinue);
|
||||
tokenEnums.set(T.t_debugger, &TDebugger);
|
||||
tokenEnums.set(T.t_default, &TDefault);
|
||||
tokenEnums.set(T.t_delete, &TDelete);
|
||||
tokenEnums.set(T.t_do, &TDo);
|
||||
tokenEnums.set(T.t_else, &TElse);
|
||||
tokenEnums.set(T.t_enum, &TEnum);
|
||||
tokenEnums.set(T.t_export, &TExport);
|
||||
tokenEnums.set(T.t_extends, &TExtends);
|
||||
tokenEnums.set(T.t_false, &TFalse);
|
||||
tokenEnums.set(T.t_finally, &TFinally);
|
||||
tokenEnums.set(T.t_for, &TFor);
|
||||
tokenEnums.set(T.t_function, &TFunction);
|
||||
tokenEnums.set(T.t_if, &TIf);
|
||||
tokenEnums.set(T.t_import, &TImport);
|
||||
tokenEnums.set(T.t_in, &TIn);
|
||||
tokenEnums.set(T.t_instanceof, &TInstanceof);
|
||||
tokenEnums.set(T.t_new, &TNew);
|
||||
tokenEnums.set(T.t_null, &TNull);
|
||||
tokenEnums.set(T.t_return, &TReturn);
|
||||
tokenEnums.set(T.t_super, &TSuper);
|
||||
tokenEnums.set(T.t_switch, &TSwitch);
|
||||
tokenEnums.set(T.t_this, &TThis);
|
||||
tokenEnums.set(T.t_throw, &TThrow);
|
||||
tokenEnums.set(T.t_true, &TTrue);
|
||||
tokenEnums.set(T.t_try, &TTry);
|
||||
tokenEnums.set(T.t_typeof, &TTypeof);
|
||||
tokenEnums.set(T.t_var, &TVar);
|
||||
tokenEnums.set(T.t_void, &TVoid);
|
||||
tokenEnums.set(T.t_while, &TWhile);
|
||||
tokenEnums.set(T.t_with, &TWith);
|
||||
|
||||
break :blk tokenEnums;
|
||||
};
|
||||
|
||||
pub const JSXEntityMap = std.StringHashMap(CodePoint);
|
||||
|
||||
pub var jsxEntity: JSXEntityMap = undefined;
|
||||
|
||||
pub fn initJSXEntityMap() !void {
|
||||
jsxEntity = JSXEntityMap.init(alloc.dynamic);
|
||||
jsxEntity.ensureCapacity(255) catch unreachable;
|
||||
|
||||
jsxEntity.putAssumeCapacity("quot", @as(CodePoint, 0x0022));
|
||||
jsxEntity.putAssumeCapacity("amp", @as(CodePoint, 0x0026));
|
||||
jsxEntity.putAssumeCapacity("apos", @as(CodePoint, 0x0027));
|
||||
jsxEntity.putAssumeCapacity("lt", @as(CodePoint, 0x003C));
|
||||
jsxEntity.putAssumeCapacity("gt", @as(CodePoint, 0x003E));
|
||||
jsxEntity.putAssumeCapacity("nbsp", @as(CodePoint, 0x00A0));
|
||||
jsxEntity.putAssumeCapacity("iexcl", @as(CodePoint, 0x00A1));
|
||||
jsxEntity.putAssumeCapacity("cent", @as(CodePoint, 0x00A2));
|
||||
jsxEntity.putAssumeCapacity("pound", @as(CodePoint, 0x00A3));
|
||||
jsxEntity.putAssumeCapacity("curren", @as(CodePoint, 0x00A4));
|
||||
jsxEntity.putAssumeCapacity("yen", @as(CodePoint, 0x00A5));
|
||||
jsxEntity.putAssumeCapacity("brvbar", @as(CodePoint, 0x00A6));
|
||||
jsxEntity.putAssumeCapacity("sect", @as(CodePoint, 0x00A7));
|
||||
jsxEntity.putAssumeCapacity("uml", @as(CodePoint, 0x00A8));
|
||||
jsxEntity.putAssumeCapacity("copy", @as(CodePoint, 0x00A9));
|
||||
jsxEntity.putAssumeCapacity("ordf", @as(CodePoint, 0x00AA));
|
||||
jsxEntity.putAssumeCapacity("laquo", @as(CodePoint, 0x00AB));
|
||||
jsxEntity.putAssumeCapacity("not", @as(CodePoint, 0x00AC));
|
||||
jsxEntity.putAssumeCapacity("shy", @as(CodePoint, 0x00AD));
|
||||
jsxEntity.putAssumeCapacity("reg", @as(CodePoint, 0x00AE));
|
||||
jsxEntity.putAssumeCapacity("macr", @as(CodePoint, 0x00AF));
|
||||
jsxEntity.putAssumeCapacity("deg", @as(CodePoint, 0x00B0));
|
||||
jsxEntity.putAssumeCapacity("plusmn", @as(CodePoint, 0x00B1));
|
||||
jsxEntity.putAssumeCapacity("sup2", @as(CodePoint, 0x00B2));
|
||||
jsxEntity.putAssumeCapacity("sup3", @as(CodePoint, 0x00B3));
|
||||
jsxEntity.putAssumeCapacity("acute", @as(CodePoint, 0x00B4));
|
||||
jsxEntity.putAssumeCapacity("micro", @as(CodePoint, 0x00B5));
|
||||
jsxEntity.putAssumeCapacity("para", @as(CodePoint, 0x00B6));
|
||||
jsxEntity.putAssumeCapacity("middot", @as(CodePoint, 0x00B7));
|
||||
jsxEntity.putAssumeCapacity("cedil", @as(CodePoint, 0x00B8));
|
||||
jsxEntity.putAssumeCapacity("sup1", @as(CodePoint, 0x00B9));
|
||||
jsxEntity.putAssumeCapacity("ordm", @as(CodePoint, 0x00BA));
|
||||
jsxEntity.putAssumeCapacity("raquo", @as(CodePoint, 0x00BB));
|
||||
jsxEntity.putAssumeCapacity("frac14", @as(CodePoint, 0x00BC));
|
||||
jsxEntity.putAssumeCapacity("frac12", @as(CodePoint, 0x00BD));
|
||||
jsxEntity.putAssumeCapacity("frac34", @as(CodePoint, 0x00BE));
|
||||
jsxEntity.putAssumeCapacity("iquest", @as(CodePoint, 0x00BF));
|
||||
jsxEntity.putAssumeCapacity("Agrave", @as(CodePoint, 0x00C0));
|
||||
jsxEntity.putAssumeCapacity("Aacute", @as(CodePoint, 0x00C1));
|
||||
jsxEntity.putAssumeCapacity("Acirc", @as(CodePoint, 0x00C2));
|
||||
jsxEntity.putAssumeCapacity("Atilde", @as(CodePoint, 0x00C3));
|
||||
jsxEntity.putAssumeCapacity("Auml", @as(CodePoint, 0x00C4));
|
||||
jsxEntity.putAssumeCapacity("Aring", @as(CodePoint, 0x00C5));
|
||||
jsxEntity.putAssumeCapacity("AElig", @as(CodePoint, 0x00C6));
|
||||
jsxEntity.putAssumeCapacity("Ccedil", @as(CodePoint, 0x00C7));
|
||||
jsxEntity.putAssumeCapacity("Egrave", @as(CodePoint, 0x00C8));
|
||||
jsxEntity.putAssumeCapacity("Eacute", @as(CodePoint, 0x00C9));
|
||||
jsxEntity.putAssumeCapacity("Ecirc", @as(CodePoint, 0x00CA));
|
||||
jsxEntity.putAssumeCapacity("Euml", @as(CodePoint, 0x00CB));
|
||||
jsxEntity.putAssumeCapacity("Igrave", @as(CodePoint, 0x00CC));
|
||||
jsxEntity.putAssumeCapacity("Iacute", @as(CodePoint, 0x00CD));
|
||||
jsxEntity.putAssumeCapacity("Icirc", @as(CodePoint, 0x00CE));
|
||||
jsxEntity.putAssumeCapacity("Iuml", @as(CodePoint, 0x00CF));
|
||||
jsxEntity.putAssumeCapacity("ETH", @as(CodePoint, 0x00D0));
|
||||
jsxEntity.putAssumeCapacity("Ntilde", @as(CodePoint, 0x00D1));
|
||||
jsxEntity.putAssumeCapacity("Ograve", @as(CodePoint, 0x00D2));
|
||||
jsxEntity.putAssumeCapacity("Oacute", @as(CodePoint, 0x00D3));
|
||||
jsxEntity.putAssumeCapacity("Ocirc", @as(CodePoint, 0x00D4));
|
||||
jsxEntity.putAssumeCapacity("Otilde", @as(CodePoint, 0x00D5));
|
||||
jsxEntity.putAssumeCapacity("Ouml", @as(CodePoint, 0x00D6));
|
||||
jsxEntity.putAssumeCapacity("times", @as(CodePoint, 0x00D7));
|
||||
jsxEntity.putAssumeCapacity("Oslash", @as(CodePoint, 0x00D8));
|
||||
jsxEntity.putAssumeCapacity("Ugrave", @as(CodePoint, 0x00D9));
|
||||
jsxEntity.putAssumeCapacity("Uacute", @as(CodePoint, 0x00DA));
|
||||
jsxEntity.putAssumeCapacity("Ucirc", @as(CodePoint, 0x00DB));
|
||||
jsxEntity.putAssumeCapacity("Uuml", @as(CodePoint, 0x00DC));
|
||||
jsxEntity.putAssumeCapacity("Yacute", @as(CodePoint, 0x00DD));
|
||||
jsxEntity.putAssumeCapacity("THORN", @as(CodePoint, 0x00DE));
|
||||
jsxEntity.putAssumeCapacity("szlig", @as(CodePoint, 0x00DF));
|
||||
jsxEntity.putAssumeCapacity("agrave", @as(CodePoint, 0x00E0));
|
||||
jsxEntity.putAssumeCapacity("aacute", @as(CodePoint, 0x00E1));
|
||||
jsxEntity.putAssumeCapacity("acirc", @as(CodePoint, 0x00E2));
|
||||
jsxEntity.putAssumeCapacity("atilde", @as(CodePoint, 0x00E3));
|
||||
jsxEntity.putAssumeCapacity("auml", @as(CodePoint, 0x00E4));
|
||||
jsxEntity.putAssumeCapacity("aring", @as(CodePoint, 0x00E5));
|
||||
jsxEntity.putAssumeCapacity("aelig", @as(CodePoint, 0x00E6));
|
||||
jsxEntity.putAssumeCapacity("ccedil", @as(CodePoint, 0x00E7));
|
||||
jsxEntity.putAssumeCapacity("egrave", @as(CodePoint, 0x00E8));
|
||||
jsxEntity.putAssumeCapacity("eacute", @as(CodePoint, 0x00E9));
|
||||
jsxEntity.putAssumeCapacity("ecirc", @as(CodePoint, 0x00EA));
|
||||
jsxEntity.putAssumeCapacity("euml", @as(CodePoint, 0x00EB));
|
||||
jsxEntity.putAssumeCapacity("igrave", @as(CodePoint, 0x00EC));
|
||||
jsxEntity.putAssumeCapacity("iacute", @as(CodePoint, 0x00ED));
|
||||
jsxEntity.putAssumeCapacity("icirc", @as(CodePoint, 0x00EE));
|
||||
jsxEntity.putAssumeCapacity("iuml", @as(CodePoint, 0x00EF));
|
||||
jsxEntity.putAssumeCapacity("eth", @as(CodePoint, 0x00F0));
|
||||
jsxEntity.putAssumeCapacity("ntilde", @as(CodePoint, 0x00F1));
|
||||
jsxEntity.putAssumeCapacity("ograve", @as(CodePoint, 0x00F2));
|
||||
jsxEntity.putAssumeCapacity("oacute", @as(CodePoint, 0x00F3));
|
||||
jsxEntity.putAssumeCapacity("ocirc", @as(CodePoint, 0x00F4));
|
||||
jsxEntity.putAssumeCapacity("otilde", @as(CodePoint, 0x00F5));
|
||||
jsxEntity.putAssumeCapacity("ouml", @as(CodePoint, 0x00F6));
|
||||
jsxEntity.putAssumeCapacity("divide", @as(CodePoint, 0x00F7));
|
||||
jsxEntity.putAssumeCapacity("oslash", @as(CodePoint, 0x00F8));
|
||||
jsxEntity.putAssumeCapacity("ugrave", @as(CodePoint, 0x00F9));
|
||||
jsxEntity.putAssumeCapacity("uacute", @as(CodePoint, 0x00FA));
|
||||
jsxEntity.putAssumeCapacity("ucirc", @as(CodePoint, 0x00FB));
|
||||
jsxEntity.putAssumeCapacity("uuml", @as(CodePoint, 0x00FC));
|
||||
jsxEntity.putAssumeCapacity("yacute", @as(CodePoint, 0x00FD));
|
||||
jsxEntity.putAssumeCapacity("thorn", @as(CodePoint, 0x00FE));
|
||||
jsxEntity.putAssumeCapacity("yuml", @as(CodePoint, 0x00FF));
|
||||
jsxEntity.putAssumeCapacity("OElig", @as(CodePoint, 0x0152));
|
||||
jsxEntity.putAssumeCapacity("oelig", @as(CodePoint, 0x0153));
|
||||
jsxEntity.putAssumeCapacity("Scaron", @as(CodePoint, 0x0160));
|
||||
jsxEntity.putAssumeCapacity("scaron", @as(CodePoint, 0x0161));
|
||||
jsxEntity.putAssumeCapacity("Yuml", @as(CodePoint, 0x0178));
|
||||
jsxEntity.putAssumeCapacity("fnof", @as(CodePoint, 0x0192));
|
||||
jsxEntity.putAssumeCapacity("circ", @as(CodePoint, 0x02C6));
|
||||
jsxEntity.putAssumeCapacity("tilde", @as(CodePoint, 0x02DC));
|
||||
jsxEntity.putAssumeCapacity("Alpha", @as(CodePoint, 0x0391));
|
||||
jsxEntity.putAssumeCapacity("Beta", @as(CodePoint, 0x0392));
|
||||
jsxEntity.putAssumeCapacity("Gamma", @as(CodePoint, 0x0393));
|
||||
jsxEntity.putAssumeCapacity("Delta", @as(CodePoint, 0x0394));
|
||||
jsxEntity.putAssumeCapacity("Epsilon", @as(CodePoint, 0x0395));
|
||||
jsxEntity.putAssumeCapacity("Zeta", @as(CodePoint, 0x0396));
|
||||
jsxEntity.putAssumeCapacity("Eta", @as(CodePoint, 0x0397));
|
||||
jsxEntity.putAssumeCapacity("Theta", @as(CodePoint, 0x0398));
|
||||
jsxEntity.putAssumeCapacity("Iota", @as(CodePoint, 0x0399));
|
||||
jsxEntity.putAssumeCapacity("Kappa", @as(CodePoint, 0x039A));
|
||||
jsxEntity.putAssumeCapacity("Lambda", @as(CodePoint, 0x039B));
|
||||
jsxEntity.putAssumeCapacity("Mu", @as(CodePoint, 0x039C));
|
||||
jsxEntity.putAssumeCapacity("Nu", @as(CodePoint, 0x039D));
|
||||
jsxEntity.putAssumeCapacity("Xi", @as(CodePoint, 0x039E));
|
||||
jsxEntity.putAssumeCapacity("Omicron", @as(CodePoint, 0x039F));
|
||||
jsxEntity.putAssumeCapacity("Pi", @as(CodePoint, 0x03A0));
|
||||
jsxEntity.putAssumeCapacity("Rho", @as(CodePoint, 0x03A1));
|
||||
jsxEntity.putAssumeCapacity("Sigma", @as(CodePoint, 0x03A3));
|
||||
jsxEntity.putAssumeCapacity("Tau", @as(CodePoint, 0x03A4));
|
||||
jsxEntity.putAssumeCapacity("Upsilon", @as(CodePoint, 0x03A5));
|
||||
jsxEntity.putAssumeCapacity("Phi", @as(CodePoint, 0x03A6));
|
||||
jsxEntity.putAssumeCapacity("Chi", @as(CodePoint, 0x03A7));
|
||||
jsxEntity.putAssumeCapacity("Psi", @as(CodePoint, 0x03A8));
|
||||
jsxEntity.putAssumeCapacity("Omega", @as(CodePoint, 0x03A9));
|
||||
jsxEntity.putAssumeCapacity("alpha", @as(CodePoint, 0x03B1));
|
||||
jsxEntity.putAssumeCapacity("beta", @as(CodePoint, 0x03B2));
|
||||
jsxEntity.putAssumeCapacity("gamma", @as(CodePoint, 0x03B3));
|
||||
jsxEntity.putAssumeCapacity("delta", @as(CodePoint, 0x03B4));
|
||||
jsxEntity.putAssumeCapacity("epsilon", @as(CodePoint, 0x03B5));
|
||||
jsxEntity.putAssumeCapacity("zeta", @as(CodePoint, 0x03B6));
|
||||
jsxEntity.putAssumeCapacity("eta", @as(CodePoint, 0x03B7));
|
||||
jsxEntity.putAssumeCapacity("theta", @as(CodePoint, 0x03B8));
|
||||
jsxEntity.putAssumeCapacity("iota", @as(CodePoint, 0x03B9));
|
||||
jsxEntity.putAssumeCapacity("kappa", @as(CodePoint, 0x03BA));
|
||||
jsxEntity.putAssumeCapacity("lambda", @as(CodePoint, 0x03BB));
|
||||
jsxEntity.putAssumeCapacity("mu", @as(CodePoint, 0x03BC));
|
||||
jsxEntity.putAssumeCapacity("nu", @as(CodePoint, 0x03BD));
|
||||
jsxEntity.putAssumeCapacity("xi", @as(CodePoint, 0x03BE));
|
||||
jsxEntity.putAssumeCapacity("omicron", @as(CodePoint, 0x03BF));
|
||||
jsxEntity.putAssumeCapacity("pi", @as(CodePoint, 0x03C0));
|
||||
jsxEntity.putAssumeCapacity("rho", @as(CodePoint, 0x03C1));
|
||||
jsxEntity.putAssumeCapacity("sigmaf", @as(CodePoint, 0x03C2));
|
||||
jsxEntity.putAssumeCapacity("sigma", @as(CodePoint, 0x03C3));
|
||||
jsxEntity.putAssumeCapacity("tau", @as(CodePoint, 0x03C4));
|
||||
jsxEntity.putAssumeCapacity("upsilon", @as(CodePoint, 0x03C5));
|
||||
jsxEntity.putAssumeCapacity("phi", @as(CodePoint, 0x03C6));
|
||||
jsxEntity.putAssumeCapacity("chi", @as(CodePoint, 0x03C7));
|
||||
jsxEntity.putAssumeCapacity("psi", @as(CodePoint, 0x03C8));
|
||||
jsxEntity.putAssumeCapacity("omega", @as(CodePoint, 0x03C9));
|
||||
jsxEntity.putAssumeCapacity("thetasym", @as(CodePoint, 0x03D1));
|
||||
jsxEntity.putAssumeCapacity("upsih", @as(CodePoint, 0x03D2));
|
||||
jsxEntity.putAssumeCapacity("piv", @as(CodePoint, 0x03D6));
|
||||
jsxEntity.putAssumeCapacity("ensp", @as(CodePoint, 0x2002));
|
||||
jsxEntity.putAssumeCapacity("emsp", @as(CodePoint, 0x2003));
|
||||
jsxEntity.putAssumeCapacity("thinsp", @as(CodePoint, 0x2009));
|
||||
jsxEntity.putAssumeCapacity("zwnj", @as(CodePoint, 0x200C));
|
||||
jsxEntity.putAssumeCapacity("zwj", @as(CodePoint, 0x200D));
|
||||
jsxEntity.putAssumeCapacity("lrm", @as(CodePoint, 0x200E));
|
||||
jsxEntity.putAssumeCapacity("rlm", @as(CodePoint, 0x200F));
|
||||
jsxEntity.putAssumeCapacity("ndash", @as(CodePoint, 0x2013));
|
||||
jsxEntity.putAssumeCapacity("mdash", @as(CodePoint, 0x2014));
|
||||
jsxEntity.putAssumeCapacity("lsquo", @as(CodePoint, 0x2018));
|
||||
jsxEntity.putAssumeCapacity("rsquo", @as(CodePoint, 0x2019));
|
||||
jsxEntity.putAssumeCapacity("sbquo", @as(CodePoint, 0x201A));
|
||||
jsxEntity.putAssumeCapacity("ldquo", @as(CodePoint, 0x201C));
|
||||
jsxEntity.putAssumeCapacity("rdquo", @as(CodePoint, 0x201D));
|
||||
jsxEntity.putAssumeCapacity("bdquo", @as(CodePoint, 0x201E));
|
||||
jsxEntity.putAssumeCapacity("dagger", @as(CodePoint, 0x2020));
|
||||
jsxEntity.putAssumeCapacity("Dagger", @as(CodePoint, 0x2021));
|
||||
jsxEntity.putAssumeCapacity("bull", @as(CodePoint, 0x2022));
|
||||
jsxEntity.putAssumeCapacity("hellip", @as(CodePoint, 0x2026));
|
||||
jsxEntity.putAssumeCapacity("permil", @as(CodePoint, 0x2030));
|
||||
jsxEntity.putAssumeCapacity("prime", @as(CodePoint, 0x2032));
|
||||
jsxEntity.putAssumeCapacity("Prime", @as(CodePoint, 0x2033));
|
||||
jsxEntity.putAssumeCapacity("lsaquo", @as(CodePoint, 0x2039));
|
||||
jsxEntity.putAssumeCapacity("rsaquo", @as(CodePoint, 0x203A));
|
||||
jsxEntity.putAssumeCapacity("oline", @as(CodePoint, 0x203E));
|
||||
jsxEntity.putAssumeCapacity("frasl", @as(CodePoint, 0x2044));
|
||||
jsxEntity.putAssumeCapacity("euro", @as(CodePoint, 0x20AC));
|
||||
jsxEntity.putAssumeCapacity("image", @as(CodePoint, 0x2111));
|
||||
jsxEntity.putAssumeCapacity("weierp", @as(CodePoint, 0x2118));
|
||||
jsxEntity.putAssumeCapacity("real", @as(CodePoint, 0x211C));
|
||||
jsxEntity.putAssumeCapacity("trade", @as(CodePoint, 0x2122));
|
||||
jsxEntity.putAssumeCapacity("alefsym", @as(CodePoint, 0x2135));
|
||||
jsxEntity.putAssumeCapacity("larr", @as(CodePoint, 0x2190));
|
||||
jsxEntity.putAssumeCapacity("uarr", @as(CodePoint, 0x2191));
|
||||
jsxEntity.putAssumeCapacity("rarr", @as(CodePoint, 0x2192));
|
||||
jsxEntity.putAssumeCapacity("darr", @as(CodePoint, 0x2193));
|
||||
jsxEntity.putAssumeCapacity("harr", @as(CodePoint, 0x2194));
|
||||
jsxEntity.putAssumeCapacity("crarr", @as(CodePoint, 0x21B5));
|
||||
jsxEntity.putAssumeCapacity("lArr", @as(CodePoint, 0x21D0));
|
||||
jsxEntity.putAssumeCapacity("uArr", @as(CodePoint, 0x21D1));
|
||||
jsxEntity.putAssumeCapacity("rArr", @as(CodePoint, 0x21D2));
|
||||
jsxEntity.putAssumeCapacity("dArr", @as(CodePoint, 0x21D3));
|
||||
jsxEntity.putAssumeCapacity("hArr", @as(CodePoint, 0x21D4));
|
||||
jsxEntity.putAssumeCapacity("forall", @as(CodePoint, 0x2200));
|
||||
jsxEntity.putAssumeCapacity("part", @as(CodePoint, 0x2202));
|
||||
jsxEntity.putAssumeCapacity("exist", @as(CodePoint, 0x2203));
|
||||
jsxEntity.putAssumeCapacity("empty", @as(CodePoint, 0x2205));
|
||||
jsxEntity.putAssumeCapacity("nabla", @as(CodePoint, 0x2207));
|
||||
jsxEntity.putAssumeCapacity("isin", @as(CodePoint, 0x2208));
|
||||
jsxEntity.putAssumeCapacity("notin", @as(CodePoint, 0x2209));
|
||||
jsxEntity.putAssumeCapacity("ni", @as(CodePoint, 0x220B));
|
||||
jsxEntity.putAssumeCapacity("prod", @as(CodePoint, 0x220F));
|
||||
jsxEntity.putAssumeCapacity("sum", @as(CodePoint, 0x2211));
|
||||
jsxEntity.putAssumeCapacity("minus", @as(CodePoint, 0x2212));
|
||||
jsxEntity.putAssumeCapacity("lowast", @as(CodePoint, 0x2217));
|
||||
jsxEntity.putAssumeCapacity("radic", @as(CodePoint, 0x221A));
|
||||
jsxEntity.putAssumeCapacity("prop", @as(CodePoint, 0x221D));
|
||||
jsxEntity.putAssumeCapacity("infin", @as(CodePoint, 0x221E));
|
||||
jsxEntity.putAssumeCapacity("ang", @as(CodePoint, 0x2220));
|
||||
jsxEntity.putAssumeCapacity("and", @as(CodePoint, 0x2227));
|
||||
jsxEntity.putAssumeCapacity("or", @as(CodePoint, 0x2228));
|
||||
jsxEntity.putAssumeCapacity("cap", @as(CodePoint, 0x2229));
|
||||
jsxEntity.putAssumeCapacity("cup", @as(CodePoint, 0x222A));
|
||||
jsxEntity.putAssumeCapacity("int", @as(CodePoint, 0x222B));
|
||||
jsxEntity.putAssumeCapacity("there4", @as(CodePoint, 0x2234));
|
||||
jsxEntity.putAssumeCapacity("sim", @as(CodePoint, 0x223C));
|
||||
jsxEntity.putAssumeCapacity("cong", @as(CodePoint, 0x2245));
|
||||
jsxEntity.putAssumeCapacity("asymp", @as(CodePoint, 0x2248));
|
||||
jsxEntity.putAssumeCapacity("ne", @as(CodePoint, 0x2260));
|
||||
jsxEntity.putAssumeCapacity("equiv", @as(CodePoint, 0x2261));
|
||||
jsxEntity.putAssumeCapacity("le", @as(CodePoint, 0x2264));
|
||||
jsxEntity.putAssumeCapacity("ge", @as(CodePoint, 0x2265));
|
||||
jsxEntity.putAssumeCapacity("sub", @as(CodePoint, 0x2282));
|
||||
jsxEntity.putAssumeCapacity("sup", @as(CodePoint, 0x2283));
|
||||
jsxEntity.putAssumeCapacity("nsub", @as(CodePoint, 0x2284));
|
||||
jsxEntity.putAssumeCapacity("sube", @as(CodePoint, 0x2286));
|
||||
jsxEntity.putAssumeCapacity("supe", @as(CodePoint, 0x2287));
|
||||
jsxEntity.putAssumeCapacity("oplus", @as(CodePoint, 0x2295));
|
||||
jsxEntity.putAssumeCapacity("otimes", @as(CodePoint, 0x2297));
|
||||
jsxEntity.putAssumeCapacity("perp", @as(CodePoint, 0x22A5));
|
||||
jsxEntity.putAssumeCapacity("sdot", @as(CodePoint, 0x22C5));
|
||||
jsxEntity.putAssumeCapacity("lceil", @as(CodePoint, 0x2308));
|
||||
jsxEntity.putAssumeCapacity("rceil", @as(CodePoint, 0x2309));
|
||||
jsxEntity.putAssumeCapacity("lfloor", @as(CodePoint, 0x230A));
|
||||
jsxEntity.putAssumeCapacity("rfloor", @as(CodePoint, 0x230B));
|
||||
jsxEntity.putAssumeCapacity("lang", @as(CodePoint, 0x2329));
|
||||
jsxEntity.putAssumeCapacity("rang", @as(CodePoint, 0x232A));
|
||||
jsxEntity.putAssumeCapacity("loz", @as(CodePoint, 0x25CA));
|
||||
jsxEntity.putAssumeCapacity("spades", @as(CodePoint, 0x2660));
|
||||
jsxEntity.putAssumeCapacity("clubs", @as(CodePoint, 0x2663));
|
||||
jsxEntity.putAssumeCapacity("hearts", @as(CodePoint, 0x2665));
|
||||
jsxEntity.putAssumeCapacity("diams", @as(CodePoint, 0x2666));
|
||||
}
|
||||
|
||||
test "tokenToString" {
|
||||
expectString(tokenToString.get(T.t_end_of_file), "end of file");
|
||||
}
|
||||
|
||||
test "jsxEntity" {
|
||||
try alloc.setup(std.heap.page_allocator);
|
||||
|
||||
initJSXEntityMap() catch |err| {
|
||||
@panic(@errorName(err));
|
||||
};
|
||||
|
||||
if (jsxEntity.get("sim")) |v| {
|
||||
expect(v == 0x223C);
|
||||
}
|
||||
}
|
||||
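A short sketch, not from the commit, of how the lexer or parser side could use the Keywords map; tokenForIdentifier is a hypothetical helper built on std.ComptimeStringMap's get().

const tables = @import("js_lexer_tables.zig");

// Reserved words map to their dedicated tokens; anything else stays an identifier.
fn tokenForIdentifier(text: []const u8) tables.T {
    return tables.Keywords.get(text) orelse tables.T.t_identifier;
}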
src/js_parser.zig (new file, 10 lines)
@@ -0,0 +1,10 @@
const std = @import("std");
const logger = @import("logger.zig");
const lexer = @import("lexer.zig");
const ast = @import("js_ast.zig");

// Parsing is not implemented yet in this commit; an empty body keeps the
// declaration compilable.
pub fn Parse(
    log: logger.Log,
    source: logger.Source,
) void {}
@@ -1,176 +0,0 @@
const std = @import("std");

pub const T = enum(u8) {
    t_end_of_file,
    t_syntax_error,

    // "#!/usr/bin/env node"
    t_hashbang,

    // literals
    t_no_substitution_template_literal, // contents are in lexer.string_literal ([]uint16)
    t_numeric_literal, // contents are in lexer.number (float64)
    t_string_literal, // contents are in lexer.string_literal ([]uint16)
    t_big_integer_literal, // contents are in lexer.identifier (string)

    // pseudo-literals
    t_template_head, // contents are in lexer.string_literal ([]uint16)
    t_template_middle, // contents are in lexer.string_literal ([]uint16)
    t_template_tail, // contents are in lexer.string_literal ([]uint16)

    // punctuation
    t_ampersand,
    t_ampersand_ampersand,
    t_asterisk,
    t_asterisk_asterisk,
    t_at,
    t_bar,
    t_bar_bar,
    t_caret,
    t_close_brace,
    t_close_bracket,
    t_close_paren,
    t_colon,
    t_comma,
    t_dot,
    t_dot_dot_dot,
    t_equals_equals,
    t_equals_equals_equals,
    t_equals_greater_than,
    t_exclamation,
    t_exclamation_equals,
    t_exclamation_equals_equals,
    t_greater_than,
    t_greater_than_equals,
    t_greater_than_greater_than,
    t_greater_than_greater_than_greater_than,
    t_less_than,
    t_less_than_equals,
    t_less_than_less_than,
    t_minus,
    t_minus_minus,
    t_open_brace,
    t_open_bracket,
    t_open_paren,
    t_percent,
    t_plus,
    t_plus_plus,
    t_question,
    t_question_dot,
    t_question_question,
    t_semicolon,
    t_slash,
    t_tilde,

    // assignments (keep in sync with is_assign() below)
    t_ampersand_ampersand_equals,
    t_ampersand_equals,
    t_asterisk_asterisk_equals,
    t_asterisk_equals,
    t_bar_bar_equals,
    t_bar_equals,
    t_caret_equals,
    t_equals,
    t_greater_than_greater_than_equals,
    t_greater_than_greater_than_greater_than_equals,
    t_less_than_less_than_equals,
    t_minus_equals,
    t_percent_equals,
    t_plus_equals,
    t_question_question_equals,
    t_slash_equals,

    // class-private fields and methods
    t_private_identifier,

    // identifiers
    t_identifier, // contents are in lexer.identifier (string)
    t_escaped_keyword, // a keyword that has been escaped as an identifier

    // reserved words
    t_break,
    t_case,
    t_catch,
    t_class,
    t_const,
    t_continue,
    t_debugger,
    t_default,
    t_delete,
    t_do,
    t_else,
    t_enum,
    t_export,
    t_extends,
    t_false,
    t_finally,
    t_for,
    t_function,
    t_if,
    t_import,
    t_in,
    t_instanceof,
    t_new,
    t_null,
    t_return,
    t_super,
    t_switch,
    t_this,
    t_throw,
    t_true,
    t_try,
    t_typeof,
    t_var,
    t_void,
    t_while,
    t_with,

    pub fn isAssign() bool {
        return self >= T.t_ampersand_ampersand_equals and self <= T.t_slash_equals;
    }

    pub fn isReservedWord() bool {
        return self >= T.t_break and self <= T.t_with;
    }
};

pub const Keywords = std.ComptimeStringMap(T, .{
    .{ "break", .t_break },
    .{ "case", .t_case },
    .{ "catch", .t_catch },
    .{ "class", .t_class },
    .{ "const", .t_const },
    .{ "continue", .t_continue },
    .{ "debugger", .t_debugger },
    .{ "default", .t_default },
    .{ "delete", .t_delete },
    .{ "do", .t_do },
    .{ "else", .t_else },
    .{ "enum", .t_enum },
    .{ "export", .t_export },
    .{ "extends", .t_extends },
    .{ "false", .t_false },
    .{ "finally", .t_finally },
    .{ "for", .t_for },
    .{ "function", .t_function },
    .{ "if", .t_if },
    .{ "import", .t_import },
    .{ "in", .t_in },
    .{ "instanceof", .t_instanceof },
    .{ "new", .t_new },
    .{ "null", .t_null },
    .{ "return", .t_return },
    .{ "super", .t_super },
    .{ "switch", .t_switch },
    .{ "this", .t_this },
    .{ "throw", .t_throw },
    .{ "true", .t_true },
    .{ "try", .t_try },
    .{ "typeof", .t_typeof },
    .{ "var", .t_var },
    .{ "void", .t_void },
    .{ "while", .t_while },
    .{ "with", .t_with },
});

const Lexer = struct {};
118
src/logger.zig
Normal file
@@ -0,0 +1,118 @@
const std = @import("std");
const strings = @import("strings.zig");

const expect = std.testing.expect;
const assert = std.debug.assert;
const ArrayList = std.ArrayList;

pub const Kind = enum {
    err,
    warn,
    note,
    debug,

    pub fn string(self: Kind) []const u8 {
        return switch (self) {
            .err => "error",
            .warn => "warn",
            .note => "note",
            .debug => "debug",
        };
    }
};

pub const Loc = i32;

pub const Location = struct {
    file: []u8,
    namespace: []u8 = "file",
    line: i32 = 1, // 1-based
    column: i32 = 0, // 0-based, in bytes
    length: u32 = 0, // in bytes
    line_text: ?[]u8,
    suggestion: ?[]u8,

    pub fn init(file: []u8, namespace: []u8, line: i32, column: i32, length: u32, line_text: ?[]u8, suggestion: ?[]u8) Location {
        return Location{
            .file = file,
            .namespace = namespace,
            .line = line,
            .column = column,
            .length = length,
            .line_text = line_text,
            .suggestion = suggestion,
        };
    }

    pub fn init_file(file: []u8, line: i32, column: i32, length: u32, line_text: ?[]u8, suggestion: ?[]u8) Location {
        var namespace = "file".*;

        return Location{
            .file = file,
            .namespace = &namespace,
            .line = line,
            .column = column,
            .length = length,
            .line_text = line_text,
            .suggestion = suggestion,
        };
    }
};

pub const Data = struct { text: []u8, location: *Location };

pub const Msg = struct {
    kind: Kind = Kind.err,
    data: Data,
};

pub const Range = struct { start: u32 = 0, len: i32 = 0 };

pub const Log = struct {
    debug: bool = false,
    warnings: u8 = 0,
    errors: u8 = 0,
    msgs: ArrayList(Msg),

    // TODO:
    pub fn add_msg(self: *Log, msg: Msg) !void {
        try self.msgs.append(msg);
    }

    // TODO:
    pub fn add_err(self: *Log, msg: Msg) !void {
        // try self.msgs.append(msg);
    }

    // TODO:
    pub fn print(self: *Log, to: anytype) !void {
        for (self.msgs.items) |msg| {
            try std.fmt.format(to, "\n\n{s}: {s}\n{s}\n{s}:{}:{}", .{ msg.kind.string(), msg.data.text, msg.data.location.line_text, msg.data.location.file, msg.data.location.line, msg.data.location.column });
        }
    }
};

pub const Source = struct {
    index: u32 = 0,
    contents: []u8,

    // An identifier that is mixed in to automatically-generated symbol names to
    // improve readability. For example, if the identifier is "util" then the
    // symbol for an "export default" statement will be called "util_default".
    identifier_name: []u8,
};

test "print msg" {
    var log = Log{ .msgs = ArrayList(Msg).init(std.testing.allocator) };
    defer log.msgs.deinit();
    var filename = "test.js".*;
    var syntax = "for(i = 0;)".*;
    var err = "invalid syntax".*;
    var namespace = "file".*;

    try log.add_msg(Msg{
        .kind = .err,
        .data = Data{ .location = &Location.init_file(&filename, 1, 3, 0, &syntax, ""), .text = &err },
    });

    const stdout = std.io.getStdOut().writer();

    try log.print(stdout);
}
@@ -1,103 +0,0 @@
const std = @import("std");

const expect = std.testing.expect;

const ArrayList = std.ArrayList;

pub const Msg = struct {
    pub const Kind = enum {
        err,
        warn,
        note,
        debug,

        pub fn string(self: Kind) []const u8 {
            return switch (self) {
                .err => "error",
                .warn => "warn",
                .note => "note",
                .debug => "debug",
            };
        }
    };

    pub const Location = struct {
        file: []u8,
        namespace: []u8 = "file",
        line: i32 = 1, // 1-based
        column: i32 = 0, // 0-based, in bytes
        length: u32 = 0, // in bytes
        line_text: ?[]u8,
        suggestion: ?[]u8,

        pub fn init(file: []u8, namespace: []u8, line: i32, column: i32, length: u32, line_text: ?[]u8, suggestion: ?[]u8) Location {
            return Location{
                .file = file,
                .namespace = namespace,
                .line = line,
                .column = column,
                .length = length,
                .line_text = line_text,
                .suggestion = suggestion,
            };
        }

        pub fn init_file(file: []u8, line: i32, column: i32, length: u32, line_text: ?[]u8, suggestion: ?[]u8) Location {
            var namespace = "file".*;
            return Location{
                .file = file,
                .namespace = &namespace,
                .line = line,
                .column = column,
                .length = length,
                .line_text = line_text,
                .suggestion = suggestion,
            };
        }
    };

    pub const Data = struct { text: []u8, location: *Msg.Location };

    kind: Kind,
    data: Data,
};

pub const Log = struct {
    debug: bool = false,
    warnings: u8 = 0,
    errors: u8 = 0,
    msgs: ArrayList(Msg),

    pub fn add_msg(self: *Log, msg: Msg) !void {
        try self.msgs.append(msg);
    }

    pub fn print(self: *Log) void {
        if (self.msgs.items.len > 0) {
            var msg: Msg = self.msgs.items[0];
            std.debug.print("\n\n{s}: {s}\n{s}\n{s}:{}:{}", .{ msg.kind.string(), msg.data.text, msg.data.location.line_text, msg.data.location.file, msg.data.location.line, msg.data.location.column });
        }
    }
};

pub const Source = struct {
    index: u32,
    contents: []u8,

    // An identifier that is mixed in to automatically-generated symbol names to
    // improve readability. For example, if the identifier is "util" then the
    // symbol for an "export default" statement will be called "util_default".
    identifier_name: []u8,
};

test "print msg" {
    var log = Log{ .msgs = ArrayList(Msg).init(std.testing.allocator) };
    defer log.msgs.deinit();
    var filename = "test.js".*;
    var syntax = "for(i = 0;)".*;
    var err = "invalid syntax".*;
    var namespace = "file".*;

    try log.add_msg(Msg{
        .kind = .err,
        .data = Msg.Data{ .location = &Msg.Location.init_file(&filename, 1, 3, 0, &syntax, ""), .text = &err },
    });

    log.print();
}
@@ -1,6 +1,7 @@
const std = @import("std");
const lex = @import("lexer/js_lexer.zig");
const lex = @import("js_lexer.zig");

pub fn main() anyerror!void {
    std.log.info("All your codebase are belong to us. {s}", .{lex.Keywords.get("hey")});

}
31
src/options.zig
Normal file
@@ -0,0 +1,31 @@
const std = @import("std");
const log = @import("logger.zig");
const fs = @import("fs.zig");

pub const Loader = enum {
    jsx,
    js,
    ts,
    tsx,
};

pub const TransformOptions = struct {
    footer: []u8 = "",
    banner: []u8 = "",
    define: std.StringHashMap([]u8),
    loader: Loader = Loader.tsx,
    resolve_dir: []u8 = "/",
    react_fast_refresh: bool = false,
    jsx_factory: []u8 = "React.createElement",
    jsx_pragma: []u8 = "jsx",
    inject: [][]u8,
    public_url: []u8,
    filesystem_cache: std.StringHashMap(fs.File),
    entry_point: fs.File,
};

pub const OutputFile = struct {
    path: []u8,
    contents: []u8,
};

pub const TransformResult = struct { errors: []log.Msg, warnings: []log.Msg, output_files: []OutputFile };
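A possible companion helper, sketched here for illustration only (it is not part of this commit); the function name and the extension-to-Loader mapping are assumptions based on the enum variants above:

// Sketch (assumption): map a file extension such as ".tsx" to a Loader.
pub fn loaderFromExtension(ext: []const u8) ?Loader {
    if (std.mem.eql(u8, ext, ".jsx")) return Loader.jsx;
    if (std.mem.eql(u8, ext, ".js")) return Loader.js;
    if (std.mem.eql(u8, ext, ".ts")) return Loader.ts;
    if (std.mem.eql(u8, ext, ".tsx")) return Loader.tsx;
    return null;
}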
8
src/strings.zig
Normal file
@@ -0,0 +1,8 @@
const std = @import("std");

pub fn indexOfChar(contents: []u8, char: u8) callconv(.Inline) ?usize {
    return std.mem.indexOfScalar(u8, contents, char);
}

pub fn lastIndexOfChar(contents: []u8, char: u8) callconv(.Inline) ?usize {
    return std.mem.lastIndexOfScalar(u8, contents, char);
}
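A quick usage sketch for these helpers (illustrative only, not part of the commit; the test name and sample path are made up):

const expect = std.testing.expect;

test "indexOfChar / lastIndexOfChar" {
    var path = "src/main.zig".*;
    // The only '/' in "src/main.zig" sits at byte offset 3.
    expect(lastIndexOfChar(&path, '/').? == 3);
    expect(indexOfChar(&path, 'q') == null);
}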
9
src/test/fixtures.zig
Normal file
@@ -0,0 +1,9 @@
const std = @import("std");

pub const fixtures = std.ComptimeStringMap([]u8, .{
    .{ "package.json", @embedFile("./fixtures/package.json") },
    .{ "tsconfig.json", @embedFile("./fixtures/tsconfig.json") },
    .{ "simple-component.js", @embedFile("./fixtures/simple-component.js") },
    .{ "simple-component.tsx", @embedFile("./fixtures/simple-component.tsx") },
});
1
src/test/fixtures/noop.js
vendored
Normal file
@@ -0,0 +1 @@
function hi() {}
7
src/test/fixtures/package.json
vendored
Normal file
@@ -0,0 +1,7 @@
{
  "name": "repo",
  "dependencies": {
    "react": "^16.8.4",
    "react-dom": "^16.8.4"
  }
}
5
src/test/fixtures/simple-component.js
vendored
Normal file
@@ -0,0 +1,5 @@
import * as React from "react";

export function Welcome() {
  return <div>Hi.</div>;
}
9
src/test/fixtures/simple-component.tsx
vendored
Normal file
@@ -0,0 +1,9 @@
import * as React from "react";

type WelcomeProps = {
  greeting?: string;
};

export function Welcome(props: WelcomeProps) {
  return <div>{props.greeting}</div>;
}
9
src/test/fixtures/tsconfig.json
vendored
Normal file
@@ -0,0 +1,9 @@
{
  "compilerOptions": {
    "baseUrl": "/Users/jarredsumner/Code/esdev/src/test/fixtures",
    "paths": {
      "components": ["components/*"]
    },
    "jsx": "preserve"
  }
}