it prints true and false

This commit is contained in:
Jarred Sumner
2021-04-25 18:47:09 -07:00
parent c0b7f71b9a
commit 99918c35ec
11 changed files with 801 additions and 116 deletions

View File

@@ -10,7 +10,7 @@ pub const Bundler = struct {
result: ?options.TransformResult = null,
pub fn init(options: options.TransformOptions, allocator: *std.mem.Allocator) Bundler {
var log = logger.Log{ .msgs = ArrayList(Msg).init(allocator) };
var log = logger.Log.init(allocator);
return Bundler{
.options = options,
@@ -25,7 +25,5 @@ pub const Bundler = struct {
var result = self.result;
var source = logger.Source.initFile(self.options.entry_point, self.allocator);
}
};

View File

@@ -68,6 +68,10 @@ pub const Ref = packed struct {
/// True when this ref is not associated with any source file:
/// source_index holds the sentinel value maxInt(Ref.Int).
pub fn isSourceNull(self: *const Ref) bool {
    return self.source_index == std.math.maxInt(Ref.Int);
}
/// Value equality for refs: same inner_index and same source_index.
/// The receiver is taken by value (not `*Ref`) so this can be called on
/// `const` refs and on loop/optional captures, which are not addressable
/// as mutable pointers; callers using `ptr.eql(b)` still work via auto-deref.
pub fn eql(ref: Ref, b: Ref) bool {
    return ref.inner_index == b.inner_index and ref.source_index == b.source_index;
}
};
pub const ImportItemStatus = packed enum {
@@ -550,6 +554,22 @@ pub const Symbol = struct {
var symbols_for_source: [][]Symbol = try allocator.alloc([]Symbol, sourceCount);
return Map{ .symbols_for_source = symbols_for_source };
}
/// Follow the link chain from `ref` to its canonical root ref.
/// Performs union-find style path compression: after resolving the chain,
/// the symbol's link is rewritten to point directly at the root so later
/// lookups are O(1). Returns `ref` unchanged when it has no symbol or no link.
/// NOTE(review): assumes `symbols.get` yields a capture whose mutation
/// persists in the map (i.e. a pointer-like payload) -- confirm Map.get.
pub fn follow(symbols: *Map, ref: Ref) Ref {
    if (symbols.get(ref)) |*symbol| {
        if (symbol.link) |link| {
            // Resolve transitively, then compress the path.
            // (The old code returned `symbol` -- a *Symbol -- where a Ref
            // was expected, and wrote the query ref back into the link,
            // which points the chain in the wrong direction.)
            var root = symbols.follow(link);
            if (!root.eql(link)) {
                symbol.link = root;
            }
            return root;
        }
        // No link: this ref is already canonical.
        return ref;
    }
    return ref;
}
};
pub fn isKindPrivate(kind: Symbol.Kind) bool {
@@ -767,6 +787,10 @@ pub const E = struct {
value: JavascriptString,
legacy_octal_loc: ?logger.Loc = null,
prefer_template: bool = false,
/// Decode this UTF-16LE string value into a freshly allocated UTF-8 slice.
/// Caller owns the returned memory. Errors from the decoder/allocator are
/// propagated unchanged (returning the error union directly is equivalent
/// to `return try ...`).
pub fn string(s: *String, allocator: *std.mem.Allocator) !string {
    return std.unicode.utf16leToUtf8Alloc(allocator, s.value);
}
};
// value is in the Node
@@ -804,8 +828,12 @@ pub const E = struct {
no: ExprNodeIndex,
};
pub const Require = struct {
import_record_index: u32 = 0,
};
pub const RequireOrRequireResolve = struct {
import_record_index: u32,
import_record_index: u32 = 0,
};
pub const Import = struct {
@@ -1376,18 +1404,26 @@ pub const Expr = struct {
.data = Data{ .e_if = exp },
};
},
*E.RequireOrRequireResolve => {
return Expr{
.loc = loc,
.data = Data{ .e_require_or_require_resolve = exp },
};
},
*E.Import => {
return Expr{
.loc = loc,
.data = Data{ .e_import = exp },
};
},
*E.Require => {
return Expr{
.loc = loc,
.data = Data{ .e_require = exp },
};
},
*E.RequireOrRequireResolve => {
return Expr{
.loc = loc,
.data = Data{ .e_require_or_require_resolve = exp },
};
},
else => {
@compileError("Expr.init needs a pointer to E.*");
},
@@ -1571,6 +1607,12 @@ pub const Expr = struct {
dat.* = st;
return Expr{ .loc = loc, .data = Data{ .e_import = dat } };
},
E.Require => {
var dat = allocator.create(E.Require) catch unreachable;
dat.* = st;
return Expr{ .loc = loc, .data = Data{ .e_require = dat } };
},
else => {
@compileError("Invalid type passed to Expr.init");
},
@@ -1614,6 +1656,8 @@ pub const Expr = struct {
e_this,
e_class,
e_require,
pub fn isArray(self: Tag) bool {
switch (self) {
.e_array => {
@@ -2099,6 +2143,7 @@ pub const Expr = struct {
e_await: *E.Await,
e_yield: *E.Yield,
e_if: *E.If,
e_require: *E.Require,
e_require_or_require_resolve: *E.RequireOrRequireResolve,
e_import: *E.Import,
@@ -2540,26 +2585,32 @@ pub const Ast = struct {
// These are stored at the AST level instead of on individual AST nodes so
// they can be manipulated efficiently without a full AST traversal
import_records: ?[]ImportRecord = null,
import_records: []ImportRecord = &([_]ImportRecord{}),
hashbang: ?string = null,
directive: ?string = null,
url_for_css: ?string = null,
parts: std.ArrayList(Part),
symbols: std.ArrayList(Symbol),
module_scope: ?Scope,
parts: []Part,
symbols: []Symbol = &([_]Symbol{}),
module_scope: ?Scope = null,
// char_freq: *CharFreq,
exports_ref: ?Ref,
module_ref: ?Ref,
wrapper_ref: ?Ref,
exports_ref: ?Ref = null,
module_ref: ?Ref = null,
wrapper_ref: ?Ref = null,
// These are used when bundling. They are filled in during the parser pass
// since we already have to traverse the AST then anyway and the parser pass
// is conveniently fully parallelized.
named_imports: std.AutoHashMap(Ref, NamedImport),
named_exports: std.StringHashMap(NamedExport),
top_level_symbol_to_parts: std.AutoHashMap(Ref, []u32),
export_star_import_records: std.ArrayList(u32),
named_imports: std.AutoHashMap(Ref, NamedImport) = undefined,
named_exports: std.StringHashMap(NamedExport) = undefined,
top_level_symbol_to_parts: std.AutoHashMap(Ref, []u32) = undefined,
export_star_import_records: std.ArrayList(u32) = undefined,
/// Construct a minimal Ast for tests: only `parts` is populated; every
/// other field keeps its declared default (several remain `undefined`,
/// so tests must not touch the hash-map/list fields).
pub fn initTest(parts: []Part) Ast {
    return Ast{
        .parts = parts,
    };
}
};
pub const Span = struct {
@@ -2691,23 +2742,23 @@ pub const AstData = struct {
// splitting.
pub const Part = struct {
stmts: []Stmt,
scopes: []*Scope,
scopes: []*Scope = &([_]*Scope{}),
// Each is an index into the file-level import record list
import_record_indices: std.ArrayList(u32),
import_record_indices: std.ArrayList(u32) = undefined,
// All symbols that are declared in this part. Note that a given symbol may
// have multiple declarations, and so may end up being declared in multiple
// parts (e.g. multiple "var" declarations with the same name). Also note
// that this list isn't deduplicated and may contain duplicates.
declared_symbols: std.ArrayList(DeclaredSymbol),
declared_symbols: std.ArrayList(DeclaredSymbol) = undefined,
// An estimate of the number of uses of all symbols used within this part.
symbol_uses: std.AutoHashMap(Ref, Symbol.Use),
symbol_uses: std.AutoHashMap(Ref, Symbol.Use) = undefined,
// The indices of the other parts in this file that are needed if this part
// is needed.
dependencies: std.ArrayList(Dependency),
dependencies: std.ArrayList(Dependency) = undefined,
// If true, this part can be removed if none of the declared symbols are
// used. If the file containing this part is imported, then all parts that

View File

@@ -35,11 +35,12 @@ pub fn NewLexerType(comptime jsonOptions: ?JSONOptions) type {
// };
// err: ?@This().Error,
log: logger.Log,
log: *logger.Log,
source: logger.Source,
current: usize = 0,
start: usize = 0,
end: usize = 0,
did_panic: bool = false,
approximate_newline_count: i32 = 0,
legacy_octal_loc: logger.Loc = logger.Loc.Empty,
previous_backslash_quote_in_jsx: logger.Range = logger.Range.None,
@@ -92,8 +93,7 @@ pub fn NewLexerType(comptime jsonOptions: ?JSONOptions) type {
return;
}
const errorMessage = std.fmt.allocPrint(self.allocator, format, args) catch unreachable;
self.log.addError(self.source, __loc, errorMessage) catch unreachable;
self.log.addErrorFmt(self.source, __loc, self.allocator, format, args) catch unreachable;
self.prev_error_loc = __loc;
var msg = self.log.msgs.items[self.log.msgs.items.len - 1];
msg.formatNoWriter(std.debug.panic);
@@ -114,7 +114,11 @@ pub fn NewLexerType(comptime jsonOptions: ?JSONOptions) type {
}
fn doPanic(self: *@This(), content: []const u8) void {
std.debug.panic("{s}", .{content});
if (@import("builtin").is_test) {
self.did_panic = true;
} else {
std.debug.panic("{s}", .{content});
}
}
pub fn codePointEql(self: *@This(), a: u8) bool {
@@ -324,7 +328,7 @@ pub fn NewLexerType(comptime jsonOptions: ?JSONOptions) type {
pub fn next(lexer: *@This()) void {
lexer.has_newline_before = lexer.end == 0;
lex: while (lexer.log.errors == 0) {
lex: while (true) {
lexer.start = lexer.end;
lexer.token = T.t_end_of_file;
@@ -841,11 +845,7 @@ pub fn NewLexerType(comptime jsonOptions: ?JSONOptions) type {
} else {
const contents = lexer.raw();
lexer.identifier = contents;
if (Keywords.get(contents)) |keyword| {
lexer.token = keyword;
} else {
lexer.token = T.t_identifier;
}
lexer.token = Keywords.get(contents) orelse T.t_identifier;
}
},
@@ -960,11 +960,11 @@ pub fn NewLexerType(comptime jsonOptions: ?JSONOptions) type {
};
}
pub fn init(log: logger.Log, source: logger.Source, allocator: *std.mem.Allocator) !@This() {
pub fn init(log: *logger.Log, source: *logger.Source, allocator: *std.mem.Allocator) !@This() {
var empty_string_literal: JavascriptString = undefined;
var lex = @This(){
.log = log,
.source = source,
.source = source.*,
.string_literal = empty_string_literal,
.prev_error_loc = logger.Loc.Empty,
.allocator = allocator,
@@ -1437,7 +1437,7 @@ pub const Lexer = NewLexerType(null);
pub const JSONLexer = NewLexerType(JSONOptions{ .allow_comments = false, .allow_trailing_commas = false });
pub const TSConfigJSONLexer = NewLexerType(JSONOptions{ .allow_comments = true, .allow_trailing_commas = true });
fn isIdentifierStart(codepoint: CodePoint) bool {
pub fn isIdentifierStart(codepoint: CodePoint) bool {
switch (codepoint) {
'a'...'z', 'A'...'Z', '_', '$' => {
return true;
@@ -1447,11 +1447,14 @@ fn isIdentifierStart(codepoint: CodePoint) bool {
},
}
}
fn isIdentifierContinue(codepoint: CodePoint) bool {
pub fn isIdentifierContinue(codepoint: CodePoint) bool {
switch (codepoint) {
'_', '$', '0'...'9', 'a'...'z', 'A'...'Z' => {
return true;
},
-1 => {
return false;
},
else => {},
}
@@ -1468,7 +1471,7 @@ fn isIdentifierContinue(codepoint: CodePoint) bool {
return false;
}
fn isWhitespace(codepoint: CodePoint) bool {
pub fn isWhitespace(codepoint: CodePoint) bool {
switch (codepoint) {
0x000B, // line tabulation
0x0009, // character tabulation
@@ -1528,18 +1531,15 @@ fn float64(num: anytype) callconv(.Inline) f64 {
return @intToFloat(f64, num);
}
fn test_lexer(contents: []const u8) @This() {
fn test_lexer(contents: []const u8) Lexer {
alloc.setup(std.heap.page_allocator) catch unreachable;
const msgs = std.ArrayList(logger.Msg).init(alloc.dynamic);
const log = logger.Log{
.msgs = msgs,
};
const source = logger.Source.initPathString(
var log = alloc.dynamic.create(logger.Log) catch unreachable;
log.* = logger.Log.init(alloc.dynamic);
var source = logger.Source.initPathString(
"index.js",
contents,
);
return @This().init(log, source, alloc.dynamic) catch unreachable;
return Lexer.init(log, &source, alloc.dynamic) catch unreachable;
}
// test "@This().next()" {
@@ -1554,35 +1554,49 @@ fn test_lexer(contents: []const u8) @This() {
// lex.next();
// }
fn expectStr(lexer: *Lexer, expected: string, actual: string) void {
if (lexer.log.errors > 0 or lexer.log.warnings > 0) {
std.debug.panic("{s}", .{lexer.log.msgs.items});
// const msg: logger.Msg = lexer.log.msgs.items[0];
// msg.formatNoWriter(std.debug.panic);
}
std.testing.expectEqual(lexer.log.errors, 0);
std.testing.expectEqual(lexer.log.warnings, 0);
std.testing.expectEqual(false, lexer.did_panic);
std.testing.expectEqual(@as(usize, 0), lexer.log.errors);
std.testing.expectEqual(@as(usize, 0), lexer.log.warnings);
std.testing.expectEqualStrings(expected, actual);
}
test "Lexer.next() simple" {
var lex = test_lexer("for (let i = 0; i < 100; i++) { }");
std.testing.expectEqualStrings("\"for\"", tokenToString.get(lex.token));
expectStr(&lex, "\"for\"", tokenToString.get(lex.token));
lex.next();
std.testing.expectEqualStrings("\"(\"", tokenToString.get(lex.token));
expectStr(&lex, "\"(\"", tokenToString.get(lex.token));
lex.next();
std.testing.expectEqualStrings("let", lex.raw());
expectStr(&lex, "let", lex.raw());
lex.next();
std.testing.expectEqualStrings("i", lex.raw());
expectStr(&lex, "i", lex.raw());
lex.next();
std.testing.expectEqualStrings("=", lex.raw());
expectStr(&lex, "=", lex.raw());
lex.next();
std.testing.expectEqualStrings("0", lex.raw());
expectStr(&lex, "0", lex.raw());
lex.next();
std.testing.expectEqualStrings(";", lex.raw());
expectStr(&lex, ";", lex.raw());
lex.next();
std.testing.expectEqualStrings("i", lex.raw());
expectStr(&lex, "i", lex.raw());
lex.next();
std.testing.expect(lex.number == 0.0);
std.testing.expectEqualStrings("<", lex.raw());
expectStr(&lex, "<", lex.raw());
lex.next();
std.testing.expect(lex.number == 100.0);
std.testing.expectEqualStrings("100", lex.raw());
expectStr(&lex, "100", lex.raw());
lex.next();
}
pub fn test_stringLiteralEquals(expected: string, source_text: string) void {
const msgs = std.ArrayList(logger.Msg).init(std.testing.allocator);
const log = logger.Log{
var msgs = std.ArrayList(logger.Msg).init(std.testing.allocator);
var log = logger.Log{
.msgs = msgs,
};
@@ -1593,7 +1607,7 @@ pub fn test_stringLiteralEquals(expected: string, source_text: string) void {
source_text,
);
var lex = try @This().init(log, source, std.heap.page_allocator);
var lex = try Lexer.init(&log, &source, std.heap.page_allocator);
while (!lex.token.isString() and lex.token != .t_end_of_file) {
lex.next();
}

View File

@@ -344,7 +344,7 @@ const PropertyOpts = struct {
pub const Parser = struct {
options: Options,
lexer: js_lexer.Lexer,
log: logger.Log,
log: *logger.Log,
source: logger.Source,
allocator: *std.mem.Allocator,
p: ?*P,
@@ -412,9 +412,7 @@ pub const Parser = struct {
return result;
}
pub fn init(transform: options.TransformOptions, allocator: *std.mem.Allocator) !Parser {
const log = logger.Log{ .msgs = List(logger.Msg).init(allocator) };
const source = logger.Source.initFile(transform.entry_point, allocator);
pub fn init(transform: options.TransformOptions, log: *logger.Log, source: *logger.Source, allocator: *std.mem.Allocator) !Parser {
const lexer = try js_lexer.Lexer.init(log, source, allocator);
return Parser{
.options = Options{
@@ -427,7 +425,7 @@ pub const Parser = struct {
},
.allocator = allocator,
.lexer = lexer,
.source = source,
.source = source.*,
.log = log,
.p = null,
};
@@ -471,7 +469,7 @@ const ParseStatementOptions = struct {
const P = struct {
allocator: *std.mem.Allocator,
options: Parser.Options,
log: logger.Log,
log: *logger.Log,
source: logger.Source,
lexer: js_lexer.Lexer,
allow_in: bool = false,
@@ -6249,7 +6247,7 @@ const P = struct {
return p.e(E.Missing{}, loc);
}
pub fn init(allocator: *std.mem.Allocator, log: logger.Log, source: logger.Source, lexer: js_lexer.Lexer, opts: Parser.Options) !*P {
pub fn init(allocator: *std.mem.Allocator, log: *logger.Log, source: logger.Source, lexer: js_lexer.Lexer, opts: Parser.Options) !*P {
var parser = try allocator.create(P);
parser.allocated_names = @TypeOf(parser.allocated_names).init(allocator);
parser.scopes_for_current_part = @TypeOf(parser.scopes_for_current_part).init(allocator);

View File

@@ -0,0 +1,448 @@
const std = @import("std");
const logger = @import("logger.zig");
const js_lexer = @import("js_lexer.zig");
const importRecord = @import("import_record.zig");
const js_ast = @import("js_ast.zig");
const options = @import("options.zig");
const alloc = @import("alloc.zig");
const rename = @import("renamer.zig");
const fs = @import("fs.zig");
usingnamespace @import("strings.zig");
usingnamespace @import("ast/base.zig");
usingnamespace js_ast.G;
const expect = std.testing.expect;
const ImportKind = importRecord.ImportKind;
const BindingNodeIndex = js_ast.BindingNodeIndex;
const Ref = js_ast.Ref;
const LocRef = js_ast.LocRef;
const S = js_ast.S;
const B = js_ast.B;
const G = js_ast.G;
const T = js_lexer.T;
const E = js_ast.E;
const Stmt = js_ast.Stmt;
const Expr = js_ast.Expr;
const Binding = js_ast.Binding;
const Symbol = js_ast.Symbol;
const Level = js_ast.Op.Level;
const Op = js_ast.Op;
const Scope = js_ast.Scope;
const locModuleScope = logger.Loc.Empty;
const Ast = js_ast.Ast;
/// Placeholder for printer paths that are not implemented yet; always panics.
fn notimpl() void {
    std.debug.panic("Not implemented yet!", .{});
}
/// One chunk of source-map output, produced independently per file part.
pub const SourceMapChunk = struct {
    // Accumulated encoded mapping output for this chunk.
    buffer: MutableString,
    // Encoder state at the end of this chunk, used to rebase the next one.
    end_state: State = State{},
    final_generated_column: usize = 0,
    should_ignore: bool = false,

    // Coordinates in source maps are stored using relative offsets for size
    // reasons. When joining together chunks of a source map that were emitted
    // in parallel for different parts of a file, we need to fix up the first
    // segment of each chunk to be relative to the end of the previous chunk.
    pub const State = struct {
        // This isn't stored in the source map. It's only used by the bundler to join
        // source map chunks together correctly.
        generated_line: i32 = 0,

        // These are stored in the source map in VLQ format.
        generated_column: i32 = 0,
        source_index: i32 = 0,
        original_line: i32 = 0,
        original_column: i32 = 0,
    };
};
/// Printer configuration.
pub const Options = struct {
    // NOTE(review): presumably the ref used when rewriting module-level
    // references during printing -- confirm against the parser's usage.
    to_module_ref: js_ast.Ref,
    // Current indentation level, in indent units.
    indent: usize = 0,

    // If we're writing out a source map, this table of line start indices lets
    // us do binary search on to figure out what line a given AST node came from
    // line_offset_tables: []LineOffsetTable
};
/// Result of printing an AST: the generated JS text plus an optional source map chunk.
pub const PrintResult = struct { js: string, source_map: ?SourceMapChunk = null };
/// Contextual flags passed down while printing expressions.
const ExprFlag = enum {
    forbid_call,
    forbid_in,
    has_non_optional_chain_parent,
    expr_result_is_unused,
};
/// Instantiate a printer type. `ascii_only` selects whether non-ASCII output
/// must be escaped (the flag is reserved; string printing is not implemented yet).
pub fn NewPrinter(comptime ascii_only: bool) type {
    return struct {
        symbols: Symbol.Map,
        import_records: []importRecord.ImportRecord,

        // Output buffer: all printed JavaScript is appended here.
        js: MutableString,

        needs_semicolon: bool = false,
        stmt_start: i32 = -1,
        options: Options,
        export_default_start: i32 = -1,
        arrow_expr_start: i32 = -1,
        for_of_init_start: i32 = -1,
        prev_op: Op.Code = Op.Code.bin_add,
        prev_op_end: i32 = -1,
        prev_num_end: i32 = -1,
        prev_reg_exp_end: i32 = -1,
        call_target: ?Expr.Data = null,
        // Scratch space for number-to-string conversion, zero-initialized.
        int_to_bytes_buffer: [64]u8 = [_]u8{0} ** 64,
        allocator: *std.mem.Allocator,

        const Printer = @This();

        // Stub kept as a hook for a planned comptime-buffered print
        // optimization (the sketched comptime_print machinery was removed
        // as dead commented-out code).
        pub fn comptime_flush(p: *Printer) void {}

        /// Append `str` to the output, growing the buffer as needed.
        pub fn print(p: *Printer, str: string) void {
            p.js.append(str) catch unreachable;
        }

        /// Append without growing. Caller must have reserved capacity first
        /// (see printIndent).
        pub fn unsafePrint(p: *Printer, str: string) void {
            p.js.appendAssumeCapacity(str);
        }

        /// Emit the current indentation level.
        pub fn printIndent(p: *Printer) void {
            comptime_flush(p);

            if (p.options.indent == 0) {
                return;
            }

            // Reserve all the space up front so unsafePrint cannot overflow.
            p.js.growBy(p.options.indent * " ".len) catch unreachable;

            // FIX: iterate over a local counter. The previous loop decremented
            // p.options.indent itself, permanently resetting the indent level
            // to zero after the first call to printIndent.
            var remaining = p.options.indent;
            while (remaining > 0) : (remaining -= 1) {
                p.unsafePrint(" ");
            }
        }

        pub fn printSpace(p: *Printer) void {
            p.print(" ");
        }

        pub fn printNewline(p: *Printer) void {
            notimpl();
        }

        pub fn printSemicolonAfterStatement(p: *Printer) void {
            p.print(";\n");
        }

        pub fn printSemicolonIfNeeded(p: *Printer) void {
            notimpl();
        }

        /// Emit a separating space when the last output character could
        /// otherwise fuse with a following identifier (or ended a regexp).
        pub fn printSpaceBeforeIdentifier(p: *Printer) void {
            const n = p.js.len();
            if (n > 0 and (js_lexer.isIdentifierContinue(p.js.list.items[n - 1]) or n == p.prev_reg_exp_end)) {
                p.print(" ");
            }
        }

        pub fn printDotThenPrefix(p: *Printer) Level {
            return .lowest;
        }

        pub fn printUndefined(level: Level) void {
            notimpl();
        }

        pub fn printBody(stmt: Stmt) void {
            notimpl();
        }

        pub fn printBlock(loc: logger.Loc, stmts: []Stmt) void {
            notimpl();
        }

        pub fn printDecls(keyword: string, decls: []G.Decl, flags: ExprFlag) void {
            notimpl();
        }

        // noop for now
        pub fn addSourceMapping(p: *Printer, loc: logger.Loc) void {}

        pub fn printSymbol(p: *Printer, ref: Ref) void {
            notimpl();
        }

        pub fn printClauseAlias(p: *Printer, alias: string) void {
            notimpl();
        }

        pub fn printFunc(p: *Printer, func: G.Fn) void {
            notimpl();
        }

        pub fn printClass(p: *Printer, class: G.Class) void {
            notimpl();
        }

        /// Print one expression. Only booleans are implemented so far; every
        /// other node panics through notimpl(). Unused `|e|` captures were
        /// dropped and the unimplemented arms grouped into one prong.
        pub fn printExpr(p: *Printer, expr: Expr, level: Level, flags: ExprFlag) void {
            p.addSourceMapping(expr.loc);

            switch (expr.data) {
                .e_boolean => |e| {
                    p.printSpaceBeforeIdentifier();
                    p.print(if (e.value) "true" else "false");
                },
                // Everything else is still unimplemented.
                .e_missing,
                .e_undefined,
                .e_super,
                .e_null,
                .e_this,
                .e_spread,
                .e_new_target,
                .e_import_meta,
                .e_new,
                .e_call,
                .e_require,
                .e_require_or_require_resolve,
                .e_import,
                .e_dot,
                .e_index,
                .e_if,
                .e_arrow,
                .e_function,
                .e_class,
                .e_array,
                .e_object,
                .e_string,
                .e_template,
                .e_reg_exp,
                .e_big_int,
                .e_number,
                .e_identifier,
                .e_import_identifier,
                .e_await,
                .e_yield,
                .e_unary,
                .e_binary,
                => notimpl(),
                else => {
                    std.debug.panic("Unexpected expression of type {s}", .{expr.data});
                },
            }
        }

        pub fn printProperty(p: *Printer, prop: G.Property) void {
            notimpl();
        }

        pub fn printBinding(p: *Printer, binding: Binding) void {
            notimpl();
        }

        /// Print one statement. Comments and expression statements are
        /// implemented; the grouped arms are deliberate no-ops for now
        /// (matching the original behavior of silently skipping them).
        pub fn printStmt(p: *Printer, stmt: Stmt) !void {
            p.comptime_flush();
            p.addSourceMapping(stmt.loc);

            switch (stmt.data) {
                .s_comment => |s| {
                    p.printIndentedComment(s.text);
                },
                .s_expr => |s| {
                    p.printIndent();
                    p.stmt_start = p.js.lenI();
                    p.printExpr(s.value, .lowest, .expr_result_is_unused);
                    p.printSemicolonAfterStatement();
                },
                // Not yet implemented; intentionally produce no output.
                .s_function,
                .s_class,
                .s_empty,
                .s_export_default,
                .s_export_star,
                .s_export_clause,
                .s_export_from,
                .s_local,
                .s_if,
                .s_do_while,
                .s_for_in,
                .s_for_of,
                .s_while,
                .s_with,
                .s_label,
                .s_try,
                .s_for,
                .s_switch,
                .s_import,
                .s_block,
                .s_debugger,
                .s_directive,
                .s_break,
                .s_continue,
                .s_return,
                .s_throw,
                => {},
                else => {
                    std.debug.panic("Unexpected statement of type {s}", .{@TypeOf(stmt)});
                },
            }
        }

        /// Print a comment at the current indent level.
        /// NOTE(review): the multi-line branch ends in printNewline(), which
        /// is still notimpl() -- multi-line comments will panic until it is
        /// implemented.
        pub fn printIndentedComment(p: *Printer, _text: string) void {
            var text = _text;
            if (strings.startsWith(text, "/*")) {
                // Re-indent multi-line comments
                while (strings.indexOfChar(text, '\n')) |newline_index| {
                    p.printIndent();
                    p.print(text[0 .. newline_index + 1]);
                    text = text[newline_index + 1 ..];
                }
                p.printIndent();
                p.print(text);
                p.printNewline();
            } else {
                // Print a mandatory newline after single-line comments
                p.printIndent();
                p.print(text);
                p.print("\n");
            }
        }

        /// Create a printer with a 1 KiB initial output buffer.
        pub fn init(allocator: *std.mem.Allocator, tree: Ast, symbols: Symbol.Map, opts: Options) !Printer {
            return Printer{
                .allocator = allocator,
                .import_records = tree.import_records,
                .options = opts,
                .symbols = symbols,
                .js = try MutableString.init(allocator, 1024),
            };
        }
    };
}
// Concrete printer instantiations: UTF-8 passthrough vs. ASCII-escaped output.
const UnicodePrinter = NewPrinter(false);
const AsciiPrinter = NewPrinter(true);
/// Print `tree` to a JavaScript string, choosing the ASCII-only or Unicode
/// printer at runtime. The previously duplicated branch bodies are shared
/// through one comptime-generic helper.
pub fn printAst(allocator: *std.mem.Allocator, tree: Ast, symbols: js_ast.Symbol.Map, ascii_only: bool, opts: Options) !PrintResult {
    if (ascii_only) {
        return printAstWithPrinter(AsciiPrinter, allocator, tree, symbols, opts);
    } else {
        return printAstWithPrinter(UnicodePrinter, allocator, tree, symbols, opts);
    }
}

// Shared body of printAst, generic over the concrete printer type.
fn printAstWithPrinter(comptime PrinterType: type, allocator: *std.mem.Allocator, tree: Ast, symbols: js_ast.Symbol.Map, opts: Options) !PrintResult {
    var printer = try PrinterType.init(allocator, tree, symbols, opts);
    for (tree.parts) |part| {
        for (part.stmts) |stmt| {
            try printer.printStmt(stmt);
        }
    }

    return PrintResult{
        .js = printer.js.toOwnedSlice(),
    };
}

View File

@@ -11,6 +11,7 @@ usingnamespace @import("strings.zig");
usingnamespace @import("ast/base.zig");
usingnamespace js_ast.G;
const expect = std.testing.expect;
const ImportKind = importRecord.ImportKind;
const BindingNodeIndex = js_ast.BindingNodeIndex;
@@ -41,13 +42,13 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
const Lexer = if (opts.allow_comments) js_lexer.TSConfigJSONLexer else js_lexer.JSONLexer;
return struct {
lexer: Lexer,
source: logger.Source,
log: logger.Log,
source: *logger.Source,
log: *logger.Log,
allocator: *std.mem.Allocator,
pub fn init(allocator: *std.mem.Allocator, source: logger.Source, log: logger.Log) Parser {
pub fn init(allocator: *std.mem.Allocator, source: *logger.Source, log: *logger.Log) !Parser {
return Parser{
.lexer = Lexer.init(log, source, allocator),
.lexer = try Lexer.init(log, source, allocator),
.allocator = allocator,
.log = log,
.source = source,
@@ -63,7 +64,7 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
return Expr.alloc(p.allocator, t, loc);
}
}
pub fn parseExpr(p: *Parser) ?Expr {
pub fn parseExpr(p: *Parser) Expr {
const loc = p.lexer.loc();
switch (p.lexer.token) {
@@ -104,7 +105,7 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
.t_open_bracket => {
p.lexer.next();
var is_single_line = !p.lexer.has_newline_before;
var exprs = List(Expr).init(p.allocator);
var exprs = std.ArrayList(Expr).init(p.allocator);
while (p.lexer.token != .t_close_bracket) {
if (exprs.items.len > 0) {
@@ -121,11 +122,7 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
}
}
if (p.parseExpr()) |expr| {
try exprs.append(expr);
} else {
break;
}
exprs.append(p.parseExpr()) catch unreachable;
}
if (p.lexer.has_newline_before) {
@@ -137,8 +134,8 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
.t_open_brace => {
p.lexer.next();
var is_single_line = !p.lexer.has_newline_before;
var properties = List(G.Property).init(p.allocator);
var duplicates = std.StringHashMap(u0).init(p.allocator);
var properties = std.ArrayList(G.Property).init(p.allocator);
var duplicates = std.StringHashMap(u1).init(p.allocator);
while (p.lexer.token != .t_close_brace) {
if (properties.items.len > 0) {
@@ -153,17 +150,17 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
var key_range = p.lexer.range();
var key = p.e(E.String{ .value = key_string }, key_range.loc);
p.lexer.expect(.t_string_literal);
var key_text = p.lexer.utf16ToString();
var key_text = p.lexer.utf16ToString(key_string);
// Warn about duplicate keys
if (duplicates.contains(key_text)) {
p.log.addRangeWarningFmt(p.source, r, "Duplicate key \"{s}\" in object literal", .{key_text}) catch unreachable;
} else {
duplicates.put(key_text, 0) catch unreachable;
const entry = duplicates.getOrPut(key_text) catch unreachable;
if (entry.found_existing) {
p.log.addRangeWarningFmt(p.source.*, key_range, p.allocator, "Duplicate key \"{s}\" in object literal", .{key_text}) catch unreachable;
}
p.lexer.expect(.t_colon);
var value = p.parseExpr() orelse return null;
try properties.append(G.Property{ .key = key, .value = value });
var value = p.parseExpr();
properties.append(G.Property{ .key = key, .value = value }) catch unreachable;
}
is_single_line = if (p.lexer.has_newline_before) false else is_single_line;
@@ -175,7 +172,7 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
},
else => {
p.lexer.unexpected();
return null;
return p.e(E.Missing{}, loc);
},
}
}
@@ -186,7 +183,7 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
if (p.lexer.token == closer) {
if (!opts.allow_trailing_commas) {
p.log.addRangeError(p.source, comma_range, "JSON does not support trailing commas") catch unreachable;
p.log.addRangeError(p.source.*, comma_range, "JSON does not support trailing commas") catch unreachable;
}
return false;
}
@@ -199,14 +196,99 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
const JSONParser = JSONLikeParser(js_lexer.JSONOptions{});
const TSConfigParser = JSONLikeParser(js_lexer.JSONOptions{ .allow_comments = true, .allow_trailing_commas = true });
pub fn ParseJSON(log: logger.Log, source: logger.Source) !?Expr {
var parser = JSONParser.init(allocator, log, source);
pub fn ParseJSON(source: *logger.Source, log: *logger.Log, allocator: *std.mem.Allocator) !Expr {
var parser = try JSONParser.init(allocator, source, log);
return try parser.parseExpr();
return parser.parseExpr();
}
pub fn ParseTSConfig(log: logger.Loc, source: logger.Source) !?Expr {
var parser = TSConfigParser.init(allocator, log, source);
pub fn ParseTSConfig(log: logger.Loc, source: logger.Source, allocator: *std.mem.Allocator) !Expr {
var parser = try TSConfigParser.init(allocator, log, source);
return try parser.parseExpr();
return parser.parseExpr();
}
const duplicateKeyJson = "{ \"name\": \"valid\", \"name\": \"invalid\" }";
// Parse `_contents` as JSON, print it back through the JS printer, and
// assert the trimmed output equals `expected`.
fn expectPrintedJSON(_contents: string, expected: string) void {
    if (alloc.dynamic_manager == null) {
        alloc.setup(std.heap.page_allocator) catch unreachable;
    }
    // Copy the input and append ';' so it prints as a full statement.
    var contents = alloc.dynamic.alloc(u8, _contents.len + 1) catch unreachable;
    std.mem.copy(u8, contents, _contents);
    contents[contents.len - 1] = ';';
    var log = logger.Log.init(alloc.dynamic);
    const js_printer = @import("js_printer.zig");
    var source = logger.Source.initPathString(
        "source.json",
        contents,
    );
    // FIX: `try` is a compile error inside a function returning `void`;
    // treat a parse failure as a test panic instead. (Also dropped the
    // unused local `renamer` import.)
    const expr = ParseJSON(&source, &log, alloc.dynamic) catch unreachable;
    var stmt = Stmt.alloc(std.heap.page_allocator, S.SExpr{ .value = expr }, logger.Loc{ .start = 0 });
    var part = js_ast.Part{
        .stmts = &([_]Stmt{stmt}),
    };
    const tree = js_ast.Ast.initTest(&([_]js_ast.Part{part}));
    var symbol_map = Symbol.Map{};
    if (log.msgs.items.len > 0) {
        std.debug.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text });
    }
    const result = js_printer.printAst(std.heap.page_allocator, tree, symbol_map, true, js_printer.Options{ .to_module_ref = Ref{ .inner_index = 0 } }) catch unreachable;
    var js = result.js;

    // Strip trailing newlines and the synthetic ';' before comparing.
    if (js.len > 1) {
        while (js[js.len - 1] == '\n') {
            js = js[0 .. js.len - 1];
        }

        if (js[js.len - 1] == ';') {
            js = js[0 .. js.len - 1];
        }
    }

    std.testing.expectEqualStrings(expected, js);
}
// End-to-end smoke test: boolean JSON literals survive a parse -> print
// round trip unchanged.
test "ParseJSON" {
    expectPrintedJSON("true", "true");
    expectPrintedJSON("false", "false");
}
// Duplicate keys in a JSON object must both be kept in the parsed AST,
// with exactly one diagnostic recorded in the log.
test "ParseJSON DuplicateKey warning" {
    alloc.setup(std.heap.page_allocator) catch unreachable;
    var log = logger.Log.init(alloc.dynamic);
    var source = logger.Source.initPathString(
        "package.json",
        duplicateKeyJson,
    );
    const expr = try ParseJSON(&source, &log, alloc.dynamic);
    const tag = @as(Expr.Tag, expr.data);
    expect(tag == .e_object);
    const object = expr.data.e_object;
    // Both "name" properties are preserved in order...
    std.testing.expectEqual(@as(usize, 2), object.properties.len);
    const name1 = object.properties[0];
    expect(name1.key != null);
    expect(name1.value != null);
    expect(Expr.Tag.e_string == @as(Expr.Tag, name1.value.?.data));
    expect(Expr.Tag.e_string == @as(Expr.Tag, name1.key.?.data));
    expect(strings.eqlUtf16("name", name1.key.?.data.e_string.value));
    expect(strings.eqlUtf16("valid", name1.value.?.data.e_string.value));
    const name2 = object.properties[1];
    expect(name2.key != null);
    expect(name2.value != null);
    expect(Expr.Tag.e_string == @as(Expr.Tag, name2.value.?.data));
    expect(Expr.Tag.e_string == @as(Expr.Tag, name2.key.?.data));
    expect(strings.eqlUtf16("name", name2.key.?.data.e_string.value));
    std.testing.expectEqualStrings("invalid", try name2.value.?.data.e_string.string(alloc.dynamic));
    // ...and the duplicate key produced exactly one logged message.
    std.testing.expectEqual(@as(usize, 1), log.msgs.items.len);
}

View File

@@ -103,7 +103,7 @@ pub const Msg = struct {
kind: Kind = Kind.err,
data: Data,
notes: ?[]Data = null,
pub fn format(msg: *const Msg, to: anytype, formatterFunc: @TypeOf(std.fmt.format)) !void {
pub fn doFormat(msg: *const Msg, to: anytype, formatterFunc: @TypeOf(std.fmt.format)) !void {
try formatterFunc(to, "\n\n{s}: {s}\n{s}\n{s}:{}:{}", .{ msg.kind.string(), msg.data.text, msg.data.location.?.line_text, msg.data.location.?.file, msg.data.location.?.line, msg.data.location.?.column });
}
@@ -127,10 +127,16 @@ pub const Range = packed struct {
pub const Log = struct {
debug: bool = false,
warnings: u8 = 0,
errors: u8 = 0,
warnings: usize = 0,
errors: usize = 0,
msgs: ArrayList(Msg),
pub fn init(allocator: *std.mem.Allocator) Log {
return Log{
.msgs = ArrayList(Msg).init(allocator),
};
}
pub fn addVerbose(log: *Log, source: ?Source, loc: Loc, text: string) !void {
try log.addMsg(Msg{
.kind = .verbose,
@@ -179,7 +185,7 @@ pub const Log = struct {
}
pub fn addWarningFmt(log: *Log, source: ?Source, l: Loc, allocator: *std.mem.Allocator, comptime text: string, args: anytype) !void {
log.errors += 1;
log.warnings += 1;
try log.addMsg(Msg{
.kind = .err,
.data = rangeData(source, Range{ .loc = l }, std.fmt.allocPrint(allocator, text, args) catch unreachable),
@@ -187,9 +193,9 @@ pub const Log = struct {
}
pub fn addRangeWarningFmt(log: *Log, source: ?Source, r: Range, allocator: *std.mem.Allocator, comptime text: string, args: anytype) !void {
log.errors += 1;
log.warnings += 1;
try log.addMsg(Msg{
.kind = .err,
.kind = .warn,
.data = rangeData(source, r, std.fmt.allocPrint(allocator, text, args) catch unreachable),
});
}
@@ -241,7 +247,7 @@ pub const Log = struct {
// TODO:
pub fn print(self: *Log, to: anytype) !void {
for (self.msgs.items) |msg| {
try msg.format(to, std.fmt.format);
try msg.doFormat(to, std.fmt.format);
}
}
};
@@ -406,7 +412,8 @@ pub fn rangeData(source: ?Source, r: Range, text: string) Data {
}
test "print msg" {
var log = Log{ .msgs = ArrayList(Msg).init(std.testing.allocator) };
var msgs = ArrayList(Msg).init(std.testing.allocator);
var log = Log{ .msgs = msgs };
defer log.msgs.deinit();
var filename = "test.js".*;
var syntax = "for(i = 0;)".*;
@@ -420,5 +427,5 @@ test "print msg" {
const stdout = std.io.getStdOut().writer();
try log.print(stdout);
// try log.print(stdout);
}

View File

@@ -20,7 +20,10 @@ pub fn main() anyerror!void {
const entryPointName = "/var/foo/index.js";
const code = "for (let i = 0; i < 100; i++) { console.log('hi') aposkdpoaskdpokasdpokasdpokasdpokasdpoaksdpoaksdpoaskdpoaksdpoaksdpoaskdpoaskdpoasdk; ";
var parser = try js_parser.Parser.init(try options.TransformOptions.initUncached(alloc.dynamic, entryPointName, code), alloc.dynamic);
var log = logger.Log.init(alloc.dynamic);
const opts = try options.TransformOptions.initUncached(alloc.dynamic, entryPointName, code);
var source = logger.Source.initFile(opts.entry_point, alloc.dynamic);
var parser = try js_parser.Parser.init(opts, &log, &source, alloc.dynamic);
var res = try parser.parse();
std.debug.print("{s}", .{res});

22
src/renamer.zig Normal file
View File

@@ -0,0 +1,22 @@
const std = @import("std");
const js_ast = @import("js_ast.zig");
pub const Renamer = struct {
    symbols: js_ast.Symbol.Map,

    /// Create a renamer that resolves output names through `symbols`.
    pub fn init(symbols: js_ast.Symbol.Map) Renamer {
        return Renamer{ .symbols = symbols };
    }

    /// Returns the output name for `ref`, first following symbol links
    /// to the canonical symbol via `Symbol.Map.follow`.
    /// Panics if the resolved ref has no symbol entry — that indicates a
    /// compiler-internal inconsistency, not a user error.
    pub fn nameForSymbol(renamer: *Renamer, ref: js_ast.Ref) string {
        const resolved = renamer.symbols.follow(ref);
        // `{}` (default formatting) — `{s}` is only valid for strings,
        // and `resolved` is a Ref struct.
        const symbol = renamer.symbols.get(resolved) orelse
            std.debug.panic("Internal error: symbol not found for ref: {}", .{resolved});
        return symbol.original_name;
    }
};
pub const DisabledRenamer = struct {
    /// Mirrors Renamer.init's signature so the two types are drop-in
    /// interchangeable; the symbol map is intentionally unused.
    pub fn init(symbols: js_ast.Symbol.Map) DisabledRenamer {
        _ = symbols;
        // Original body returned nothing despite the declared return type;
        // an empty struct value is the only valid result.
        return DisabledRenamer{};
    }

    /// Any call site is a compile-time error: DisabledRenamer exists only
    /// to satisfy the renamer interface when renaming is turned off.
    pub fn nameForSymbol(renamer: *DisabledRenamer, ref: js_ast.Ref) callconv(.Inline) string {
        @compileError("DisabledRenamer called");
    }
};

View File

@@ -25,6 +25,52 @@ pub fn indexOf(self: string, str: u8) ?usize {
return std.mem.indexOf(u8, self, str);
}
/// True if `self` begins with `str`; an empty `str` matches anything.
/// Delegates to the standard library (this file already routes
/// `indexOf` through std.mem the same way).
pub fn startsWith(self: string, str: string) bool {
    return std.mem.startsWith(u8, self, str);
}
/// True if the last byte of `self` is any byte of `str`.
/// Returns false for an empty `self` (the original indexed
/// `self[self.len - 1]` unconditionally, which panics on "").
pub fn endsWithAny(self: string, str: string) bool {
    if (self.len == 0) {
        return false;
    }
    const end = self[self.len - 1];
    for (str) |char| {
        if (char == end) {
            return true;
        }
    }
    return false;
}
// NOTE(review): unimplemented stub — declared to return bool but the body
// is empty, so any call site will fail to compile. The intended semantics
// are not recoverable from this file; implement or delete before use.
pub fn lastNonwhitespace(self: string, str: string) bool {
}
/// Comptime-specialized variant of `endsWithAny`: for short candidate
/// sets (< 10 bytes) the comparison loop is unrolled at compile time;
/// longer sets fall back to the runtime version.
/// Returns false for an empty `self` (mirrors the `endsWithAny` guard).
pub fn endsWithAnyComptime(self: string, comptime str: string) bool {
    if (str.len < 10) {
        if (self.len == 0) {
            return false;
        }
        const last = self[self.len - 1];
        // `inline for`, not `inline while`: `while` cannot take a
        // slice-with-capture, so the original did not compile.
        inline for (str) |char| {
            if (char == last) {
                return true;
            }
        }
        return false;
    } else {
        return endsWithAny(self, str);
    }
}
/// Byte-wise equality of `self` and `other`. `other` is anytype so both
/// slices and string-literal array pointers coerce; comparison itself is
/// std.mem.eql (never `==`, which does not compare slice contents).
pub fn eql(self: string, other: anytype) bool {
    return std.mem.eql(u8, self, other);
}

View File

@@ -7,6 +7,18 @@ pub const MutableString = struct {
allocator: *std.mem.Allocator,
list: std.ArrayListUnmanaged(u8),
/// std.io.Writer adapter for MutableString. The context must be a
/// *pointer* — `writeAll` mutates the list — so the Writer is
/// parameterized over `*MutableString`, matching what `writer()`
/// actually stores (the original used `@This()` by value, which does
/// not accept the `self` pointer assigned below).
pub const Writer = std.io.Writer(*MutableString, anyerror, MutableString.writeAll);

/// Get a std.io-compatible writer that appends to this buffer.
pub fn writer(self: *MutableString) Writer {
    return Writer{
        .context = self,
    };
}

/// Writer write-fn: appends `bytes` and reports how many bytes were
/// consumed by THIS call (`bytes.len`). Returning the buffer's total
/// length, as the original did, would make std.io.Writer mis-track
/// partial writes. Takes `[]const u8` as the Writer interface requires.
pub fn writeAll(self: *MutableString, bytes: []const u8) anyerror!usize {
    try self.list.appendSlice(self.allocator, bytes);
    return bytes.len;
}
pub fn init(allocator: *std.mem.Allocator, capacity: usize) !MutableString {
return MutableString{ .allocator = allocator, .list = try std.ArrayListUnmanaged(u8).initCapacity(allocator, capacity) };
}
@@ -61,25 +73,29 @@ pub const MutableString = struct {
try self.list.replaceRange(self.allocator, 0, std.mem.len(str[0..]), str[0..]);
}
}
pub fn growBy(self: *MutableString, amount: usize) callconv(.Inline) !void {
try self.list.ensureCapacity(self.allocator, self.list.capacity + amount);
}
pub fn deinit(self: *MutableString) !void {
self.list.deinit(self.allocator);
}
pub fn appendChar(self: *MutableString, char: u8) !void {
pub fn appendChar(self: *MutableString, char: u8) callconv(.Inline) !void {
try self.list.append(self.allocator, char);
}
pub fn appendCharAssumeCapacity(self: *MutableString, char: u8) void {
pub fn appendCharAssumeCapacity(self: *MutableString, char: u8) callconv(.Inline) void {
self.list.appendAssumeCapacity(char);
}
pub fn append(self: *MutableString, char: []const u8) !void {
pub fn append(self: *MutableString, char: []const u8) callconv(.Inline) !void {
try self.list.appendSlice(self.allocator, char);
}
pub fn appendAssumeCapacity(self: *MutableString, char: []const u8) !void {
try self.list.appendSliceAssumeCapacity(self.allocator, char);
pub fn appendAssumeCapacity(self: *MutableString, char: []const u8) callconv(.Inline) void {
self.list.appendSliceAssumeCapacity(
char,
);
}
pub fn lenI(self: *MutableString) callconv(.Inline) i32 {
return @intCast(i32, self.list.items.len);
}
pub fn toOwnedSlice(self: *MutableString) string {