commit cbf0b77e52 (parent b6e7f01e6a)
Author: Jarred Sumner
Date: 2021-05-27 21:35:28 -07:00

8 changed files with 1717 additions and 838 deletions

@@ -247,7 +247,10 @@ pub const Bundler = struct {
const output_file = try bundler.print(
result,
);
js_ast.Stmt.Data.Store.reset();
js_ast.Expr.Data.Store.reset();
return output_file;
}
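The two `Store.reset()` calls added at the end of this hunk, together with the `Expr.icount` / `Stmt.icount` printouts further down, suggest that statement and expression data are now handed out of a resettable backing store that is reclaimed in bulk once each output file has been printed. A minimal sketch of that pattern in 2021-era Zig; apart from `reset`, the type and method names below are illustrative, not the actual `js_ast` implementation:

```zig
const std = @import("std");

// Hypothetical resettable store: nodes live in one growable buffer, are handed
// out as stable indices, and are reclaimed all at once between output files.
pub fn NewStore(comptime T: type) type {
    return struct {
        const Self = @This();

        items: std.ArrayList(T),

        pub fn init(allocator: *std.mem.Allocator) Self {
            return Self{ .items = std.ArrayList(T).init(allocator) };
        }

        pub fn append(self: *Self, value: T) !u32 {
            const index = @intCast(u32, self.items.items.len);
            try self.items.append(value);
            return index;
        }

        pub fn get(self: *Self, index: u32) *T {
            return &self.items.items[index];
        }

        // What the added reset() calls appear to do per file: drop every node
        // while keeping the buffer's capacity for the next entry point.
        pub fn reset(self: *Self) void {
            self.items.items.len = 0;
        }
    };
}
```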
@@ -286,6 +289,7 @@ pub const Bundler = struct {
js_printer.Options{ .to_module_ref = Ref.RuntimeRef },
&_linker,
);
// allocator.free(result.source.contents);
return options.OutputFile{
.path = out_path,

@@ -387,6 +387,9 @@ pub const Cli = struct {
}
}
Output.println("Expr count: {d}", .{js_ast.Expr.icount});
Output.println("Stmt count: {d}", .{js_ast.Stmt.icount});
if (!did_write) {
for (result.output_files) |file, i| {
try writer.writeAll(file.contents);
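The new `Expr count` / `Stmt count` lines print per-type instance counters. Only the `icount` field name is visible in the diff; a hedged sketch of how such a counter is typically maintained, with everything except `icount` being illustrative:

```zig
pub const Expr = struct {
    // Running total of Expr values constructed during this run; only the
    // `icount` name is taken from the diff, the rest is a sketch.
    pub var icount: usize = 0;

    data: Data,

    pub const Data = union(enum) {
        e_number: f64,
        e_boolean: bool,
    };

    pub fn init(data: Data) Expr {
        icount += 1;
        return Expr{ .data = data };
    }
};
```

The CLI then reports the totals with the `Output.println` calls shown above.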

@@ -63,63 +63,63 @@ pub const DefineData = struct {
var user_defines = UserDefines.init(allocator);
try user_defines.ensureCapacity(defines.count());
var iter = defines.iterator();
while (iter.next()) |entry| {
var splitter = std.mem.split(entry.key, ".");
while (splitter.next()) |part| {
if (!js_lexer.isIdentifier(part)) {
if (strings.eql(part, entry.key)) {
try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" must be a valid identifier", .{entry.key});
} else {
try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" contains invalid identifier \"{s}\"", .{ part, entry.key });
}
break;
}
}
// var iter = defines.iterator();
// while (iter.next()) |entry| {
// var splitter = std.mem.split(entry.key, ".");
// while (splitter.next()) |part| {
// if (!js_lexer.isIdentifier(part)) {
// if (strings.eql(part, entry.key)) {
// try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" must be a valid identifier", .{entry.key});
// } else {
// try log.addErrorFmt(null, logger.Loc{}, allocator, "The define key \"{s}\" contains invalid identifier \"{s}\"", .{ part, entry.key });
// }
// break;
// }
// }
if (js_lexer.isIdentifier(entry.value) and !js_lexer.Keywords.has(entry.value)) {
var ident: *js_ast.E.Identifier = try allocator.create(js_ast.E.Identifier);
ident.ref = Ref.None;
ident.can_be_removed_if_unused = true;
user_defines.putAssumeCapacity(
entry.key,
DefineData{
.value = js_ast.Expr.Data{ .e_identifier = ident },
.original_name = entry.value,
.can_be_removed_if_unused = true,
},
);
// user_defines.putAssumeCapacity(
// entry.key,
// DefineData{ .value = js_ast.Expr.Data{.e_identifier = } },
// );
continue;
}
var _log = log;
var source = logger.Source{
.contents = entry.value,
.path = defines_path,
.identifier_name = "defines",
.key_path = fs.Path.initWithNamespace("defines", "internal"),
};
var expr = try json_parser.ParseJSON(&source, _log, allocator);
var data: js_ast.Expr.Data = undefined;
switch (expr.data) {
.e_missing => {
continue;
},
.e_null, .e_boolean, .e_string, .e_number, .e_object, .e_array => {
data = expr.data;
},
else => {
continue;
},
}
// if (js_lexer.isIdentifier(entry.value) and !js_lexer.Keywords.has(entry.value)) {
// var ident: *js_ast.E.Identifier = try allocator.create(js_ast.E.Identifier);
// ident.ref = Ref.None;
// ident.can_be_removed_if_unused = true;
// user_defines.putAssumeCapacity(
// entry.key,
// DefineData{
// .value = js_ast.Expr.Data{ .e_identifier = ident },
// .original_name = entry.value,
// .can_be_removed_if_unused = true,
// },
// );
// // user_defines.putAssumeCapacity(
// // entry.key,
// // DefineData{ .value = js_ast.Expr.Data{.e_identifier = } },
// // );
// continue;
// }
// var _log = log;
// var source = logger.Source{
// .contents = entry.value,
// .path = defines_path,
// .identifier_name = "defines",
// .key_path = fs.Path.initWithNamespace("defines", "internal"),
// };
// var expr = try json_parser.ParseJSON(&source, _log, allocator);
// var data: js_ast.Expr.Data = undefined;
// switch (expr.data) {
// .e_missing => {
// continue;
// },
// .e_null, .e_boolean, .e_string, .e_number, .e_object, .e_array => {
// data = expr.data;
// },
// else => {
// continue;
// },
// }
user_defines.putAssumeCapacity(entry.key, DefineData{
.value = data,
});
}
// user_defines.putAssumeCapacity(entry.key, DefineData{
// .value = data,
// });
// }
return user_defines;
}
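A user define pairs a (possibly dotted) key with a value that is either a bare identifier or JSON, e.g. `process.env.NODE_ENV` mapped to `"production"`. The loop above rejects keys whose dot-separated segments are not identifiers; a condensed sketch of that check, reusing the same `std.mem.split(...)` call that appears in this file (the identifier test here is a simplified stand-in for `js_lexer.isIdentifier`):

```zig
const std = @import("std");

// Stand-in for js_lexer.isIdentifier; the real check lives in the lexer and
// also handles Unicode identifier characters.
fn isIdentifier(part: []const u8) bool {
    if (part.len == 0) return false;
    for (part) |c, i| {
        const alpha = (c >= 'a' and c <= 'z') or (c >= 'A' and c <= 'Z') or c == '_' or c == '$';
        const digit = c >= '0' and c <= '9';
        if (!(alpha or (i > 0 and digit))) return false;
    }
    return true;
}

// Every dot-separated segment must be an identifier:
// "process.env.NODE_ENV" passes, "process.env.NODE-ENV" is rejected.
fn isValidDefineKey(key: []const u8) bool {
    var splitter = std.mem.split(key, ".");
    while (splitter.next()) |part| {
        if (!isIdentifier(part)) return false;
    }
    return true;
}
```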
@@ -157,117 +157,118 @@ pub const Define = struct {
define.allocator = allocator;
define.identifiers = std.StringHashMap(IdentifierDefine).init(allocator);
define.dots = std.StringHashMap([]DotDefine).init(allocator);
try define.identifiers.ensureCapacity(641);
try define.dots.ensureCapacity(64);
var undefined_val = try allocator.create(js_ast.E.Undefined);
var val = js_ast.Expr.Data{ .e_undefined = undefined_val };
var ident_define = IdentifierDefine{
.value = val,
};
var value_define = DefineData{ .value = val, .valueless = true };
// Step 1. Load the globals into the hash tables
for (GlobalDefinesKey) |global| {
if (global.len == 1) {
// TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
define.identifiers.putAssumeCapacity(global[0], value_define);
} else {
const key = global[global.len - 1];
// TODO: move this to comptime
// TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
if (define.dots.getEntry(key)) |entry| {
var list = try std.ArrayList(DotDefine).initCapacity(allocator, entry.value.len + 1);
list.appendSliceAssumeCapacity(entry.value);
list.appendAssumeCapacity(DotDefine{
.parts = global[0..global.len],
.data = value_define,
});
define.dots.putAssumeCapacity(key, list.toOwnedSlice());
} else {
var list = try std.ArrayList(DotDefine).initCapacity(allocator, 1);
list.appendAssumeCapacity(DotDefine{
.parts = global[0..global.len],
.data = value_define,
});
define.dots.putAssumeCapacity(key, list.toOwnedSlice());
}
}
}
var nan_val = try allocator.create(js_ast.E.Number);
nan_val.value = std.math.nan_f64;
var inf_val = try allocator.create(js_ast.E.Number);
inf_val.value = std.math.inf_f64;
// Step 2. Swap in certain literal values because those can be constant folded
define.identifiers.putAssumeCapacity("undefined", value_define);
define.identifiers.putAssumeCapacity("NaN", DefineData{
.value = js_ast.Expr.Data{ .e_number = nan_val },
});
define.identifiers.putAssumeCapacity("Infinity", DefineData{
.value = js_ast.Expr.Data{ .e_number = inf_val },
});
// Step 3. Load user data into hash tables
// At this stage, user data has already been validated.
if (_user_defines) |user_defines| {
var iter = user_defines.iterator();
while (iter.next()) |user_define| {
// If it has a dot, then it's a DotDefine.
// e.g. process.env.NODE_ENV
if (strings.lastIndexOfChar(user_define.key, '.')) |last_dot| {
const tail = user_define.key[last_dot + 1 .. user_define.key.len];
const remainder = user_define.key[0..last_dot];
const count = std.mem.count(u8, remainder, ".") + 1;
var parts = try allocator.alloc(string, count + 1);
var splitter = std.mem.split(remainder, ".");
var i: usize = 0;
while (splitter.next()) |split| : (i += 1) {
parts[i] = split;
}
parts[i] = tail;
var didFind = false;
var initial_values: []DotDefine = &([_]DotDefine{});
// "NODE_ENV"
if (define.dots.getEntry(tail)) |entry| {
for (entry.value) |*part| {
// ["process", "env"] === ["process", "env"] (if that actually worked)
if (arePartsEqual(part.parts, parts)) {
part.data = part.data.merge(user_define.value);
didFind = true;
break;
}
}
initial_values = entry.value;
}
if (!didFind) {
var list = try std.ArrayList(DotDefine).initCapacity(allocator, initial_values.len + 1);
if (initial_values.len > 0) {
list.appendSliceAssumeCapacity(initial_values);
}
list.appendAssumeCapacity(DotDefine{
.data = user_define.value,
// TODO: do we need to allocate this?
.parts = parts,
});
try define.dots.put(tail, list.toOwnedSlice());
}
} else {
// e.g. IS_BROWSER
try define.identifiers.put(user_define.key, user_define.value);
}
}
}
return define;
// try define.identifiers.ensureCapacity(641);
// try define.dots.ensureCapacity(64);
// var undefined_val = try allocator.create(js_ast.E.Undefined);
// var val = js_ast.Expr.Data{ .e_undefined = undefined_val };
// var ident_define = IdentifierDefine{
// .value = val,
// };
// var value_define = DefineData{ .value = val, .valueless = true };
// // Step 1. Load the globals into the hash tables
// for (GlobalDefinesKey) |global| {
// if (global.len == 1) {
// // TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
// define.identifiers.putAssumeCapacity(global[0], value_define);
// } else {
// const key = global[global.len - 1];
// // TODO: move this to comptime
// // TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
// if (define.dots.getEntry(key)) |entry| {
// var list = try std.ArrayList(DotDefine).initCapacity(allocator, entry.value.len + 1);
// list.appendSliceAssumeCapacity(entry.value);
// list.appendAssumeCapacity(DotDefine{
// .parts = global[0..global.len],
// .data = value_define,
// });
// define.dots.putAssumeCapacity(key, list.toOwnedSlice());
// } else {
// var list = try std.ArrayList(DotDefine).initCapacity(allocator, 1);
// list.appendAssumeCapacity(DotDefine{
// .parts = global[0..global.len],
// .data = value_define,
// });
// define.dots.putAssumeCapacity(key, list.toOwnedSlice());
// }
// }
// }
// var nan_val = try allocator.create(js_ast.E.Number);
// nan_val.value = std.math.nan_f64;
// var inf_val = try allocator.create(js_ast.E.Number);
// inf_val.value = std.math.inf_f64;
// // Step 2. Swap in certain literal values because those can be constant folded
// define.identifiers.putAssumeCapacity("undefined", value_define);
// define.identifiers.putAssumeCapacity("NaN", DefineData{
// .value = js_ast.Expr.Data{ .e_number = nan_val },
// });
// define.identifiers.putAssumeCapacity("Infinity", DefineData{
// .value = js_ast.Expr.Data{ .e_number = inf_val },
// });
// // Step 3. Load user data into hash tables
// // At this stage, user data has already been validated.
// if (_user_defines) |user_defines| {
// var iter = user_defines.iterator();
// while (iter.next()) |user_define| {
// // If it has a dot, then it's a DotDefine.
// // e.g. process.env.NODE_ENV
// if (strings.lastIndexOfChar(user_define.key, '.')) |last_dot| {
// const tail = user_define.key[last_dot + 1 .. user_define.key.len];
// const remainder = user_define.key[0..last_dot];
// const count = std.mem.count(u8, remainder, ".") + 1;
// var parts = try allocator.alloc(string, count + 1);
// var splitter = std.mem.split(remainder, ".");
// var i: usize = 0;
// while (splitter.next()) |split| : (i += 1) {
// parts[i] = split;
// }
// parts[i] = tail;
// var didFind = false;
// var initial_values: []DotDefine = &([_]DotDefine{});
// // "NODE_ENV"
// if (define.dots.getEntry(tail)) |entry| {
// for (entry.value) |*part| {
// // ["process", "env"] === ["process", "env"] (if that actually worked)
// if (arePartsEqual(part.parts, parts)) {
// part.data = part.data.merge(user_define.value);
// didFind = true;
// break;
// }
// }
// initial_values = entry.value;
// }
// if (!didFind) {
// var list = try std.ArrayList(DotDefine).initCapacity(allocator, initial_values.len + 1);
// if (initial_values.len > 0) {
// list.appendSliceAssumeCapacity(initial_values);
// }
// list.appendAssumeCapacity(DotDefine{
// .data = user_define.value,
// // TODO: do we need to allocate this?
// .parts = parts,
// });
// try define.dots.put(tail, list.toOwnedSlice());
// }
// } else {
// // e.g. IS_BROWSER
// try define.identifiers.put(user_define.key, user_define.value);
// }
// }
// }
// return define;
}
};
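In Step 3 above, a dotted key such as `process.env.NODE_ENV` becomes a `DotDefine` keyed by its last segment (`NODE_ENV`), with the full part list `{ "process", "env", "NODE_ENV" }` stored for matching. A reduced sketch of just that splitting step, using the same std calls as the code above (with `strings.lastIndexOfChar` replaced by `std.mem.lastIndexOfScalar` to keep the example self-contained):

```zig
const std = @import("std");

const SplitKey = struct {
    tail: []const u8,
    parts: [][]const u8,
};

// "process.env.NODE_ENV" -> tail "NODE_ENV", parts { "process", "env", "NODE_ENV" }.
// Caller guarantees the key contains at least one '.', as in the branch above.
fn splitDefineKey(allocator: *std.mem.Allocator, key: []const u8) !SplitKey {
    const last_dot = std.mem.lastIndexOfScalar(u8, key, '.').?;
    const tail = key[last_dot + 1 .. key.len];
    const remainder = key[0..last_dot];

    const count = std.mem.count(u8, remainder, ".") + 1;
    var parts = try allocator.alloc([]const u8, count + 1);

    var splitter = std.mem.split(remainder, ".");
    var i: usize = 0;
    while (splitter.next()) |split| : (i += 1) {
        parts[i] = split;
    }
    parts[i] = tail;

    return SplitKey{ .tail = tail, .parts = parts };
}
```

Lookups can then check the cheap tail first and only compare the full part list on a hit, which is what the `define.dots.getEntry(tail)` branch above does.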

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -682,7 +682,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
pub fn isUnboundEvalIdentifier(p: *Printer, value: Expr) bool {
switch (value.data) {
.e_identifier => |ident| {
.e_identifier => {
const ident = value.getIdentifier();
if (ident.ref.is_source_contents_slice) return false;
const symbol = p.symbols.get(p.symbols.follow(ident.ref)) orelse return false;
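From here on, the printer stops capturing union payloads (`.e_identifier => |ident|`) and instead calls getters such as `expr.getIdentifier()` inside each prong. That is the shape you end up with when a union variant carries a small handle that must be resolved through a store rather than a pointer. The diff doesn't show the getter bodies, so everything below is an assumed sketch, not the actual `js_ast` definitions:

```zig
// Assumed sketch of the payload-capture -> getter migration.
const Identifier = struct { name: []const u8 };

// Hypothetical backing storage for identifier nodes.
var identifier_store: [16]Identifier = undefined;

const Expr = struct {
    data: Data,

    const Data = union(enum) {
        e_identifier: u32, // handle into identifier_store, not a pointer
        e_number: f64,
    };

    // Resolves the handle on demand; only meaningful when the active tag
    // is .e_identifier, which is how the printer prongs use it.
    fn getIdentifier(expr: *const Expr) *Identifier {
        return &identifier_store[expr.data.e_identifier];
    }
};

fn nameOf(expr: *const Expr) []const u8 {
    switch (expr.data) {
        // Before this commit: `.e_identifier => |ident| return ident.name;`
        .e_identifier => {
            const ident = expr.getIdentifier();
            return ident.name;
        },
        .e_number => return "<number>",
    }
}
```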
@@ -838,37 +839,41 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
defer debugl("</printExpr>");
switch (expr.data) {
.e_missing => |e| {},
.e_undefined => |e| {
.e_missing => {},
.e_undefined => {
p.printSpaceBeforeIdentifier();
p.printUndefined(level);
},
.e_super => |e| {
.e_super => {
p.printSpaceBeforeIdentifier();
p.print("super");
},
.e_null => |e| {
.e_null => {
p.printSpaceBeforeIdentifier();
p.print("null");
},
.e_this => |e| {
.e_this => {
p.printSpaceBeforeIdentifier();
p.print("this");
},
.e_spread => |e| {
.e_spread => {
const e = expr.getSpread();
p.print("...");
p.printExpr(e.value, .comma, ExprFlag.None());
},
.e_new_target => |e| {
.e_new_target => {
p.printSpaceBeforeIdentifier();
p.print("new.target");
},
.e_import_meta => |e| {
.e_import_meta => {
p.printSpaceBeforeIdentifier();
p.print("import.meta");
},
.e_new => |e| {
.e_new => {
const e = expr.getNew();
const has_pure_comment = e.can_be_unwrapped_if_unused;
const wrap = level.gte(.call) or (has_pure_comment and level.gte(.postfix));
@@ -908,7 +913,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_call => |e| {
.e_call => {
const e = expr.getCall();
var wrap = level.gte(.new) or flags.forbid_call;
var target_flags = ExprFlag.None();
if (e.optional_chain == null) {
@@ -964,10 +971,14 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_require => |e| {
.e_require => {
const e = expr.getRequire();
p.printRequireOrImportExpr(e.import_record_index, &([_]G.Comment{}), level, flags);
},
.e_require_or_require_resolve => |e| {
.e_require_or_require_resolve => {
const e = expr.getRequireOrRequireResolve();
const wrap = level.gte(.new) or flags.forbid_call;
if (wrap) {
p.print("(");
@@ -992,7 +1003,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_import => |e| {
.e_import => {
const e = expr.getImport();
// Handle non-string expressions
if (Ref.isSourceIndexNull(e.import_record_index)) {
const wrap = level.gte(.new) or flags.forbid_call;
@@ -1024,7 +1037,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printRequireOrImportExpr(e.import_record_index, e.leading_interior_comments, level, flags);
}
},
.e_dot => |e| {
.e_dot => {
const e = expr.getDot();
var wrap = false;
if (e.optional_chain == null) {
flags.has_non_optional_chain_parent = false;
@@ -1063,7 +1078,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_index => |e| {
.e_index => {
const e = expr.getIndex();
var wrap = false;
if (e.optional_chain == null) {
flags.has_non_optional_chain_parent = false;
@@ -1086,7 +1103,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}
switch (e.index.data) {
.e_private_identifier => |priv| {
.e_private_identifier => {
const priv = e.index.getPrivateIdentifier();
if (is_optional_chain_start) {
p.print(".");
}
@@ -1104,7 +1122,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_if => |e| {
.e_if => {
const e = expr.getIf();
const wrap = level.gte(.conditional);
if (wrap) {
p.print("(");
@@ -1123,7 +1143,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_arrow => |e| {
.e_arrow => {
const e = expr.getArrow();
const wrap = level.gte(.assign);
if (wrap) {
@@ -1163,7 +1185,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_function => |e| {
.e_function => {
const e = expr.getFunction();
const n = p.js.lenI();
var wrap = p.stmt_start == n or p.export_default_start == n;
@@ -1190,7 +1214,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_class => |e| {
.e_class => {
const e = expr.getClass();
const n = p.js.lenI();
var wrap = p.stmt_start == n or p.export_default_start == n;
if (wrap) {
@@ -1208,7 +1234,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_array => |e| {
.e_array => {
const e = expr.getArray();
p.print("[");
if (e.items.len > 0) {
if (!e.is_single_line) {
@@ -1249,7 +1277,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print("]");
},
.e_object => |e| {
.e_object => {
const e = expr.getObject();
const n = p.js.lenI();
const wrap = p.stmt_start == n or p.arrow_expr_start == n;
@@ -1288,11 +1318,15 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_boolean => |e| {
.e_boolean => {
const e = expr.getBoolean();
p.printSpaceBeforeIdentifier();
p.print(if (e.value) "true" else "false");
},
.e_string => |e| {
.e_string => {
const e = expr.getString();
// If this was originally a template literal, print it as one as long as we're not minifying
if (e.prefer_template) {
p.print("`");
@@ -1306,7 +1340,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printString(e.*, c);
p.print(c);
},
.e_template => |e| {
.e_template => {
const e = expr.getTemplate();
if (e.tag) |tag| {
// Optional chains are forbidden in template tags
if (expr.isOptionalChain()) {
@@ -1341,7 +1377,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}
p.print("`");
},
.e_reg_exp => |e| {
.e_reg_exp => {
const e = expr.getRegExp();
const n = p.js.len();
// Avoid forming a single-line comment
@@ -1354,12 +1392,16 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
// Need a space before the next identifier to avoid it turning into flags
p.prev_reg_exp_end = p.js.lenI();
},
.e_big_int => |e| {
.e_big_int => {
const e = expr.getBigInt();
p.printSpaceBeforeIdentifier();
p.print(e.value);
p.print('n');
},
.e_number => |e| {
.e_number => {
const e = expr.getNumber();
const value = e.value;
const absValue = std.math.fabs(value);
@@ -1399,7 +1441,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.prev_num_end = p.js.lenI();
}
},
.e_identifier => |e| {
.e_identifier => {
const e = expr.getIdentifier();
const name = p.renamer.nameForSymbol(e.ref);
const wrap = p.js.lenI() == p.for_of_init_start and strings.eqlComptime(name, "let");
@@ -1414,7 +1458,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_import_identifier => |e| {
.e_import_identifier => {
const e = expr.getImportIdentifier();
// Potentially use a property access instead of an identifier
const ref = p.symbols.follow(e.ref);
var didPrint = false;
@@ -1427,7 +1473,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
var wrap = false;
if (p.call_target) |target| {
wrap = e.was_originally_identifier and target.e_import_identifier == e;
wrap = e.was_originally_identifier and target.e_import_identifier.index == expr.data.e_import_identifier.index;
}
if (wrap) {
@@ -1456,7 +1502,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.printSymbol(e.ref);
}
},
.e_await => |e| {
.e_await => {
const e = expr.getAwait();
const wrap = level.gte(.prefix);
if (wrap) {
@@ -1472,7 +1520,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_yield => |e| {
.e_yield => {
const e = expr.getYield();
const wrap = level.gte(.assign);
if (wrap) {
p.print("(");
@@ -1493,7 +1543,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_unary => |e| {
.e_unary => {
const e = expr.getUnary();
const entry: Op = Op.Table.get(e.op);
const wrap = level.gte(entry.level);
@@ -1524,7 +1576,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print(")");
}
},
.e_binary => |e| {
.e_binary => {
const e = expr.getBinary();
const entry: Op = Op.Table.get(e.op);
var wrap = level.gte(entry.level) or (e.op == Op.Code.bin_in and flags.forbid_in);
@@ -1559,7 +1613,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
// "??" can't directly contain "||" or "&&" without being wrapped in parentheses
.bin_nullish_coalescing => {
switch (e.left.data) {
.e_binary => |left| {
.e_binary => {
const left = e.left.getBinary();
switch (left.op) {
.bin_logical_and, .bin_logical_or => {
left_level = .prefix;
@@ -1571,7 +1626,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}
switch (e.right.data) {
.e_binary => |right| {
.e_binary => {
const right = e.right.getBinary();
switch (right.op) {
.bin_logical_and, .bin_logical_or => {
right_level = .prefix;
@@ -1585,7 +1641,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
// "**" can't contain certain unary expressions
.bin_pow => {
switch (e.left.data) {
.e_unary => |left| {
.e_unary => {
const left = e.left.getUnary();
if (left.op.unaryAssignTarget() == .none) {
left_level = .call;
}
@@ -1601,7 +1658,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
// Special-case "#foo in bar"
if (e.op == .bin_in and @as(Expr.Tag, e.left.data) == .e_private_identifier) {
p.printSymbol(e.left.data.e_private_identifier.ref);
p.printSymbol(e.left.getPrivateIdentifier().ref);
} else {
flags.forbid_in = true;
p.printExpr(e.left, left_level, flags);
@@ -1701,7 +1758,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
if (item.value) |val| {
switch (val.data) {
.e_function => |func| {
.e_function => {
const func = val.getFunction();
if (item.flags.is_method) {
if (func.func.flags.is_async) {
p.printSpaceBeforeIdentifier();
@@ -1728,7 +1786,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
if (item.value) |val| {
switch (val.data) {
.e_function => |func| {
.e_function => {
const func = val.getFunction();
if (item.flags.is_method) {
p.printFunc(func.func);
return;
@@ -1749,10 +1808,11 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}
switch (_key.data) {
.e_private_identifier => |key| {
p.printSymbol(key.ref);
.e_private_identifier => {
p.printSymbol(_key.getPrivateIdentifier().ref);
},
.e_string => |key| {
.e_string => {
const key = _key.getString();
p.addSourceMapping(_key.loc);
if (key.isUTF8()) {
p.printSpaceBeforeIdentifier();
@@ -1761,7 +1821,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
// Use a shorthand property if the names are the same
if (item.value) |val| {
switch (val.data) {
.e_identifier => |e| {
.e_identifier => {
const e = val.getIdentifier();
// TODO: is needing to check item.flags.was_shorthand a bug?
// esbuild doesn't have to do that...
// maybe it's a symptom of some other underlying issue
@@ -1774,7 +1836,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}
// if (strings) {}
},
.e_import_identifier => |e| {
.e_import_identifier => {
const e = val.getImportIdentifier();
const ref = p.symbols.follow(e.ref);
if (p.symbols.get(ref)) |symbol| {
if (symbol.namespace_alias == null and strings.eql(key.utf8, p.renamer.nameForSymbol(e.ref))) {
@@ -1795,7 +1859,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
// Use a shorthand property if the names are the same
if (item.value) |val| {
switch (val.data) {
.e_identifier => |e| {
.e_identifier => {
const e = val.getIdentifier();
// TODO: is needing to check item.flags.was_shorthand a bug?
// esbuild doesn't have to do that...
// maybe it's a symptom of some other underlying issue
@@ -1808,7 +1874,9 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}
// if (strings) {}
},
.e_import_identifier => |e| {
.e_import_identifier => {
const e = val.getImportIdentifier();
const ref = p.symbols.follow(e.ref);
if (p.symbols.get(ref)) |symbol| {
if (symbol.namespace_alias == null and strings.utf16EqlString(key.value, p.renamer.nameForSymbol(e.ref))) {
@@ -1843,8 +1911,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
if (item.kind != .normal) {
switch (item.value.?.data) {
.e_function => |func| {
p.printFunc(func.func);
.e_function => {
p.printFunc(item.value.?.getFunction().func);
return;
},
else => {},
@@ -1853,7 +1921,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
if (item.value) |val| {
switch (val.data) {
.e_function => |f| {
.e_function => {
const f = val.getFunction();
if (item.flags.is_method) {
p.printFunc(f.func);
@@ -1979,7 +2048,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}
switch (property.key.data) {
.e_string => |str| {
.e_string => {
const str = property.key.getString();
if (str.isUTF8()) {
p.addSourceMapping(property.key.loc);
p.printSpaceBeforeIdentifier();
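One subtle consequence of the handle migration appears in the `e_import_identifier` prong above: the `call_target` identity check changes from pointer equality (`target.e_import_identifier == e`) to comparing `.index` fields, presumably because the payload is now a small handle value rather than a pointer. A tiny sketch of that comparison, with assumed types (only the `.index` field name comes from the diff):

```zig
// Assumed handle type for import identifiers.
const ImportIdentifierHandle = struct { index: u32 };

// "Same node" now means "same index into the store", not "same pointer".
fn isSameImportIdentifier(a: ImportIdentifierHandle, b: ImportIdentifierHandle) bool {
    return a.index == b.index;
}
```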

@@ -79,8 +79,8 @@ pub const PackageJSON = struct {
.main_fields = MainFieldMap.init(r.allocator),
};
if (json.getProperty("type")) |type_json| {
if (type_json.expr.getString(r.allocator)) |type_str| {
if (json.asProperty("type")) |type_json| {
if (type_json.expr.asString(r.allocator)) |type_str| {
switch (options.ModuleType.List.get(type_str) orelse options.ModuleType.unknown) {
.cjs => {
package_json.module_type = .cjs;
@@ -105,10 +105,10 @@ pub const PackageJSON = struct {
// Read the "main" fields
for (r.opts.main_fields) |main| {
if (json.getProperty(main)) |main_json| {
if (json.asProperty(main)) |main_json| {
const expr: js_ast.Expr = main_json.expr;
if ((expr.getString(r.allocator))) |str| {
if ((expr.asString(r.allocator))) |str| {
if (str.len > 0) {
package_json.main_fields.put(main, str) catch unreachable;
}
@@ -129,14 +129,15 @@ pub const PackageJSON = struct {
// "./dist/index.node.esm.js": "./dist/index.browser.esm.js"
// },
//
if (json.getProperty("browser")) |browser_prop| {
if (json.asProperty("browser")) |browser_prop| {
switch (browser_prop.expr.data) {
.e_object => |obj| {
.e_object => {
const obj = browser_prop.expr.getObject();
// The value is an object
// Remap all files in the browser field
for (obj.properties) |prop| {
var _key_str = (prop.key orelse continue).getString(r.allocator) orelse continue;
var _key_str = (prop.key orelse continue).asString(r.allocator) orelse continue;
const value: js_ast.Expr = prop.value orelse continue;
// Normalize the path so we can compare against it without getting
@@ -151,11 +152,13 @@ pub const PackageJSON = struct {
const key = r.allocator.dupe(u8, r.fs.normalize(_key_str)) catch unreachable;
switch (value.data) {
.e_string => |str| {
.e_string => {
const str = value.getString();
// If this is a string, it's a replacement package
package_json.browser_map.put(key, str.string(r.allocator) catch unreachable) catch unreachable;
},
.e_boolean => |boolean| {
.e_boolean => {
const boolean = value.getBoolean();
if (!boolean.value) {
package_json.browser_map.put(key, "") catch unreachable;
}
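Alongside the handle changes, this file (and tsconfig_json below) renames the JSON helpers from `getProperty`/`getString`/`getBool` to `asProperty`/`asString`/`asBool`, i.e. optional-returning accessors unwrapped with `if (...) |...|` and `orelse`. Their exact signatures aren't shown in the diff; in the real code `asProperty` returns a small query result with an `.expr` field and `asString` takes an allocator, so the following is only a simplified sketch of the access pattern:

```zig
const std = @import("std");

const Expr = struct {
    const Property = struct { key: []const u8, value: Expr };

    const Data = union(enum) {
        e_string: []const u8,
        e_object: []const Property,
        e_null: void,
    };

    data: Data,

    // "as*" accessors return null instead of erroring when the variant
    // doesn't match, which keeps call sites to a single `if`/`orelse`.
    fn asString(expr: Expr) ?[]const u8 {
        return switch (expr.data) {
            .e_string => |str| str,
            else => null,
        };
    }

    fn asProperty(expr: Expr, name: []const u8) ?Expr {
        switch (expr.data) {
            .e_object => |props| {
                for (props) |prop| {
                    if (std.mem.eql(u8, prop.key, name)) return prop.value;
                }
                return null;
            },
            else => return null,
        }
    }
};

// Usage mirroring the "type" lookup at the top of this file's hunk.
fn moduleTypeOf(json: Expr) ?[]const u8 {
    const type_json = json.asProperty("type") orelse return null;
    return type_json.asString();
}
```

The same rename shows up throughout the tsconfig parsing below (`asProperty("compilerOptions")`, `asString`, `asBool`).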

@@ -74,9 +74,9 @@ pub const TSConfigJSON = struct {
var result: TSConfigJSON = TSConfigJSON{ .abs_path = source.key_path.text, .paths = PathsMap.init(allocator) };
errdefer allocator.free(result.paths);
if (json.getProperty("extends")) |extends_value| {
if (json.asProperty("extends")) |extends_value| {
log.addWarning(&source, extends_value.loc, "\"extends\" is not implemented yet") catch unreachable;
// if ((extends_value.expr.getString(allocator) catch null)) |str| {
// if ((extends_value.expr.asString(allocator) catch null)) |str| {
// if (extends(str, source.rangeOfString(extends_value.loc))) |base| {
// result.jsx = base.jsx;
// result.base_url_for_paths = base.base_url_for_paths;
@@ -90,48 +90,48 @@ pub const TSConfigJSON = struct {
var has_base_url = false;
// Parse "compilerOptions"
if (json.getProperty("compilerOptions")) |compiler_opts| {
if (json.asProperty("compilerOptions")) |compiler_opts| {
// Parse "baseUrl"
if (compiler_opts.expr.getProperty("baseUrl")) |base_url_prop| {
if ((base_url_prop.expr.getString(allocator))) |base_url| {
if (compiler_opts.expr.asProperty("baseUrl")) |base_url_prop| {
if ((base_url_prop.expr.asString(allocator))) |base_url| {
result.base_url = base_url;
has_base_url = true;
}
}
// Parse "jsxFactory"
if (compiler_opts.expr.getProperty("jsxFactory")) |jsx_prop| {
if (jsx_prop.expr.getString(allocator)) |str| {
if (compiler_opts.expr.asProperty("jsxFactory")) |jsx_prop| {
if (jsx_prop.expr.asString(allocator)) |str| {
result.jsx.factory = try parseMemberExpressionForJSX(log, &source, jsx_prop.loc, str, allocator);
}
}
// Parse "jsxFragmentFactory"
if (compiler_opts.expr.getProperty("jsxFactory")) |jsx_prop| {
if (jsx_prop.expr.getString(allocator)) |str| {
if (compiler_opts.expr.asProperty("jsxFactory")) |jsx_prop| {
if (jsx_prop.expr.asString(allocator)) |str| {
result.jsx.fragment = try parseMemberExpressionForJSX(log, &source, jsx_prop.loc, str, allocator);
}
}
// Parse "jsxImportSource"
if (compiler_opts.expr.getProperty("jsxImportSource")) |jsx_prop| {
if (jsx_prop.expr.getString(allocator)) |str| {
if (compiler_opts.expr.asProperty("jsxImportSource")) |jsx_prop| {
if (jsx_prop.expr.asString(allocator)) |str| {
result.jsx.import_source = str;
}
}
// Parse "useDefineForClassFields"
if (compiler_opts.expr.getProperty("useDefineForClassFields")) |use_define_value_prop| {
if (use_define_value_prop.expr.getBool()) |val| {
if (compiler_opts.expr.asProperty("useDefineForClassFields")) |use_define_value_prop| {
if (use_define_value_prop.expr.asBool()) |val| {
result.use_define_for_class_fields = val;
}
}
// Parse "importsNotUsedAsValues"
if (compiler_opts.expr.getProperty("importsNotUsedAsValues")) |jsx_prop| {
if (compiler_opts.expr.asProperty("importsNotUsedAsValues")) |jsx_prop| {
// This should never allocate since it will be utf8
if ((jsx_prop.expr.getString(allocator))) |str| {
if ((jsx_prop.expr.asString(allocator))) |str| {
switch (ImportsNotUsedAsValue.List.get(str) orelse ImportsNotUsedAsValue.invalid) {
.preserve, .err => {
result.preserve_imports_not_used_as_values = true;
@@ -145,14 +145,15 @@ pub const TSConfigJSON = struct {
}
// Parse "paths"
if (compiler_opts.expr.getProperty("paths")) |paths_prop| {
if (compiler_opts.expr.asProperty("paths")) |paths_prop| {
switch (paths_prop.expr.data) {
.e_object => |paths| {
.e_object => {
var paths = paths_prop.expr.getObject();
result.base_url_for_paths = result.base_url;
result.paths = PathsMap.init(allocator);
for (paths.properties) |property| {
const key_prop = property.key orelse continue;
const key = (key_prop.getString(allocator)) orelse continue;
const key = (key_prop.asString(allocator)) orelse continue;
if (!TSConfigJSON.isValidTSConfigPathNoBaseURLPattern(key, log, &source, allocator, key_prop.loc)) {
continue;
@@ -182,13 +183,15 @@ pub const TSConfigJSON = struct {
// Matching "folder1/file2" should first check "projectRoot/folder1/file2"
// and then, if that didn't work, also check "projectRoot/generated/folder1/file2".
switch (value_prop.data) {
.e_array => |array| {
.e_array => {
const array = value_prop.getArray();
if (array.items.len > 0) {
var values = allocator.alloc(string, array.items.len) catch unreachable;
errdefer allocator.free(values);
var count: usize = 0;
for (array.items) |expr| {
if ((expr.getString(allocator))) |str| {
if ((expr.asString(allocator))) |str| {
if (TSConfigJSON.isValidTSConfigPathPattern(
str,
log,