Mirror of https://github.com/oven-sh/bun, synced 2026-02-09 10:28:47 +00:00
wip
28  .vscode/launch.json  vendored
@@ -2,22 +2,19 @@
"version": "0.2.0",
"configurations": [
{
"name": "Test",
"type": "lldb",
"name": "(lldb) Launch",
"type": "cppdbg",
"request": "launch",
"stdio": null,
"stopOnEntry": false,
"program": "/usr/local/bin/zig",
"program": "/Users/jarredsumner/Code/esdev/src/zig-cache/o/b57013855157d9a38baa6327511eaf3e/test",
"cwd": "${workspaceFolder}",
"args": ["test", "${file}"],
"presentation": {
"hidden": false,
"group": "",
"order": 1
},
"env": {
"TERM": "xterm"
}
"args": ["/Users/jarredsumner/Builds/zig/build/bin/zig"],
"stopAtEntry": false,
"environment": [],
"miDebuggerPath": "/usr/local/bin/lldb-mi",
"MIMode": "lldb",
"targetArchitecture": "x64",

"externalConsole": false
},

{
@@ -25,11 +22,12 @@
"type": "cppdbg",
"request": "launch",
"program": "${workspaceFolder}/zig-cache/bin/esdev",
"args": [],
"args": ["/Users/jarredsumner/Code/devserverless/build.js"],
"stopAtEntry": false,
"cwd": "${workspaceFolder}",
"environment": [],
"externalConsole": false,
// "preLaunchTask": "build",
"MIMode": "lldb",
"internalConsoleOptions": "openOnSessionStart",
"logging": {
7  .vscode/tasks.json  vendored
@@ -3,9 +3,8 @@
"tasks": [
{
"label": "build",
"type": "shell",
"type": "process",
"command": "zig build",

"group": {
"kind": "build",
"isDefault": true
@@ -24,9 +23,9 @@
},
{
"label": "test",
"type": "shell",
"type": "process",
"command": "zig",
"args": ["test", "${file}", "-femit-bin=zig-cache/bin/test"],
"args": ["test", "${file}"],

"group": {
"kind": "test",
1  profile.json  Normal file
File diff suppressed because one or more lines are too long
@@ -27,7 +27,7 @@ pub const Ref = packed struct {
inner_index: Int = 0,

// 2 bits of padding for whatever is the parent
pub const Int = u31;
pub const Int = u30;
pub const None = Ref{ .inner_index = std.math.maxInt(Ref.Int) };
pub fn isNull(self: *const Ref) bool {
return self.source_index == std.math.maxInt(Ref.Int) and self.inner_index == std.math.maxInt(Ref.Int);
@@ -55,3 +55,11 @@ pub const RequireOrImportMeta = struct {
exports_ref: Ref = Ref.None,
is_wrapper_async: bool = false,
};
pub fn debug(comptime fmt: []const u8, args: anytype) callconv(.Inline) void {
// std.debug.print(fmt, args);
}
pub fn debugl(
comptime fmt: []const u8,
) callconv(.Inline) void {
// std.debug.print("{s}\n", .{fmt});
}
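
The Ref hunk above narrows Ref.Int and keeps the max-integer sentinel used by Ref.None and isNull. A minimal, self-contained sketch of that sentinel pattern; RefSketch is a hypothetical stand-in, not the real packed struct:

const std = @import("std");

// Hypothetical stand-in for the Ref layout in the hunk above: two indices
// of the same Int type, with maxInt used as the "null" sentinel.
const Int = u30;
const RefSketch = struct {
    source_index: Int = std.math.maxInt(Int),
    inner_index: Int = 0,

    fn isNull(self: *const RefSketch) bool {
        return self.source_index == std.math.maxInt(Int) and self.inner_index == std.math.maxInt(Int);
    }
};

test "maxInt sentinel behaves like Ref.None" {
    const none = RefSketch{ .inner_index = std.math.maxInt(Int) };
    const some = RefSketch{ .source_index = 1, .inner_index = 2 };
    std.debug.assert(none.isNull());
    std.debug.assert(!some.isNull());
}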
155  src/defines.zig
@@ -1,12 +1,159 @@
const std = @import("std");
const js_ast = @import("./js_ast.zig");
const alloc = @import("alloc.zig");

usingnamespace @import("strings.zig");

const GlobalDefinesKey = @import("./defines-table.zig").GlobalDefinesKey;

pub const defaultIdentifierDefines = comptime {};
const Globals = struct {
pub const Undefined = js_ast.E.Undefined{};
pub const UndefinedPtr = &Globals.Undefined;

pub const IdentifierDefine = struct {};
pub const NaN = js_ast.E.Number{ .value = std.math.nan(f64) };
pub const NanPtr = &Globals.NaN;

pub const DotDefine = struct {};
pub const Infinity = js_ast.E.Number{ .value = std.math.inf(f64) };
pub const InfinityPtr = &Globals.Infinity;
};

pub const Defines = struct {};
pub const DefineData = struct {
value: js_ast.Expr.Data = DefaultValue,

// True if accessing this value is known to not have any side effects. For
// example, a bare reference to "Object.create" can be removed because it
// does not have any observable side effects.
can_be_removed_if_unused: bool = false,

// True if a call to this value is known to not have any side effects. For
// example, a bare call to "Object()" can be removed because it does not
// have any observable side effects.
call_can_be_unwrapped_if_unused: bool = false,

pub const DefaultValue = js_ast.Expr.Data{ .e_undefined = Globals.UndefinedPtr };

// All the globals have the same behavior.
// So we can create just one struct for it.
pub const GlobalDefineData = DefineData{};

pub fn merge(a: DefineData, b: DefineData) DefineData {
return DefineData{
.value = b.value,
.can_be_removed_if_unused = a.can_be_removed_if_unused,
.call_can_be_unwrapped_if_unused = a.call_can_be_unwrapped_if_unused,
};
}
};

fn arePartsEqual(a: []string, b: []string) bool {
if (a.len != b.len) {
return false;
}

var i: usize = 0;
while (i < a.len) : (i += 1) {
if (!strings.eql(a[i], b[i])) {
return false;
}
}

return true;
}

pub const IdentifierDefine = DefineData;

pub const DotDefine = struct {
parts: []string,
data: DefineData,
};

pub const Define = struct {
identifiers: std.StringHashMapUnmanaged(IdentifierDefine),
dots: std.StringHashMapUnmanaged([]DotDefine),
allocator: *std.mem.Allocator,

pub fn init(allocator: *std.mem.Allocator, user_defines: std.StringHashMap(DefineData)) !*@This() {
var define = try allocator.create(Define);
define.allocator = allocator;
try define.identifiers.ensureCapacity(allocator, 641);
try define.dots.ensureCapacity(allocator, 38);

// Step 1. Load the globals into the hash tables
for (GlobalDefinesKey) |global| {
if (global.len == 1) {
// TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
define.identifiers.putAssumeCapacity(global[0], IdentifierDefine.GlobalDefineData);
} else {
// TODO: when https://github.com/ziglang/zig/pull/8596 is merged, switch to putAssumeCapacityNoClobber
define.dots.putAssumeCapacity(global[global.len - 1], DotDefine{
.parts = global[0 .. global.len - 1],
.data = DefineData.GlobalDefineData,
});
}
}

// Step 2. Swap in certain literal values because those can be constant folded
define.identifiers.putAssumeCapacity("undefined", DefineData{
.value = js_ast.Expr.Data{ .e_undefined = Globals.UndefinedPtr },
});
define.identifiers.putAssumeCapacity("NaN", DefineData{
.value = js_ast.Expr.Data{ .e_number = Globals.NanPtr },
});
define.identifiers.putAssumeCapacity("Infinity", DefineData{
.value = js_ast.Expr.Data{ .e_number = Globals.InfinityPtr },
});

// Step 3. Load user data into hash tables
// At this stage, user data has already been validated.
if (user_defines.count() > 0) {
var iter = user_defines.iterator();
while (iter.next()) |user_define| {
// If it has a dot, then it's a DotDefine.
// e.g. process.env.NODE_ENV
if (strings.lastIndexOfChar(user_define.key, '.')) |last_dot| {
const tail = user_define.key[last_dot + 1 .. user_define.key.len];
const parts = std.mem.tokenize(user_define.key[0..last_dot], ".").rest();
var didFind = false;
var initial_values = &([_]DotDefine{});

// "NODE_ENV"
if (define.dots.getEntry()) |entry| {
for (entry.value) |*part| {
// ["process", "env"] == ["process", "env"]
if (arePartsEqual(part.parts, parts)) {
part.data = part.data.merge(user_define.value);
didFind = true;
break;
}
}

initial_values = entry.value;
}

if (!didFind) {
var list = try std.ArrayList(DotDefine).initCapacity(allocator, initial_values.len + 1);
if (initial_values.len > 0) {
list.appendSliceAssumeCapacity(initial_values);
}

list.appendAssumeCapacity(DotDefine{
.data = user_define.value,
// TODO: do we need to allocate this?
.parts = parts,
});
try define.dots.put(allocator, tail, list.toOwnedSlice());
}
} else {
// IS_BROWSER
try define.identifiers.put(user_define.key, user_define.value);
}
}
}

return define;
}
};

test "defines" {

}
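
To make Step 3 above concrete, here is a small, self-contained sketch (a hypothetical test, not part of the diff) of how a user define key such as "process.env.NODE_ENV" splits into the hash-table tail and the leading parts that arePartsEqual compares; it uses std.mem.lastIndexOfScalar in place of the project's strings.lastIndexOfChar:

const std = @import("std");

test "splitting a dotted define key" {
    const key = "process.env.NODE_ENV";

    // The final '.' separates the tail ("NODE_ENV"), which becomes the
    // hash-table key, from the leading path checked by arePartsEqual.
    const last_dot = std.mem.lastIndexOfScalar(u8, key, '.').?;
    const tail = key[last_dot + 1 ..];
    std.debug.assert(std.mem.eql(u8, tail, "NODE_ENV"));

    // The leading path tokenizes into ["process", "env"].
    var it = std.mem.tokenize(key[0..last_dot], ".");
    std.debug.assert(std.mem.eql(u8, it.next().?, "process"));
    std.debug.assert(std.mem.eql(u8, it.next().?, "env"));
    std.debug.assert(it.next() == null);
}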
0  src/global_name_parser.zig  Normal file
@@ -93,6 +93,73 @@ pub const Binding = struct {
loc: logger.Loc,
data: B,

pub fn ToExpr(comptime expr_type: type, comptime func_type: anytype) type {
const ExprType = expr_type;
return struct {
context: *ExprType,
allocator: *std.mem.Allocator,
pub const Context = @This();

pub fn wrapIdentifier(ctx: *const Context, loc: logger.Loc, ref: Ref) Expr {
return func_type(ctx.context, loc, ref);
}

pub fn init(context: *ExprType) Context {
return Context{ .context = context, .allocator = context.allocator };
}
};
}

pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr {
var loc = binding.loc;

switch (binding.data) {
.b_missing => {
return Expr.alloc(wrapper.allocator, E.Missing{}, loc);
},

.b_identifier => |b| {
return wrapper.wrapIdentifier(loc, b.ref);
},
.b_array => |b| {
var exprs = wrapper.allocator.alloc(Expr, b.items.len) catch unreachable;
var i: usize = 0;
while (i < exprs.len) : (i += 1) {
const item = b.items[i];
exprs[i] = convert: {
const expr = toExpr(&item.binding, wrapper);
if (b.has_spread and i == exprs.len - 1) {
break :convert Expr.alloc(wrapper.allocator, E.Spread{ .value = expr }, expr.loc);
} else if (item.default_value) |default| {
break :convert Expr.assign(expr, default, wrapper.allocator);
} else {
break :convert expr;
}
};
}

return Expr.alloc(wrapper.allocator, E.Array{ .items = exprs, .is_single_line = b.is_single_line }, loc);
},
.b_object => |b| {
var properties = wrapper.allocator.alloc(G.Property, b.properties.len) catch unreachable;
var i: usize = 0;
while (i < properties.len) : (i += 1) {
const item = b.properties[i];
properties[i] = G.Property{
.flags = item.flags,
.kind = if (item.flags.is_spread) G.Property.Kind.spread else G.Property.Kind.normal,
.value = toExpr(&item.value, wrapper),
.initializer = item.default_value,
};
}
return Expr.alloc(wrapper.allocator, E.Object{ .properties = properties, .is_single_line = b.is_single_line }, loc);
},
else => {
std.debug.panic("Internal error", .{});
},
}
}
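
The ToExpr helper above builds a comptime wrapper type: it captures a context pointer and forwards wrapIdentifier to a comptime-known function. A reduced, self-contained sketch of that pattern follows; Wrapper and FakeParser are hypothetical stand-ins, not the real types:

const std = @import("std");

// Hypothetical reduction of the ToExpr pattern: a comptime-generated struct
// that stores a context pointer and forwards calls to a comptime function.
fn Wrapper(comptime Ctx: type, comptime func: anytype) type {
    return struct {
        context: *Ctx,

        const Self = @This();

        pub fn init(context: *Ctx) Self {
            return Self{ .context = context };
        }

        pub fn wrapIdentifier(self: *const Self, ref: u32) u32 {
            return func(self.context, ref);
        }
    };
}

const FakeParser = struct {
    offset: u32,

    fn wrap(p: *FakeParser, ref: u32) u32 {
        return ref + p.offset;
    }
};

test "comptime wrapper forwards to its context" {
    var parser = FakeParser{ .offset = 100 };
    const w = Wrapper(FakeParser, FakeParser.wrap).init(&parser);
    std.debug.assert(w.wrapIdentifier(7) == 107);
}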

pub const Tag = packed enum {
b_identifier,
b_array,
@@ -511,7 +578,7 @@ pub const Symbol = struct {
// single inner array, so you can join the maps together by just make a
// single outer array containing all of the inner arrays. See the comment on
// "Ref" for more detail.
symbols_for_source: [][]Symbol = undefined,
symbols_for_source: [][]Symbol,

pub fn get(self: *Map, ref: Ref) ?Symbol {
return self.symbols_for_source[ref.source_index][ref.inner_index];
@@ -522,6 +589,10 @@ pub const Symbol = struct {
return Map{ .symbols_for_source = symbols_for_source };
}

pub fn initList(list: [][]Symbol) Map {
return Map{ .symbols_for_source = list };
}

pub fn follow(symbols: *Map, ref: Ref) Ref {
if (symbols.get(ref)) |*symbol| {
const link = symbol.link orelse return ref;
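
The Symbol.Map hunk above stores symbols as an outer slice indexed by ref.source_index and an inner slice indexed by ref.inner_index. A minimal sketch of that two-level lookup; SymbolSketch is a hypothetical stand-in:

const std = @import("std");

const SymbolSketch = struct { original_name: []const u8 };

test "two-level symbol lookup" {
    var file0 = [_]SymbolSketch{ .{ .original_name = "a" }, .{ .original_name = "b" } };
    var file1 = [_]SymbolSketch{.{ .original_name = "c" }};
    var outer = [_][]SymbolSketch{ &file0, &file1 };
    const map: [][]SymbolSketch = &outer;

    // source_index = 1, inner_index = 0 resolves to the first symbol of the
    // second file, mirroring symbols_for_source[ref.source_index][ref.inner_index].
    std.debug.assert(std.mem.eql(u8, map[1][0].original_name, "c"));
}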
@@ -1974,12 +2045,11 @@ pub const Expr = struct {
}
};

pub fn assign(a: *Expr, b: *Expr, allocator: *std.mem.Allocator) Expr {
std.debug.assert(a != b);
pub fn assign(a: Expr, b: Expr, allocator: *std.mem.Allocator) Expr {
return alloc(allocator, E.Binary{
.op = .bin_assign,
.left = a.*,
.right = b.*,
.left = a,
.right = b,
}, a.loc);
}
pub fn at(expr: *Expr, t: anytype, allocator: *std.mem.allocator) callconv(.Inline) Expr {
@@ -2061,15 +2131,13 @@ pub const Expr = struct {
return null;
}

pub fn assignStmt(a: *Expr, b: *Expr, allocator: *std.mem.Allocator) Stmt {
pub fn assignStmt(a: Expr, b: Expr, allocator: *std.mem.Allocator) Stmt {
return Stmt.alloc(
allocator,
S.SExpr{
.op = .assign,
.left = a,
.right = b,
.value = Expr.assign(a, b, allocator),
},
loc,
a.loc,
);
}

@@ -2883,7 +2951,9 @@ pub const Scope = struct {
}
};

pub fn printmem(comptime format: string, args: anytype) void {}
pub fn printmem(comptime format: string, args: anytype) void {
// std.debug.print(format, args);
}

test "Binding.init" {
var binding = Binding.alloc(
@@ -3073,3 +3143,4 @@ test "Expr.init" {
// Stmt | 192
// STry | 384
// -- ESBuild bit sizes
2566  src/js_lexer.zig
File diff suppressed because it is too large
@@ -231,6 +231,13 @@ pub const PropertyModifierKeyword = enum {
});
};

pub const TypeScriptAccessibilityModifier = std.ComptimeStringMap(u1, .{
.{ "public", 1 },
.{ "private", 1 },
.{ "protected", 1 },
.{ "readonly", 1 },
});

pub const TokenEnumType = std.EnumArray(T, []u8);

pub const tokenToString = comptime {
@@ -532,6 +539,7 @@ pub var jsxEntity: JSXEntityMap = undefined;

pub fn initJSXEntityMap() !void {
jsxEntity = JSXEntityMap.init(alloc.dynamic);
// return jsxEntity;
jsxEntity.ensureCapacity(255) catch unreachable;

jsxEntity.putAssumeCapacity("quot", @as(CodePoint, 0x0022));
@@ -793,14 +801,14 @@ test "tokenToString" {
expectString(tokenToString.get(T.t_end_of_file), "end of file");
}

test "jsxEntity" {
try alloc.setup(std.heap.page_allocator);
// test "jsxEntity" {
// try alloc.setup(std.heap.page_allocator);

initJSXEntityMap() catch |err| {
@panic(@errorName(err));
};
// initJSXEntityMap() catch |err| {
// @panic(@errorName(err));
// };

if (jsxEntity.get("sim")) |v| {
expect(v == 0x223C);
}
}
// if (jsxEntity.get("sim")) |v| {
// expect(v == 0x223C);
// }
// }
1322  src/js_parser.zig
File diff suppressed because it is too large
@@ -302,6 +302,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
p.print("}");
}
pub fn printDecls(p: *Printer, keyword: string, decls: []G.Decl, flags: ExprFlag) void {
debug("<printDecls>\n {s}", .{decls});
defer debug("</printDecls>", .{});
p.print(keyword);
p.printSpace();

@@ -330,6 +332,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
pub fn addSourceMapping(p: *Printer, loc: logger.Loc) void {}

pub fn printSymbol(p: *Printer, ref: Ref) void {
debug("<printSymbol>\n {s}", .{ref});
defer debugl("</printSymbol>");
const name = p.renamer.nameForSymbol(ref);

p.printIdentifier(name);
@@ -781,6 +785,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
pub fn printExpr(p: *Printer, expr: Expr, level: Level, _flags: ExprFlag) void {
p.addSourceMapping(expr.loc);
var flags = _flags;
debugl("<printExpr>");
defer debugl("</printExpr>");

switch (expr.data) {
.e_missing => |e| {},
@@ -1106,6 +1112,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
.e_function => |e| {
const n = p.js.lenI();
var wrap = p.stmt_start == n or p.export_default_start == n;

if (wrap) {
p.print("(");
}
@@ -1123,6 +1130,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
if (e.func.name) |sym| {
p.printSymbol(sym.ref orelse std.debug.panic("internal error: expected E.Function's name symbol to have a ref\n{s}", .{e.func}));
}

p.printFunc(e.func);
if (wrap) {
p.print(")");
@@ -1592,6 +1600,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}

pub fn printProperty(p: *Printer, item: G.Property) void {
debugl("<printProperty>");
defer debugl("</printProperty>");
if (item.kind == .spread) {
p.print("...");
p.printExpr(item.value.?, .comma, ExprFlag.None());
@@ -1748,6 +1758,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}

pub fn printBinding(p: *Printer, binding: Binding) void {
debug("<printBinding>\n {s}", .{binding});
defer debugl("</printBinding>");
p.addSourceMapping(binding.loc);

switch (binding.data) {
@@ -1903,6 +1915,8 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
}

pub fn printStmt(p: *Printer, stmt: Stmt) !void {
debug("<printStmt>: {s}\n", .{stmt});
defer debug("</printStmt>: {s}\n", .{stmt});
p.comptime_flush();

p.addSourceMapping(stmt.loc);
@@ -2682,9 +2696,7 @@ pub fn NewPrinter(comptime ascii_only: bool) type {
.js = js,
.writer = js.writer(),
.linker = linker,
.renamer = rename.Renamer{
.symbols = symbols,
},
.renamer = rename.Renamer.init(symbols),
};
}
};

@@ -36,9 +36,9 @@ const Level = js_ast.Op.Level;
const Op = js_ast.Op;
const Scope = js_ast.Scope;
const locModuleScope = logger.Loc.Empty;
const Lexer = js_lexer.Lexer;

fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
const Lexer = if (opts.allow_comments) js_lexer.TSConfigJSONLexer else js_lexer.JSONLexer;
return struct {
lexer: Lexer,
source: *logger.Source,
@@ -46,12 +46,21 @@ fn JSONLikeParser(opts: js_lexer.JSONOptions) type {
allocator: *std.mem.Allocator,

pub fn init(allocator: *std.mem.Allocator, source: *logger.Source, log: *logger.Log) !Parser {
return Parser{
.lexer = try Lexer.init(log, source, allocator),
.allocator = allocator,
.log = log,
.source = source,
};
if (opts.allow_comments) {
return Parser{
.lexer = try Lexer.initTSConfig(log, source, allocator),
.allocator = allocator,
.log = log,
.source = source,
};
} else {
return Parser{
.lexer = try Lexer.initJSON(log, source, allocator),
.allocator = allocator,
.log = log,
.source = source,
};
}
}

const Parser = @This();
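
The JSONLikeParser.init change above branches at comptime on opts.allow_comments to pick which lexer initializer to call. A self-contained sketch of that comptime-option branching; Options and ParserSketch are hypothetical stand-ins:

const std = @import("std");

const Options = struct { allow_comments: bool = false };

// Hypothetical reduction: the comptime option decides which "lexer mode"
// the generated parser type uses, mirroring the initTSConfig/initJSON split.
fn ParserSketch(comptime opts: Options) type {
    return struct {
        mode: []const u8,

        pub fn init() @This() {
            if (opts.allow_comments) {
                return .{ .mode = "tsconfig" };
            } else {
                return .{ .mode = "json" };
            }
        }
    };
}

test "comptime option selects the lexer mode" {
    std.debug.assert(std.mem.eql(u8, ParserSketch(.{ .allow_comments = true }).init().mode, "tsconfig"));
    std.debug.assert(std.mem.eql(u8, ParserSketch(.{}).init().mode, "json"));
}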
@@ -217,6 +226,7 @@ const duplicateKeyJson = "{ \"name\": \"valid\", \"name\": \"invalid\" }";
const js_printer = @import("js_printer.zig");
const renamer = @import("renamer.zig");
const SymbolList = [][]Symbol;

fn expectPrintedJSON(_contents: string, expected: string) void {
if (alloc.dynamic_manager == null) {
@@ -240,7 +250,8 @@ fn expectPrintedJSON(_contents: string, expected: string) void {
.stmts = &([_]Stmt{stmt}),
};
const tree = js_ast.Ast.initTest(&([_]js_ast.Part{part}));
var symbol_map = Symbol.Map{};
var symbols: SymbolList = &([_][]Symbol{tree.symbols});
var symbol_map = js_ast.Symbol.Map.initList(symbols);
if (log.msgs.items.len > 0) {
std.debug.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text });
}

@@ -53,6 +53,7 @@ pub const Location = struct {
length: usize = 0, // in bytes
line_text: ?string = null,
suggestion: ?string = null,
offset: usize = 0,

pub fn init(file: []u8, namespace: []u8, line: i32, column: i32, length: u32, line_text: ?[]u8, suggestion: ?[]u8) Location {
return Location{
@@ -63,6 +64,7 @@ pub const Location = struct {
.length = length,
.line_text = line_text,
.suggestion = suggestion,
.offset = length,
};
}

@@ -76,6 +78,7 @@ pub const Location = struct {
.column = usize2Loc(data.column_count).start,
.length = source.contents.len,
.line_text = source.contents[data.line_start..data.line_end],
.offset = @intCast(usize, std.math.max(r.loc.start, 0)),
};
} else {
return null;
@@ -104,11 +107,27 @@ pub const Msg = struct {
data: Data,
notes: ?[]Data = null,
pub fn doFormat(msg: *const Msg, to: anytype, formatterFunc: @TypeOf(std.fmt.format)) !void {
try formatterFunc(to, "\n\n{s}: {s}\n{s}\n{s}:{}:{}", .{ msg.kind.string(), msg.data.text, msg.data.location.?.line_text, msg.data.location.?.file, msg.data.location.?.line, msg.data.location.?.column });
try formatterFunc(to, "\n\n{s}: {s}\n{s}\n{s}:{}:{} {d}", .{
msg.kind.string(),
msg.data.text,
msg.data.location.?.line_text,
msg.data.location.?.file,
msg.data.location.?.line,
msg.data.location.?.column,
msg.data.location.?.offset,
});
}

pub fn formatNoWriter(msg: *const Msg, comptime formatterFunc: @TypeOf(std.debug.panic)) void {
formatterFunc("\n\n{s}: {s}\n{s}\n{s}:{}:{}", .{ msg.kind.string(), msg.data.text, msg.data.location.?.line_text, msg.data.location.?.file, msg.data.location.?.line, msg.data.location.?.column });
formatterFunc("\n\n{s}: {s}\n{s}\n{s}:{}:{} ({d})", .{
msg.kind.string(),
msg.data.text,
msg.data.location.?.line_text,
msg.data.location.?.file,
msg.data.location.?.line,
msg.data.location.?.column,
msg.data.location.?.offset,
});
}
};

@@ -270,7 +289,12 @@ pub const Source = struct {
// symbol for an "export default" statement will be called "util_default".
identifier_name: string,

pub const ErrorPosition = struct { line_start: usize, line_end: usize, column_count: usize, line_count: usize };
pub const ErrorPosition = struct {
line_start: usize,
line_end: usize,
column_count: usize,
line_count: usize,
};

pub fn initFile(file: fs.File, allocator: *std.mem.Allocator) Source {
var name = file.path.name;

@@ -39,6 +39,7 @@ pub fn main() anyerror!void {
.value = js_ast.StmtOrExpr{ .expr = expr },
.default_name = js_ast.LocRef{ .loc = logger.Loc{}, .ref = Ref{} },
}, logger.Loc{ .start = 0 });

var part = js_ast.Part{
.stmts = &([_]js_ast.Stmt{stmt}),
};
@@ -56,10 +57,11 @@ pub fn main() anyerror!void {
}

var _linker = linker.Linker{};
var symbols: [][]js_ast.Symbol = &([_][]js_ast.Symbol{ast.symbols});
const printed = try js_printer.printAst(
alloc.dynamic,
ast,
js_ast.Symbol.Map{},
js_ast.Symbol.Map.initList(symbols),
false,
js_printer.Options{ .to_module_ref = js_ast.Ref{ .inner_index = 0 } },
&_linker,

@@ -237,3 +237,38 @@ pub fn containsNonBmpCodePointUTF16(_text: JavascriptString) bool {

return false;
}

/// Super simple "perfect hash" algorithm
/// Only really useful for switching on strings
// TODO: can we auto detect and promote the underlying type?
pub fn ExactSizeMatcher(comptime max_bytes: usize) type {
const T = std.meta.Int(
.unsigned,
max_bytes * 8,
);

return struct {
pub fn match(str: anytype) T {
return hash(str) orelse std.math.maxInt(T);
}

pub fn case(comptime str: []const u8) T {
return hash(str) orelse std.math.maxInt(T);
}

fn hash(str: anytype) ?T {
// if (str.len > max_bytes) return null;
var tmp = [_]u8{0} ** max_bytes;
std.mem.copy(u8, &tmp, str);
return std.mem.readIntNative(T, &tmp);
}
};
}

const eight = ExactSizeMatcher(8);

test "ExactSizeMatcher" {
const word = "yield";
expect(eight.match(word) == eight.case("yield"));
expect(eight.match(word) != eight.case("yields"));
}
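
The ExactSizeMatcher above packs a short string into a fixed-width integer so strings can drive a switch. A self-contained sketch of that usage; Matcher here re-derives the idea with hypothetical names rather than importing the real module:

const std = @import("std");

fn Matcher(comptime max_bytes: usize) type {
    const T = std.meta.Int(.unsigned, max_bytes * 8);
    return struct {
        // Pads the string with zero bytes and reads it back as one integer.
        pub fn match(str: []const u8) T {
            var tmp = [_]u8{0} ** max_bytes;
            std.mem.copy(u8, &tmp, str);
            return std.mem.readIntNative(T, &tmp);
        }

        pub fn case(comptime str: []const u8) T {
            return match(str);
        }
    };
}

test "switch on a short keyword" {
    const kw = Matcher(8);
    const is_yield = switch (kw.match("yield")) {
        kw.case("yield") => true,
        else => false,
    };
    std.debug.assert(is_yield);
}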