mark as noalias (#20262)

Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Authored by Jarred Sumner on 2025-06-08 08:20:09 -07:00, committed by GitHub
parent 498186764a, commit df84f665a5
3 changed files with 138 additions and 138 deletions
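For readers skimming the diff: in Zig, `noalias` on a pointer parameter asserts that the memory it points to is not reachable through any other parameter, roughly analogous to C's `restrict`. That lets the optimizer keep values loaded through the pointer in registers instead of conservatively reloading them after every store through a different pointer. Lexer and parser methods here are never called with `self` aliasing another argument, so the annotation is safe. A minimal sketch of the idea on a hypothetical type (not from this commit):

    const Counter = struct {
        count: u32 = 0,

        // `noalias` lets the compiler assume the store through `out`
        // cannot modify `self.count`, so the reload can be skipped.
        fn record(noalias self: *Counter, noalias out: *u32) void {
            out.* = self.count; // write through `out`
            self.count += 1; // `count` may stay in a register
        }
    };

Calling such a function with overlapping pointers (say, `c.record(&c.count)`) would break the guarantee and is illegal behavior, which is why the commit only annotates parameters that cannot alias by construction.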

File 1 of 3: the lexer (fn NewLexer_; the file path was not captured in this view)

@@ -178,11 +178,11 @@ fn NewLexer_(
void = if (json_options.guess_indentation)
.{},
-pub inline fn loc(self: *const LexerType) logger.Loc {
+pub inline fn loc(noalias self: *const LexerType) logger.Loc {
return logger.usize2Loc(self.start);
}
-pub fn syntaxError(self: *LexerType) !void {
+pub fn syntaxError(noalias self: *LexerType) !void {
@branchHint(.cold);
// Only add this if there is not already an error.
@@ -193,20 +193,20 @@ fn NewLexer_(
return Error.SyntaxError;
}
-pub fn addDefaultError(self: *LexerType, msg: []const u8) !void {
+pub fn addDefaultError(noalias self: *LexerType, msg: []const u8) !void {
@branchHint(.cold);
self.addError(self.start, "{s}", .{msg}, true);
return Error.SyntaxError;
}
-pub fn addSyntaxError(self: *LexerType, _loc: usize, comptime fmt: []const u8, args: anytype) !void {
+pub fn addSyntaxError(noalias self: *LexerType, _loc: usize, comptime fmt: []const u8, args: anytype) !void {
@branchHint(.cold);
self.addError(_loc, fmt, args, false);
return Error.SyntaxError;
}
-pub fn addError(self: *LexerType, _loc: usize, comptime format: []const u8, args: anytype, _: bool) void {
+pub fn addError(noalias self: *LexerType, _loc: usize, comptime format: []const u8, args: anytype, _: bool) void {
@branchHint(.cold);
if (self.is_log_disabled) return;
@@ -219,7 +219,7 @@ fn NewLexer_(
self.prev_error_loc = __loc;
}
-pub fn addRangeError(self: *LexerType, r: logger.Range, comptime format: []const u8, args: anytype, _: bool) !void {
+pub fn addRangeError(noalias self: *LexerType, r: logger.Range, comptime format: []const u8, args: anytype, _: bool) !void {
@branchHint(.cold);
if (self.is_log_disabled) return;
@@ -236,7 +236,7 @@ fn NewLexer_(
// }
}
-pub fn addRangeErrorWithNotes(self: *LexerType, r: logger.Range, comptime format: []const u8, args: anytype, notes: []const logger.Data) !void {
+pub fn addRangeErrorWithNotes(noalias self: *LexerType, r: logger.Range, comptime format: []const u8, args: anytype, notes: []const logger.Data) !void {
@branchHint(.cold);
if (self.is_log_disabled) return;
@@ -282,7 +282,7 @@ fn NewLexer_(
/// Look ahead at the next n codepoints without advancing the iterator.
/// If fewer than n codepoints are available, then return the remainder of the string.
-fn peek(it: *LexerType, n: usize) string {
+fn peek(noalias it: *LexerType, n: usize) string {
const original_i = it.current;
defer it.current = original_i;
@@ -296,11 +296,11 @@ fn NewLexer_(
return it.source.contents[original_i..end_ix];
}
-pub inline fn isIdentifierOrKeyword(lexer: LexerType) bool {
+pub inline fn isIdentifierOrKeyword(noalias lexer: *const LexerType) bool {
return @intFromEnum(lexer.token) >= @intFromEnum(T.t_identifier);
}
-pub fn deinit(this: *LexerType) void {
+pub fn deinit(noalias this: *LexerType) void {
this.temp_buffer_u16.clearAndFree();
this.all_comments.clearAndFree();
this.comments_to_preserve_before.clearAndFree();
@@ -794,7 +794,7 @@ fn NewLexer_(
}
}
-inline fn nextCodepointSlice(it: *LexerType) []const u8 {
+inline fn nextCodepointSlice(noalias it: *const LexerType) []const u8 {
if (it.current >= it.source.contents.len) {
return "";
}
@@ -802,11 +802,11 @@ fn NewLexer_(
return if (!(cp_len + it.current > it.source.contents.len)) it.source.contents[it.current .. cp_len + it.current] else "";
}
-fn remaining(it: *const LexerType) []const u8 {
+fn remaining(noalias it: *const LexerType) []const u8 {
return it.source.contents[it.current..];
}
-inline fn nextCodepoint(it: *LexerType) CodePoint {
+inline fn nextCodepoint(noalias it: *LexerType) CodePoint {
if (it.current >= it.source.contents.len) {
it.end = it.source.contents.len;
return -1;
@@ -830,7 +830,7 @@ fn NewLexer_(
return code_point;
}
-pub fn step(lexer: *LexerType) void {
+pub fn step(noalias lexer: *LexerType) void {
lexer.code_point = lexer.nextCodepoint();
// Track the approximate number of newlines in the file so we can preallocate
@@ -842,7 +842,7 @@ fn NewLexer_(
lexer.approximate_newline_count += @intFromBool(lexer.code_point == '\n');
}
-pub inline fn expect(self: *LexerType, comptime token: T) !void {
+pub inline fn expect(noalias self: *LexerType, comptime token: T) !void {
if (self.token != token) {
try self.expected(token);
}
@@ -850,7 +850,7 @@ fn NewLexer_(
try self.next();
}
-pub inline fn expectOrInsertSemicolon(lexer: *LexerType) !void {
+pub inline fn expectOrInsertSemicolon(noalias lexer: *LexerType) !void {
if (lexer.token == T.t_semicolon or (!lexer.has_newline_before and
lexer.token != T.t_close_brace and lexer.token != T.t_end_of_file))
{
@@ -858,7 +858,7 @@ fn NewLexer_(
}
}
-pub fn addUnsupportedSyntaxError(self: *LexerType, msg: []const u8) !void {
+pub fn addUnsupportedSyntaxError(noalias self: *LexerType, msg: []const u8) !void {
self.addError(self.end, "Unsupported syntax: {s}", .{msg}, true);
return Error.SyntaxError;
}
@@ -993,7 +993,7 @@ fn NewLexer_(
return result;
}
-pub fn expectContextualKeyword(self: *LexerType, comptime keyword: string) !void {
+pub fn expectContextualKeyword(noalias self: *LexerType, comptime keyword: string) !void {
if (!self.isContextualKeyword(keyword)) {
if (@import("builtin").mode == std.builtin.Mode.Debug) {
self.addError(self.start, "Expected \"{s}\" but found \"{s}\" (token: {s})", .{
@@ -1009,7 +1009,7 @@ fn NewLexer_(
try self.next();
}
-pub fn maybeExpandEquals(lexer: *LexerType) !void {
+pub fn maybeExpandEquals(noalias lexer: *LexerType) !void {
switch (lexer.code_point) {
'>' => {
// "=" + ">" = "=>"
@@ -1031,7 +1031,7 @@ fn NewLexer_(
}
}
-pub fn expectLessThan(lexer: *LexerType, comptime is_inside_jsx_element: bool) !void {
+pub fn expectLessThan(noalias lexer: *LexerType, comptime is_inside_jsx_element: bool) !void {
switch (lexer.token) {
.t_less_than => {
if (is_inside_jsx_element) {
@@ -1059,7 +1059,7 @@ fn NewLexer_(
}
}
-pub fn expectGreaterThan(lexer: *LexerType, comptime is_inside_jsx_element: bool) !void {
+pub fn expectGreaterThan(noalias lexer: *LexerType, comptime is_inside_jsx_element: bool) !void {
switch (lexer.token) {
.t_greater_than => {
if (is_inside_jsx_element) {
@@ -1101,7 +1101,7 @@ fn NewLexer_(
}
}
-pub fn next(lexer: *LexerType) !void {
+pub fn next(noalias lexer: *LexerType) !void {
lexer.has_newline_before = lexer.end == 0;
lexer.has_pure_comment_before = false;
lexer.has_no_side_effect_comment_before = false;
@@ -1801,7 +1801,7 @@ fn NewLexer_(
}
}
-pub fn expected(self: *LexerType, token: T) !void {
+pub fn expected(noalias self: *LexerType, token: T) !void {
if (self.is_log_disabled) {
return error.Backtrack;
} else if (tokenToString.get(token).len > 0) {
@@ -1811,7 +1811,7 @@ fn NewLexer_(
}
}
-pub fn unexpected(lexer: *LexerType) !void {
+pub fn unexpected(noalias lexer: *LexerType) !void {
const found = finder: {
lexer.start = @min(lexer.start, lexer.end);
@@ -1826,11 +1826,11 @@ fn NewLexer_(
try lexer.addRangeError(lexer.range(), "Unexpected {s}", .{found}, true);
}
-pub fn raw(self: *LexerType) []const u8 {
+pub fn raw(noalias self: *const LexerType) []const u8 {
return self.source.contents[self.start..self.end];
}
-pub fn isContextualKeyword(self: *LexerType, comptime keyword: string) bool {
+pub fn isContextualKeyword(noalias self: *const LexerType, comptime keyword: string) bool {
return self.token == .t_identifier and strings.eqlComptime(self.raw(), keyword);
}
@@ -1878,7 +1878,7 @@ fn NewLexer_(
}
}
-fn scanCommentText(lexer: *LexerType, for_pragma: bool) void {
+fn scanCommentText(noalias lexer: *LexerType, for_pragma: bool) void {
const text = lexer.source.contents[lexer.start..lexer.end];
const has_legal_annotation = text.len > 2 and text[2] == '!';
const is_multiline_comment = text.len > 1 and text[1] == '*';
@@ -1936,7 +1936,7 @@ fn NewLexer_(
}
/// This scans a "// comment" in a single pass over the input.
-fn scanSingleLineComment(lexer: *LexerType) void {
+fn scanSingleLineComment(noalias lexer: *LexerType) void {
while (true) {
// Find index of newline (ASCII/Unicode), non-ASCII, '#', or '@'.
if (bun.highway.indexOfNewlineOrNonASCIIOrHashOrAt(lexer.remaining())) |relative_index| {
@@ -1998,7 +1998,7 @@ fn NewLexer_(
/// Scans the string for a pragma.
/// offset is used when there's an issue with the JSX pragma later on.
/// Returns the byte length to advance by if found, otherwise 0.
-fn scanPragma(lexer: *LexerType, offset_for_errors: usize, chunk: string, allow_newline: bool) usize {
+fn scanPragma(noalias lexer: *LexerType, offset_for_errors: usize, chunk: string, allow_newline: bool) usize {
if (!lexer.has_pure_comment_before) {
if (strings.hasPrefixWithWordBoundary(chunk, "__PURE__")) {
lexer.has_pure_comment_before = true;
@@ -2187,10 +2187,10 @@ fn NewLexer_(
}
}
-pub fn utf16ToString(lexer: *LexerType, js: JavascriptString) !string {
+pub fn utf16ToString(noalias lexer: *const LexerType, js: JavascriptString) !string {
return try strings.toUTF8AllocWithType(lexer.allocator, []const u16, js);
}
-pub fn nextInsideJSXElement(lexer: *LexerType) !void {
+pub fn nextInsideJSXElement(noalias lexer: *LexerType) !void {
lexer.assertNotJSON();
lexer.has_newline_before = false;
@@ -2412,7 +2412,7 @@ fn NewLexer_(
}
}
-pub fn expectJSXElementChild(lexer: *LexerType, token: T) !void {
+pub fn expectJSXElementChild(noalias lexer: *LexerType, token: T) !void {
lexer.assertNotJSON();
if (lexer.token != token) {
@@ -2542,7 +2542,7 @@ fn NewLexer_(
}
}
-fn maybeDecodeJSXEntity(lexer: *LexerType, text: string, cursor: *strings.CodepointIterator.Cursor) void {
+fn maybeDecodeJSXEntity(noalias lexer: *LexerType, text: string, noalias cursor: *strings.CodepointIterator.Cursor) void {
lexer.assertNotJSON();
if (strings.indexOfChar(text[cursor.width + cursor.i ..], ';')) |length| {
@@ -2606,7 +2606,7 @@ fn NewLexer_(
}
}
}
-pub fn expectInsideJSXElement(lexer: *LexerType, token: T) !void {
+pub fn expectInsideJSXElement(noalias lexer: *LexerType, token: T) !void {
lexer.assertNotJSON();
if (lexer.token != token) {
@@ -2628,7 +2628,7 @@ fn NewLexer_(
try lexer.nextInsideJSXElement();
}
-fn scanRegExpValidateAndStep(lexer: *LexerType) !void {
+fn scanRegExpValidateAndStep(noalias lexer: *LexerType) !void {
lexer.assertNotJSON();
if (lexer.code_point == '\\') {
@@ -2664,7 +2664,7 @@ fn NewLexer_(
lexer.rescan_close_brace_as_template_token = false;
}
-pub fn rawTemplateContents(lexer: *LexerType) string {
+pub fn rawTemplateContents(noalias lexer: *LexerType) string {
lexer.assertNotJSON();
var text: string = undefined;
@@ -2716,7 +2716,7 @@ fn NewLexer_(
return bytes.toOwnedSliceLength(end);
}
-fn parseNumericLiteralOrDot(lexer: *LexerType) !void {
+fn parseNumericLiteralOrDot(noalias lexer: *LexerType) !void {
// Number or dot;
const first = lexer.code_point;
lexer.step();
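A few lexer methods in this file change their receiver type as well as gaining the annotation: `isIdentifierOrKeyword` goes from taking `LexerType` by value to `noalias *const LexerType`, and `nextCodepointSlice`, `raw`, `isContextualKeyword`, and `utf16ToString` tighten `*LexerType` to `*const LexerType`. `noalias` applies to pointers, and `*const` documents that these methods only read. A sketch of the same transformation on a simplified struct (Zig may already pass large by-value arguments by reference internally, but the explicit pointer makes the intent visible and gives `noalias` something to attach to):

    const Lexer = struct {
        contents: []const u8,
        start: usize = 0,
        end: usize = 0,

        // Was `fn raw(self: Lexer)`: conceptually copies the whole struct.
        // Now a read-only pointer asserted not to alias anything else.
        fn raw(noalias self: *const Lexer) []const u8 {
            return self.contents[self.start..self.end];
        }
    };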

File 2 of 3: the parser (fn NewParser_; the file path was not captured in this view)

@@ -111,7 +111,7 @@ const JSXImport = enum {
Fragment: ?LocRef = null,
createElement: ?LocRef = null,
-pub fn get(this: *const Symbols, name: []const u8) ?Ref {
+pub fn get(noalias this: *const Symbols, name: []const u8) ?Ref {
if (strings.eqlComptime(name, "jsx")) return if (this.jsx) |jsx| jsx.ref.? else null;
if (strings.eqlComptime(name, "jsxDEV")) return if (this.jsxDEV) |jsx| jsx.ref.? else null;
if (strings.eqlComptime(name, "jsxs")) return if (this.jsxs) |jsxs| jsxs.ref.? else null;
@@ -120,7 +120,7 @@ const JSXImport = enum {
return null;
}
-pub fn getWithTag(this: *const Symbols, tag: JSXImport) ?Ref {
+pub fn getWithTag(noalias this: *const Symbols, tag: JSXImport) ?Ref {
return switch (tag) {
.jsx => if (this.jsx) |jsx| jsx.ref.? else null,
.jsxDEV => if (this.jsxDEV) |jsx| jsx.ref.? else null,
@@ -130,7 +130,7 @@ const JSXImport = enum {
};
}
-pub fn runtimeImportNames(this: *const Symbols, buf: *[3]string) []const string {
+pub fn runtimeImportNames(noalias this: *const Symbols, buf: *[3]string) []const string {
var i: usize = 0;
if (this.jsxDEV != null) {
bun.assert(this.jsx == null); // we should never end up with this in the same file
@@ -156,7 +156,7 @@ const JSXImport = enum {
return buf[0..i];
}
-pub fn sourceImportNames(this: *const Symbols) []const string {
+pub fn sourceImportNames(noalias this: *const Symbols) []const string {
return if (this.createElement != null) &[_]string{"createElement"} else &[_]string{};
}
};
@@ -447,7 +447,7 @@ const VisitArgsOpts = struct {
pub fn ExpressionTransposer(
comptime ContextType: type,
comptime StateType: type,
-comptime visitor: fn (ptr: *ContextType, arg: Expr, state: StateType) Expr,
+comptime visitor: fn (noalias ptr: *ContextType, arg: Expr, state: StateType) Expr,
) type {
return struct {
pub const Context = ContextType;
@@ -2648,7 +2648,7 @@ pub const StringVoidMap = struct {
return StringVoidMap{ .allocator = allocator };
}
-pub fn reset(this: *StringVoidMap) void {
+pub fn reset(noalias this: *StringVoidMap) void {
// We must reset or the hash table will contain invalid pointers
this.map.clearRetainingCapacity();
}
@@ -3201,7 +3201,7 @@ pub const Parser = struct {
analyze_tracer.end();
}
-fn _parse(self: *Parser, comptime ParserType: type) !js_ast.Result {
+fn _parse(noalias self: *Parser, comptime ParserType: type) !js_ast.Result {
const prev_action = bun.crash_handler.current_action;
defer bun.crash_handler.current_action = prev_action;
bun.crash_handler.current_action = .{ .parse = self.source.path.text };
@@ -4507,7 +4507,7 @@ pub const KnownGlobal = enum {
pub const map = bun.ComptimeEnumMap(KnownGlobal);
-pub noinline fn maybeMarkConstructorAsPure(e: *E.New, symbols: []const Symbol) void {
+pub noinline fn maybeMarkConstructorAsPure(noalias e: *E.New, symbols: []const Symbol) void {
const id = if (e.target.data == .e_identifier) e.target.data.e_identifier.ref else return;
const symbol = &symbols[id.innerIndex()];
if (symbol.kind != .unbound)
@@ -5102,7 +5102,7 @@ fn NewParser_(
return p.options.bundle and p.source.index.isRuntime();
}
-pub fn transposeImport(p: *P, arg: Expr, state: *const TransposeState) Expr {
+pub fn transposeImport(noalias p: *P, arg: Expr, state: *const TransposeState) Expr {
// The argument must be a string
if (arg.data.as(.e_string)) |str| {
// Ignore calls to import() if the control flow is provably dead here.
@@ -5141,7 +5141,7 @@ fn NewParser_(
}, state.loc);
}
-pub fn transposeRequireResolve(p: *P, arg: Expr, require_resolve_ref: Expr) Expr {
+pub fn transposeRequireResolve(noalias p: *P, arg: Expr, require_resolve_ref: Expr) Expr {
// The argument must be a string
if (arg.data == .e_string) {
return p.transposeRequireResolveKnownString(arg);
@@ -5162,7 +5162,7 @@ fn NewParser_(
}, arg.loc);
}
-pub inline fn transposeRequireResolveKnownString(p: *P, arg: Expr) Expr {
+pub inline fn transposeRequireResolveKnownString(noalias p: *P, arg: Expr) Expr {
bun.assert(arg.data == .e_string);
// Ignore calls to import() if the control flow is provably dead here.
@@ -5185,7 +5185,7 @@ fn NewParser_(
);
}
-pub fn transposeRequire(p: *P, arg: Expr, state: *const TransposeState) Expr {
+pub fn transposeRequire(noalias p: *P, arg: Expr, state: *const TransposeState) Expr {
if (!p.options.features.allow_runtime) {
const args = p.allocator.alloc(Expr, 1) catch bun.outOfMemory();
args[0] = arg;
@@ -5282,7 +5282,7 @@ fn NewParser_(
return p.options.features.unwrap_commonjs_to_esm;
}
-fn isBindingUsed(p: *P, binding: Binding, default_export_ref: Ref) bool {
+fn isBindingUsed(noalias p: *P, binding: Binding, default_export_ref: Ref) bool {
switch (binding.data) {
.b_identifier => |ident| {
if (default_export_ref.eql(ident.ref)) return true;
@@ -5319,7 +5319,7 @@ fn NewParser_(
}
}
-pub fn treeShake(p: *P, parts: *[]js_ast.Part, merge: bool) void {
+pub fn treeShake(noalias p: *P, parts: *[]js_ast.Part, merge: bool) void {
var parts_: []js_ast.Part = parts.*;
defer {
if (merge and parts_.len > 1) {
@@ -5433,7 +5433,7 @@ fn NewParser_(
};
if (is_dead) {
-p.clearSymbolUsagesFromDeadPart(part);
+p.clearSymbolUsagesFromDeadPart(&part);
continue;
}
@@ -5458,7 +5458,7 @@ fn NewParser_(
pub const Hoisted = Binding.ToExpr(P, P.wrapIdentifierHoisting);
};
-fn clearSymbolUsagesFromDeadPart(p: *P, part: js_ast.Part) void {
+fn clearSymbolUsagesFromDeadPart(noalias p: *P, part: *const js_ast.Part) void {
const symbol_use_refs = part.symbol_uses.keys();
const symbol_use_values = part.symbol_uses.values();
var symbols = p.symbols.items;
@@ -5472,7 +5472,7 @@ fn NewParser_(
}
}
-pub fn s(_: *P, t: anytype, loc: logger.Loc) Stmt {
+pub fn s(noalias _: *const P, t: anytype, loc: logger.Loc) Stmt {
const Type = @TypeOf(t);
if (!is_typescript_enabled and (Type == S.TypeScript or Type == *S.TypeScript)) {
@compileError("Attempted to use TypeScript syntax in a non-TypeScript environment");
@@ -5557,7 +5557,7 @@ fn NewParser_(
return freq;
}
-pub fn newExpr(p: *P, t: anytype, loc: logger.Loc) Expr {
+pub fn newExpr(noalias p: *P, t: anytype, loc: logger.Loc) Expr {
const Type = @TypeOf(t);
comptime {
@@ -5612,11 +5612,11 @@ fn NewParser_(
}
}
-pub fn findSymbol(p: *P, loc: logger.Loc, name: string) !FindSymbolResult {
+pub fn findSymbol(noalias p: *P, loc: logger.Loc, name: string) !FindSymbolResult {
return findSymbolWithRecordUsage(p, loc, name, true);
}
-pub fn findSymbolWithRecordUsage(p: *P, loc: logger.Loc, name: string, comptime record_usage: bool) !FindSymbolResult {
+pub fn findSymbolWithRecordUsage(noalias p: *P, loc: logger.Loc, name: string, comptime record_usage: bool) !FindSymbolResult {
var declare_loc: logger.Loc = logger.Loc.Empty;
var is_inside_with_scope = false;
// This function can show up in profiling.
@@ -5720,7 +5720,7 @@ fn NewParser_(
};
}
-pub fn recordExportedBinding(p: *P, binding: Binding) void {
+pub fn recordExportedBinding(noalias p: *P, binding: Binding) void {
switch (binding.data) {
.b_missing => {},
.b_identifier => |ident| {
@@ -5739,7 +5739,7 @@ fn NewParser_(
}
}
-pub fn recordExport(p: *P, loc: logger.Loc, alias: string, ref: Ref) !void {
+pub fn recordExport(noalias p: *P, loc: logger.Loc, alias: string, ref: Ref) !void {
if (p.named_exports.get(alias)) |name| {
// Duplicate exports are an error
var notes = try p.allocator.alloc(logger.Data, 1);
@@ -5760,11 +5760,11 @@ fn NewParser_(
}
}
-fn isDeoptimizedCommonJS(p: *P) bool {
+fn isDeoptimizedCommonJS(noalias p: *P) bool {
return p.commonjs_named_exports_deoptimized and p.commonjs_named_exports.count() > 0;
}
-pub fn recordUsage(p: *P, ref: Ref) void {
+pub fn recordUsage(noalias p: *P, ref: Ref) void {
if (p.is_revisit_for_substitution) return;
// The use count stored in the symbol is used for generating symbol names
// during minification. These counts shouldn't include references inside dead
@@ -5788,7 +5788,7 @@ fn NewParser_(
}
}
-fn logArrowArgErrors(p: *P, errors: *DeferredArrowArgErrors) void {
+fn logArrowArgErrors(noalias p: *P, errors: *DeferredArrowArgErrors) void {
if (errors.invalid_expr_await.len > 0) {
const r = errors.invalid_expr_await;
p.log.addRangeError(p.source, r, "Cannot use an \"await\" expression here") catch unreachable;
@@ -5800,7 +5800,7 @@ fn NewParser_(
}
}
-fn keyNameForError(p: *P, key: js_ast.Expr) string {
+fn keyNameForError(noalias p: *P, key: js_ast.Expr) string {
switch (key.data) {
.e_string => {
return key.data.e_string.string(p.allocator) catch unreachable;
@@ -5815,7 +5815,7 @@ fn NewParser_(
}
/// This function is very very hot.
-pub fn handleIdentifier(p: *P, loc: logger.Loc, ident: E.Identifier, original_name: ?string, opts: IdentifierOpts) Expr {
+pub fn handleIdentifier(noalias p: *P, loc: logger.Loc, ident: E.Identifier, original_name: ?string, opts: IdentifierOpts) Expr {
const ref = ident.ref;
if (p.options.features.inlining) {
@@ -5951,7 +5951,7 @@ fn NewParser_(
}
pub fn generateImportStmt(
-p: *P,
+noalias p: *P,
import_path: string,
imports: anytype,
parts: *ListManaged(js_ast.Part),
@@ -6041,7 +6041,7 @@ fn NewParser_(
}
pub fn generateReactRefreshImport(
-p: *P,
+noalias p: *P,
parts: *ListManaged(js_ast.Part),
import_path: []const u8,
clauses: []const ReactRefreshImportClause,
@@ -6058,7 +6058,7 @@ fn NewParser_(
};
fn generateReactRefreshImportHmr(
-p: *P,
+noalias p: *P,
parts: *ListManaged(js_ast.Part),
import_path: []const u8,
clauses: []const ReactRefreshImportClause,
@@ -6143,7 +6143,7 @@ fn NewParser_(
});
}
-fn substituteSingleUseSymbolInStmt(p: *P, stmt: Stmt, ref: Ref, replacement: Expr) bool {
+fn substituteSingleUseSymbolInStmt(noalias p: *P, stmt: Stmt, ref: Ref, replacement: Expr) bool {
const expr: *Expr = brk: {
switch (stmt.data) {
.s_expr => |exp| {
@@ -6217,7 +6217,7 @@ fn NewParser_(
}
fn substituteSingleUseSymbolInExpr(
-p: *P,
+noalias p: *P,
expr: Expr,
ref: Ref,
replacement: Expr,
@@ -6631,7 +6631,7 @@ fn NewParser_(
return .{ .failure = expr };
}
-pub fn prepareForVisitPass(p: *P) anyerror!void {
+pub fn prepareForVisitPass(noalias p: *P) anyerror!void {
{
var i: usize = 0;
p.scope_order_to_visit = try p.allocator.alloc(ScopeOrder, p.scopes_in_order.items.len);
@@ -6763,7 +6763,7 @@ fn NewParser_(
return p.options.bundle or p.options.features.minify_identifiers;
}
-fn hoistSymbols(p: *P, scope: *js_ast.Scope) void {
+fn hoistSymbols(noalias p: *P, scope: *js_ast.Scope) void {
if (!scope.kindStopsHoisting()) {
var iter = scope.members.iterator();
const allocator = p.allocator;
@@ -6952,7 +6952,7 @@ fn NewParser_(
return head;
}
-fn pushScopeForVisitPass(p: *P, kind: js_ast.Scope.Kind, loc: logger.Loc) anyerror!void {
+fn pushScopeForVisitPass(noalias p: *P, kind: js_ast.Scope.Kind, loc: logger.Loc) anyerror!void {
const order = p.nextScopeInOrderForVisitPass();
// Sanity-check that the scopes generated by the first and second passes match
@@ -6972,7 +6972,7 @@ fn NewParser_(
try p.scopes_for_current_part.append(p.allocator, order.scope);
}
-fn pushScopeForParsePass(p: *P, comptime kind: js_ast.Scope.Kind, loc: logger.Loc) !usize {
+fn pushScopeForParsePass(noalias p: *P, comptime kind: js_ast.Scope.Kind, loc: logger.Loc) !usize {
var parent: *Scope = p.current_scope;
const allocator = p.allocator;
var scope = try allocator.create(Scope);
@@ -7047,7 +7047,7 @@ fn NewParser_(
// from expression to binding should be written to "invalidLog" instead. That
// way we can potentially keep this as an expression if it turns out it's not
// needed as a binding after all.
-fn convertExprToBinding(p: *P, expr: ExprNodeIndex, invalid_loc: *LocList) ?Binding {
+fn convertExprToBinding(noalias p: *P, expr: ExprNodeIndex, invalid_loc: *LocList) ?Binding {
switch (expr.data) {
.e_missing => {
return null;
@@ -7150,7 +7150,7 @@ fn NewParser_(
return null;
}
-fn convertExprToBindingAndInitializer(p: *P, _expr: *ExprNodeIndex, invalid_log: *LocList, is_spread: bool) ExprBindingTuple {
+fn convertExprToBindingAndInitializer(noalias p: *P, _expr: *ExprNodeIndex, invalid_log: *LocList, is_spread: bool) ExprBindingTuple {
var initializer: ?ExprNodeIndex = null;
var expr = _expr;
// zig syntax is sometimes painful
@@ -7581,13 +7581,13 @@ fn NewParser_(
}
};
-fn forbidLexicalDecl(p: *P, loc: logger.Loc) anyerror!void {
+fn forbidLexicalDecl(noalias p: *const P, loc: logger.Loc) anyerror!void {
try p.log.addError(p.source, loc, "Cannot use a declaration in a single-statement context");
}
/// If we attempt to parse TypeScript syntax outside of a TypeScript file
/// make it a compile error
-inline fn markTypeScriptOnly(_: *const P) void {
+inline fn markTypeScriptOnly(noalias _: *const P) void {
if (comptime !is_typescript_enabled) {
@compileError("This function can only be used in TypeScript");
}
@@ -7598,7 +7598,7 @@ fn NewParser_(
}
}
-fn logExprErrors(p: *P, errors: *DeferredErrors) void {
+fn logExprErrors(noalias p: *P, noalias errors: *DeferredErrors) void {
if (errors.invalid_expr_default_value) |r| {
p.log.addRangeError(
p.source,
@@ -7618,7 +7618,7 @@ fn NewParser_(
// This assumes the "function" token has already been parsed
-fn parseFnStmt(p: *P, loc: logger.Loc, opts: *ParseStatementOptions, asyncRange: ?logger.Range) !Stmt {
+fn parseFnStmt(noalias p: *P, loc: logger.Loc, noalias opts: *ParseStatementOptions, asyncRange: ?logger.Range) !Stmt {
const is_generator = p.lexer.token == T.t_asterisk;
const is_async = asyncRange != null;
@@ -7747,7 +7747,7 @@ fn NewParser_(
);
}
-fn popAndDiscardScope(p: *P, scope_index: usize) void {
+fn popAndDiscardScope(noalias p: *P, scope_index: usize) void {
// Move up to the parent scope
const to_discard = p.current_scope;
const parent = to_discard.parent orelse unreachable;
@@ -11451,14 +11451,14 @@ fn NewParser_(
};
}
-fn requireInitializers(p: *P, comptime kind: S.Local.Kind, decls: []G.Decl) anyerror!void {
+fn requireInitializers(noalias p: *P, comptime kind: S.Local.Kind, decls: []G.Decl) anyerror!void {
const what = switch (kind) {
.k_await_using, .k_using => "declaration",
.k_const => "constant",
else => @compileError("unreachable"),
};
-for (decls) |decl| {
+for (decls) |*decl| {
if (decl.value == null) {
switch (decl.binding.data) {
.b_identifier => |ident| {
@@ -13038,7 +13038,7 @@ fn NewParser_(
return @as(u32, @intCast(index));
}
-pub fn popScope(p: *P) void {
+pub fn popScope(noalias p: *P) void {
const current_scope = p.current_scope;
// We cannot rename anything inside a scope containing a direct eval() call
if (current_scope.contains_direct_eval) {
@@ -13097,7 +13097,7 @@ fn NewParser_(
p.current_scope = current_scope.parent orelse p.panic("Internal error: attempted to call popScope() on the topmost scope", .{});
}
-pub fn markExprAsParenthesized(_: *P, expr: *Expr) void {
+pub fn markExprAsParenthesized(noalias _: *P, expr: *Expr) void {
switch (expr.data) {
.e_array => |ex| {
ex.is_parenthesized = true;
@@ -13865,7 +13865,7 @@ fn NewParser_(
return ExprListLoc{ .list = ExprNodeList.fromList(args), .loc = close_paren_loc };
}
-pub fn parseSuffix(p: *P, _left: Expr, level: Level, errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr {
+pub fn parseSuffix(noalias p: *P, _left: Expr, level: Level, noalias errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr {
var left = _left;
var optional_chain: ?js_ast.OptionalChain = null;
while (true) {
@@ -14675,7 +14675,7 @@ fn NewParser_(
Output.panic(fmt ++ "\n{s}", args ++ .{panic_buffer[0..panic_stream.pos]});
}
-pub fn parsePrefix(p: *P, level: Level, errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr {
+pub fn parsePrefix(noalias p: *P, level: Level, noalias errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr {
const loc = p.lexer.loc();
const l = @intFromEnum(level);
// Output.print("Parse Prefix {s}:{s} @{s} ", .{ p.lexer.token, p.lexer.raw(), @tagName(level) });
@@ -15378,7 +15378,7 @@ fn NewParser_(
}
// Note: The caller has already parsed the "import" keyword
-fn parseImportExpr(p: *P, loc: logger.Loc, level: Level) anyerror!Expr {
+fn parseImportExpr(noalias p: *P, loc: logger.Loc, level: Level) anyerror!Expr {
// Parse an "import.meta" expression
if (p.lexer.token == .t_dot) {
p.esm_import_keyword = js_lexer.rangeOfIdentifier(p.source, loc);
@@ -15454,7 +15454,7 @@ fn NewParser_(
}, loc);
}
-fn parseJSXPropValueIdentifier(p: *P, previous_string_with_backslash_loc: *logger.Loc) !Expr {
+fn parseJSXPropValueIdentifier(noalias p: *P, previous_string_with_backslash_loc: *logger.Loc) !Expr {
// Use NextInsideJSXElement() not Next() so we can parse a JSX-style string literal
try p.lexer.nextInsideJSXElement();
if (p.lexer.token == .t_string_literal) {
@@ -15473,7 +15473,7 @@ fn NewParser_(
}
}
-fn parseJSXElement(p: *P, loc: logger.Loc) anyerror!Expr {
+fn parseJSXElement(noalias p: *P, loc: logger.Loc) anyerror!Expr {
if (only_scan_imports_and_do_not_visit) {
p.needs_jsx_import = true;
}
@@ -15748,7 +15748,7 @@ fn NewParser_(
}
}
-fn willNeedBindingPattern(p: *P) bool {
+fn willNeedBindingPattern(noalias p: *const P) bool {
return switch (p.lexer.token) {
// "[a] = b;"
.t_equals => true,
@@ -15760,7 +15760,7 @@ fn NewParser_(
};
}
-fn appendPart(p: *P, parts: *ListManaged(js_ast.Part), stmts: []Stmt) anyerror!void {
+fn appendPart(noalias p: *P, parts: *ListManaged(js_ast.Part), stmts: []Stmt) anyerror!void {
// Reuse the memory if possible
// This is reusable if the last part turned out to be dead
p.symbol_uses.clearRetainingCapacity();
@@ -15829,7 +15829,7 @@ fn NewParser_(
p.had_commonjs_named_exports_this_visit = false;
} else if (p.declared_symbols.len() > 0 or p.symbol_uses.count() > 0) {
// if the part is dead, invalidate all the usage counts
-p.clearSymbolUsagesFromDeadPart(.{ .stmts = undefined, .declared_symbols = p.declared_symbols, .symbol_uses = p.symbol_uses });
+p.clearSymbolUsagesFromDeadPart(&.{ .stmts = undefined, .declared_symbols = p.declared_symbols, .symbol_uses = p.symbol_uses });
p.declared_symbols.clearRetainingCapacity();
p.import_records_for_current_part.clearRetainingCapacity();
}
@@ -16009,7 +16009,7 @@ fn NewParser_(
}
}
-fn recordDeclaredSymbol(p: *P, ref: Ref) anyerror!void {
+fn recordDeclaredSymbol(noalias p: *P, ref: Ref) anyerror!void {
bun.assert(ref.isSymbol());
try p.declared_symbols.append(p.allocator, DeclaredSymbol{
.ref = ref,
@@ -16018,7 +16018,7 @@ fn NewParser_(
}
// public for JSNode.JSXWriter usage
-pub inline fn visitExpr(p: *P, expr: Expr) Expr {
+pub inline fn visitExpr(noalias p: *P, expr: Expr) Expr {
if (only_scan_imports_and_do_not_visit) {
@compileError("only_scan_imports_and_do_not_visit must not run this.");
}
@@ -19054,7 +19054,7 @@ fn NewParser_(
}
const visitors = struct {
-pub fn s_import(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Import) !void {
+pub fn s_import(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Import) !void {
try p.recordDeclaredSymbol(data.namespace_ref);
if (data.default_name) |default_name| {
@@ -19069,7 +19069,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_export_clause(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.ExportClause) !void {
+pub fn s_export_clause(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.ExportClause) !void {
// "export {foo}"
var end: usize = 0;
var any_replaced = false;
@@ -19135,7 +19135,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_export_from(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.ExportFrom) !void {
+pub fn s_export_from(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.ExportFrom) !void {
// "export {foo} from 'path'"
const name = p.loadNameFromRef(data.namespace_ref);
@@ -19186,7 +19186,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_export_star(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.ExportStar) !void {
+pub fn s_export_star(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.ExportStar) !void {
// "export * from 'path'"
const name = p.loadNameFromRef(data.namespace_ref);
@@ -19206,7 +19206,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_export_default(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.ExportDefault) !void {
+pub fn s_export_default(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.ExportDefault) !void {
defer {
if (data.default_name.ref) |ref| {
p.recordDeclaredSymbol(ref) catch unreachable;
@@ -19482,7 +19482,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_function(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Function) !void {
+pub fn s_function(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Function) !void {
// We mark it as dead, but the value may not actually be dead
// We just want to be sure to not increment the usage counts for anything in the function
const mark_as_dead = p.options.features.dead_code_elimination and data.func.flags.contains(.is_export) and
@@ -19571,7 +19571,7 @@ fn NewParser_(
return;
}
-pub fn s_class(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Class) !void {
+pub fn s_class(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Class) !void {
const mark_as_dead = p.options.features.dead_code_elimination and data.is_export and
p.options.features.replace_exports.count() > 0 and p.isExportToEliminate(data.class.class_name.?.ref.?);
const original_is_dead = p.is_control_flow_dead;
@@ -19632,7 +19632,7 @@ fn NewParser_(
return;
}
-pub fn s_export_equals(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.ExportEquals) !void {
+pub fn s_export_equals(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.ExportEquals) !void {
// "module.exports = value"
stmts.append(
Stmt.assign(
@@ -19643,7 +19643,7 @@ fn NewParser_(
p.recordUsage(p.module_ref);
return;
}
-pub fn s_break(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Break) !void {
+pub fn s_break(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Break) !void {
if (data.label) |*label| {
const name = p.loadNameFromRef(label.ref orelse p.panicLoc("Expected label to have a ref", .{}, label.loc));
const res = p.findLabelSymbol(label.loc, name);
@@ -19659,7 +19659,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_continue(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Continue) !void {
+pub fn s_continue(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Continue) !void {
if (data.label) |*label| {
const name = p.loadNameFromRef(label.ref orelse p.panicLoc("Expected continue label to have a ref", .{}, label.loc));
const res = p.findLabelSymbol(label.loc, name);
@@ -19675,7 +19675,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_label(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Label) !void {
+pub fn s_label(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Label) !void {
p.pushScopeForVisitPass(.label, stmt.loc) catch unreachable;
const name = p.loadNameFromRef(data.name.ref.?);
const ref = p.newSymbol(.label, name) catch unreachable;
@@ -19693,7 +19693,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_local(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Local, was_after_after_const_local_prefix: bool) !void {
+pub fn s_local(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Local, was_after_after_const_local_prefix: bool) !void {
// TODO: Silently remove unsupported top-level "await" in dead code branches
// (this was from 'await using' syntax)
@@ -19798,7 +19798,7 @@ fn NewParser_(
return;
}
-pub fn s_expr(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.SExpr) !void {
+pub fn s_expr(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.SExpr) !void {
const should_trim_primitive = p.options.features.dead_code_elimination and
(p.options.features.minify_syntax and data.value.isPrimitiveLiteral());
p.stmt_expr_value = data.value.data;
@@ -19900,11 +19900,11 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_throw(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Throw) !void {
+pub fn s_throw(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Throw) !void {
data.value = p.visitExpr(data.value);
try stmts.append(stmt.*);
}
-pub fn s_return(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Return) !void {
+pub fn s_return(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Return) !void {
// Forbid top-level return inside modules with ECMAScript-style exports
if (p.fn_or_arrow_data_visit.is_outside_fn_or_arrow) {
const where = where: {
@@ -19934,7 +19934,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_block(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Block) !void {
+pub fn s_block(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Block) !void {
{
p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable;
@@ -19962,7 +19962,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_with(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.With) !void {
+pub fn s_with(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.With) !void {
data.value = p.visitExpr(data.value);
p.pushScopeForVisitPass(.with, data.body_loc) catch unreachable;
@@ -19980,7 +19980,7 @@ fn NewParser_(
p.popScope();
try stmts.append(stmt.*);
}
-pub fn s_while(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.While) !void {
+pub fn s_while(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.While) !void {
data.test_ = p.visitExpr(data.test_);
data.body = p.visitLoopBody(data.body);
@@ -19992,14 +19992,14 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_do_while(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.DoWhile) !void {
+pub fn s_do_while(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.DoWhile) !void {
data.body = p.visitLoopBody(data.body);
data.test_ = p.visitExpr(data.test_);
data.test_ = SideEffects.simplifyBoolean(p, data.test_);
try stmts.append(stmt.*);
}
-pub fn s_if(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.If) !void {
+pub fn s_if(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.If) !void {
data.test_ = p.visitExpr(data.test_);
if (p.options.features.minify_syntax) {
@@ -20098,7 +20098,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_for(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.For) !void {
+pub fn s_for(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.For) !void {
p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable;
if (data.init) |initst| {
@@ -20137,7 +20137,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_for_in(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.ForIn) !void {
+pub fn s_for_in(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.ForIn) !void {
{
p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable;
defer p.popScope();
@@ -20173,7 +20173,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_for_of(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.ForOf) !void {
+pub fn s_for_of(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.ForOf) !void {
p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable;
defer p.popScope();
_ = p.visitForLoopInit(data.init, true);
@@ -20235,7 +20235,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_try(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Try) !void {
+pub fn s_try(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Try) !void {
p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable;
{
var _stmts = ListManaged(Stmt).fromOwnedSlice(p.allocator, data.body);
@@ -20273,7 +20273,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_switch(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Switch) !void {
+pub fn s_switch(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Switch) !void {
data.test_ = p.visitExpr(data.test_);
{
p.pushScopeForVisitPass(.block, data.body_loc) catch unreachable;
@@ -20298,7 +20298,7 @@ fn NewParser_(
try stmts.append(stmt.*);
}
-pub fn s_enum(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Enum, was_after_after_const_local_prefix: bool) !void {
+pub fn s_enum(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Enum, was_after_after_const_local_prefix: bool) !void {
// Do not end the const local prefix after TypeScript enums. We process
// them first within their scope so that they are inlined into all code in
@@ -20490,7 +20490,7 @@ fn NewParser_(
);
return;
}
-pub fn s_namespace(p: *P, stmts: *ListManaged(Stmt), stmt: *Stmt, data: *S.Namespace) !void {
+pub fn s_namespace(noalias p: *P, noalias stmts: *ListManaged(Stmt), noalias stmt: *Stmt, noalias data: *S.Namespace) !void {
p.recordDeclaredSymbol(data.name.ref.?) catch unreachable;
// Scan ahead for any variables inside this namespace. This must be done
@@ -20538,7 +20538,7 @@ fn NewParser_(
return p.options.features.replace_exports.contains(symbol_name);
}
-fn visitDecls(p: *P, decls: []G.Decl, was_const: bool, comptime is_possibly_decl_to_remove: bool) usize {
+fn visitDecls(noalias p: *P, decls: []G.Decl, was_const: bool, comptime is_possibly_decl_to_remove: bool) usize {
var j: usize = 0;
var out_decls = decls;
for (decls) |*decl| {
@@ -20809,7 +20809,7 @@ fn NewParser_(
}
}
-pub fn appendIfBodyPreservingScope(p: *P, stmts: *ListManaged(Stmt), body: Stmt) anyerror!void {
+pub fn appendIfBodyPreservingScope(noalias p: *P, stmts: *ListManaged(Stmt), body: Stmt) anyerror!void {
switch (body.data) {
.s_block => |block| {
var keep_block = false;
@@ -20859,8 +20859,8 @@ fn NewParser_(
}
fn generateClosureForTypeScriptNamespaceOrEnum(
-p: *P,
-stmts: *ListManaged(Stmt),
+noalias p: *P,
+noalias stmts: *ListManaged(Stmt),
stmt_loc: logger.Loc,
is_export: bool,
name_loc: logger.Loc,
@@ -21010,7 +21010,7 @@ fn NewParser_(
}
fn lowerClass(
-p: *P,
+noalias p: *P,
stmtorexpr: js_ast.StmtOrExpr,
) []Stmt {
switch (stmtorexpr) {
@@ -21333,7 +21333,7 @@ fn NewParser_(
}
}
-fn serializeMetadata(p: *P, ts_metadata: TypeScript.Metadata) !Expr {
+fn serializeMetadata(noalias p: *P, ts_metadata: TypeScript.Metadata) !Expr {
return switch (ts_metadata) {
.m_none,
.m_any,
@@ -21614,7 +21614,7 @@ fn NewParser_(
return Expr.initIdentifier(ref, loc);
}
-fn wrapInlinedEnum(p: *P, value: Expr, comment: string) Expr {
+fn wrapInlinedEnum(noalias p: *P, value: Expr, comment: string) Expr {
if (bun.strings.containsComptime(comment, "*/")) {
// Don't wrap with a comment
return value;
@@ -21627,7 +21627,7 @@ fn NewParser_(
}, value.loc);
}
-fn valueForDefine(p: *P, loc: logger.Loc, assign_target: js_ast.AssignTarget, is_delete_target: bool, define_data: *const DefineData) Expr {
+fn valueForDefine(noalias p: *P, loc: logger.Loc, assign_target: js_ast.AssignTarget, is_delete_target: bool, define_data: *const DefineData) Expr {
switch (define_data.value) {
.e_identifier => {
return p.handleIdentifier(
@@ -21653,7 +21653,7 @@ fn NewParser_(
};
}
-fn isDotDefineMatch(p: *P, expr: Expr, parts: []const string) bool {
+fn isDotDefineMatch(noalias p: *P, expr: Expr, parts: []const string) bool {
switch (expr.data) {
.e_dot => |ex| {
if (parts.len > 1) {
@@ -21712,7 +21712,7 @@ fn NewParser_(
return false;
}
-fn visitBinding(p: *P, binding: BindingNodeIndex, duplicate_arg_check: ?*StringVoidMap) void {
+fn visitBinding(noalias p: *P, binding: BindingNodeIndex, duplicate_arg_check: ?*StringVoidMap) void {
switch (binding.data) {
.b_missing => {},
.b_identifier => |bind| {
@@ -21780,7 +21780,7 @@ fn NewParser_(
}
}
-fn visitLoopBody(p: *P, stmt: StmtNodeIndex) StmtNodeIndex {
+fn visitLoopBody(noalias p: *P, stmt: StmtNodeIndex) StmtNodeIndex {
const old_is_inside_loop = p.fn_or_arrow_data_visit.is_inside_loop;
p.fn_or_arrow_data_visit.is_inside_loop = true;
p.loop_body = stmt.data;
@@ -21789,7 +21789,7 @@ fn NewParser_(
return res;
}
-fn visitSingleStmtBlock(p: *P, stmt: Stmt, kind: StmtsKind) Stmt {
+fn visitSingleStmtBlock(noalias p: *P, stmt: Stmt, kind: StmtsKind) Stmt {
var new_stmt = stmt;
p.pushScopeForVisitPass(.block, stmt.loc) catch unreachable;
var stmts = ListManaged(Stmt).initCapacity(p.allocator, stmt.data.s_block.stmts.len) catch unreachable;
@@ -21804,7 +21804,7 @@ fn NewParser_(
return new_stmt;
}
-fn visitSingleStmt(p: *P, stmt: Stmt, kind: StmtsKind) Stmt {
+fn visitSingleStmt(noalias p: *P, stmt: Stmt, kind: StmtsKind) Stmt {
if (stmt.data == .s_block) {
return p.visitSingleStmtBlock(stmt, kind);
}
@@ -21831,7 +21831,7 @@ fn NewParser_(
}
// One statement could potentially expand to several statements
-fn stmtsToSingleStmt(p: *P, loc: logger.Loc, stmts: []Stmt) Stmt {
+fn stmtsToSingleStmt(noalias p: *P, loc: logger.Loc, stmts: []Stmt) Stmt {
if (stmts.len == 0) {
return Stmt{ .data = Prefill.Data.SEmpty, .loc = loc };
}
@@ -21844,7 +21844,7 @@ fn NewParser_(
return p.s(S.Block{ .stmts = stmts }, loc);
}
-fn findLabelSymbol(p: *P, loc: logger.Loc, name: string) FindLabelSymbolResult {
+fn findLabelSymbol(noalias p: *P, loc: logger.Loc, name: string) FindLabelSymbolResult {
var res = FindLabelSymbolResult{ .ref = Ref.None, .is_loop = false };
var _scope: ?*Scope = p.current_scope;
@@ -21874,7 +21874,7 @@ fn NewParser_(
return res;
}
-fn visitClass(p: *P, name_scope_loc: logger.Loc, class: *G.Class, default_name_ref: Ref) Ref {
+fn visitClass(noalias p: *P, name_scope_loc: logger.Loc, noalias class: *G.Class, default_name_ref: Ref) Ref {
if (only_scan_imports_and_do_not_visit) {
@compileError("only_scan_imports_and_do_not_visit must not run this.");
}
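Two parser changes go beyond adding annotations. `clearSymbolUsagesFromDeadPart` now takes `*const js_ast.Part` instead of `js_ast.Part`, so both call sites pass an address (`&part`, and `&.{ ... }` for the literal argument); passing by pointer avoids copying the struct and gives the parameter a type `noalias` can apply to. Similarly, `requireInitializers` iterates with `for (decls) |*decl|`, capturing each element by pointer rather than by value. A minimal sketch of the signature change, with hypothetical types and field names:

    const Part = struct {
        symbol_uses: u32 = 0,
    };

    const Parser = struct {
        cleared: u32 = 0,

        // Was `part: Part` (copied per call). Now a read-only pointer
        // asserted not to overlap `self` or anything reachable from it.
        fn clear(noalias self: *Parser, noalias part: *const Part) void {
            self.cleared += part.symbol_uses;
        }
    };

Call sites change accordingly, from `p.clear(part)` to `p.clear(&part)`.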

File 3 of 3: codepoint iterator helpers (pub fn NewCodePointIterator; the file path was not captured in this view)

@@ -4845,7 +4845,7 @@ pub fn NewCodePointIterator(comptime CodePointType_: type, comptime zeroValue: c
unreachable;
}
-pub inline fn next(it: *const Iterator, cursor: *Cursor) bool {
+pub inline fn next(noalias it: *const Iterator, noalias cursor: *Cursor) bool {
const pos: u32 = @as(u32, cursor.width) + cursor.i;
if (pos >= it.bytes.len) {
return false;
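In this final hunk, both parameters of `NewCodePointIterator.next` become `noalias`: the iterator is only read through `it` while the cursor is written through `cursor`, and the two structures never overlap. A hypothetical usage sketch; the `init` constructor and the `c` field on `Cursor` are assumptions, since their definitions are not shown in this diff:

    var cursor = strings.CodepointIterator.Cursor{};
    const it = strings.CodepointIterator.init(source_bytes);
    while (it.next(&cursor)) {
        // cursor.i and cursor.width are real fields (used earlier in
        // maybeDecodeJSXEntity); `cursor.c` and `init` are assumed here.
        handleCodepoint(cursor.c);
    }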