Mirror of https://github.com/oven-sh/bun, synced 2026-02-02 15:08:46 +00:00
# Reduce stack space usage of parseSuffix (#21662)
### What does this PR do?

Reduce stack space usage of parseSuffix

### How did you verify your code works?

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
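The diffs below rewrite the expression-parsing entry points from return-by-value to an out-parameter style: the caller owns the `Expr` storage and passes a pointer down, so deeply recursive calls such as `parseSuffix` no longer keep a full `Expr` temporary in every stack frame. As a rough illustration of the idea (not Bun's actual types; `Node`, `parseByValue`, and `parseIntoOut` are hypothetical names), consider:

```zig
const std = @import("std");

// Hypothetical, oversized value type standing in for js_ast.Expr.
const Node = struct {
    payload: [64]u8 = undefined,
    depth: u32 = 0,
};

// Before: every level of recursion returns a Node by value, so each
// stack frame reserves space for its own temporary copy.
fn parseByValue(depth: u32) Node {
    if (depth == 0) return .{ .depth = 0 };
    var inner = parseByValue(depth - 1);
    inner.depth += 1;
    return inner;
}

// After: the caller owns the storage and passes a pointer down, so the
// recursive frames only hold a pointer plus their own locals.
fn parseIntoOut(depth: u32, out: *Node) void {
    if (depth == 0) {
        out.* = .{ .depth = 0 };
        return;
    }
    parseIntoOut(depth - 1, out);
    out.depth += 1;
}

pub fn main() void {
    var node: Node = undefined;
    parseIntoOut(8, &node);
    std.debug.print("by-value: {d}, out-param: {d}\n", .{ parseByValue(8).depth, node.depth });
}
```

Zig's result-location semantics can already elide some of these copies, but the explicit out pointer makes the saving independent of what the compiler chooses to do.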
```diff
@@ -231,6 +231,7 @@ bun ci
 9. **Cross-platform** - Run `bun run zig:check-all` to compile the Zig code on all platforms when making platform-specific changes
 10. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
 11. **Be humble & honest** - NEVER overstate what you got done or what actually works in commits, PRs or in messages to the user.
+12. **Branch names must start with `claude/`** - This is a requirement for the CI to work.
 
 ## Key APIs and Features
 
```
```diff
@@ -1070,7 +1070,7 @@ pub fn NewParser_(
         }
     }
 
-    pub fn keyNameForError(noalias p: *P, key: js_ast.Expr) string {
+    pub fn keyNameForError(noalias p: *P, key: *const js_ast.Expr) string {
         switch (key.data) {
             .e_string => {
                 return key.data.e_string.string(p.allocator) catch unreachable;
```
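`keyNameForError` now takes `*const js_ast.Expr` instead of an `Expr` by value. Since `Expr` is a sizable tagged value, passing a const pointer keeps the argument copy off the callee's frame; `noalias` additionally tells the compiler the pointer does not alias the other parameters. A minimal sketch of the same signature change, using a hypothetical `Big` type rather than Bun's AST:

```zig
// Hypothetical large value type; Bun's js_ast.Expr is a tagged union of
// many expression payloads, so passing it by value copies the whole thing.
const Big = struct {
    bytes: [128]u8 = undefined,
    tag: u8 = 0,
};

// By value: the callee conceptually gets its own copy of the struct.
fn tagOfByValue(v: Big) u8 {
    return v.tag;
}

// By const pointer: only a pointer crosses the call; `noalias` promises
// the pointer does not alias other parameters.
fn tagOfByPtr(noalias v: *const Big) u8 {
    return v.tag;
}

test "both forms agree" {
    const b = Big{ .tag = 7 };
    try @import("std").testing.expectEqual(tagOfByValue(b), tagOfByPtr(&b));
}
```

Zig is allowed to pass large by-value parameters by reference behind the scenes, but spelling out the pointer makes the behavior explicit and lets callers forward a pointer they already hold.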
```diff
@@ -26,25 +26,27 @@ pub fn Parse(
     pub const parseTypeScriptImportEqualsStmt = @import("./parseTypescript.zig").ParseTypescript(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only).parseTypeScriptImportEqualsStmt;
     pub const parseTypescriptEnumStmt = @import("./parseTypescript.zig").ParseTypescript(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only).parseTypescriptEnumStmt;
 
-    pub inline fn parseExprOrBindings(p: *P, level: Level, errors: ?*DeferredErrors) anyerror!Expr {
-        return try p.parseExprCommon(level, errors, Expr.EFlags.none);
+    pub inline fn parseExprOrBindings(p: *P, level: Level, errors: ?*DeferredErrors, expr: *Expr) anyerror!void {
+        return p.parseExprCommon(level, errors, Expr.EFlags.none, expr);
     }
 
     pub inline fn parseExpr(p: *P, level: Level) anyerror!Expr {
-        return try p.parseExprCommon(level, null, Expr.EFlags.none);
+        var expr: Expr = undefined;
+        try p.parseExprCommon(level, null, Expr.EFlags.none, &expr);
+        return expr;
     }
 
-    pub inline fn parseExprWithFlags(p: *P, level: Level, flags: Expr.EFlags) anyerror!Expr {
-        return try p.parseExprCommon(level, null, flags);
+    pub inline fn parseExprWithFlags(p: *P, level: Level, flags: Expr.EFlags, expr: *Expr) anyerror!void {
+        return p.parseExprCommon(level, null, flags, expr);
     }
 
-    pub fn parseExprCommon(p: *P, level: Level, errors: ?*DeferredErrors, flags: Expr.EFlags) anyerror!Expr {
+    pub fn parseExprCommon(p: *P, level: Level, errors: ?*DeferredErrors, flags: Expr.EFlags, expr: *Expr) anyerror!void {
         if (!p.stack_check.isSafeToRecurse()) {
             try bun.throwStackOverflow();
         }
 
         const had_pure_comment_before = p.lexer.has_pure_comment_before and !p.options.ignore_dce_annotations;
-        var expr = try p.parsePrefix(level, errors, flags);
+        expr.* = try p.parsePrefix(level, errors, flags);
 
         // There is no formal spec for "__PURE__" comments but from reverse-
         // engineering, it looks like they apply to the next CallExpression or
```
```diff
@@ -52,7 +54,7 @@ pub fn Parse(
         // to the expression "a().b()".
 
         if (had_pure_comment_before and level.lt(.call)) {
-            expr = try p.parseSuffix(expr, @as(Level, @enumFromInt(@intFromEnum(Level.call) - 1)), errors, flags);
+            try p.parseSuffix(expr, @as(Level, @enumFromInt(@intFromEnum(Level.call) - 1)), errors, flags);
             switch (expr.data) {
                 .e_call => |ex| {
                     ex.can_be_unwrapped_if_unused = .if_unused;
```
```diff
@@ -64,7 +66,7 @@ pub fn Parse(
             }
         }
 
-        return try p.parseSuffix(expr, level, errors, flags);
+        try p.parseSuffix(expr, level, errors, flags);
     }
 
     pub fn parseYieldExpr(p: *P, loc: logger.Loc) !ExprNodeIndex {
```
```diff
@@ -343,10 +345,13 @@ pub fn Parse(
             // We don't know yet whether these are arguments or expressions, so parse
             p.latest_arrow_arg_loc = p.lexer.loc();
 
-            var item = try p.parseExprOrBindings(.comma, &errors);
+            try items_list.ensureUnusedCapacity(1);
+            const item: *Expr = &items_list.unusedCapacitySlice()[0];
+            try p.parseExprOrBindings(.comma, &errors, item);
+            items_list.items.len += 1;
 
             if (is_spread) {
-                item = p.newExpr(E.Spread{ .value = item }, loc);
+                item.* = p.newExpr(E.Spread{ .value = item.* }, loc);
             }
 
             // Skip over types
```
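Instead of parsing into a local `item` and appending it afterwards (one more copy of a large `Expr`), the hunk above reserves a slot in `items_list`, hands the parser a pointer to that slot, and then bumps the length. A small sketch of the same pattern against `std.ArrayListUnmanaged`, with a hypothetical `Item`/`parseItem` standing in for Bun's `Expr` and parser:

```zig
const std = @import("std");

// Stand-in for a large AST node; the real Expr is much bigger.
const Item = struct {
    value: u64 = 0,
    pad: [56]u8 = undefined,
};

// Hypothetical "parser" that writes its result through an out pointer.
fn parseItem(seed: u64, out: *Item) void {
    out.* = .{ .value = seed * 2 };
}

test "construct directly into the list's unused capacity" {
    const allocator = std.testing.allocator;
    var list: std.ArrayListUnmanaged(Item) = .{};
    defer list.deinit(allocator);

    // Before: `var tmp: Item = ...; try list.append(allocator, tmp);`
    // copies the temporary into the list. Reserving the slot first lets
    // the parser fill the final storage directly.
    try list.ensureUnusedCapacity(allocator, 1);
    const slot: *Item = &list.unusedCapacitySlice()[0];
    parseItem(21, slot);
    list.items.len += 1;

    try std.testing.expectEqual(@as(u64, 42), list.items[0].value);
}
```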
```diff
@@ -359,11 +364,9 @@ pub fn Parse(
             // There may be a "=" after the type (but not after an "as" cast)
             if (is_typescript_enabled and p.lexer.token == .t_equals and !p.forbid_suffix_after_as_loc.eql(p.lexer.loc())) {
                 try p.lexer.next();
-                item = Expr.assign(item, try p.parseExpr(.comma));
+                item.* = Expr.assign(item.*, try p.parseExpr(.comma));
             }
 
-            items_list.append(item) catch unreachable;
-
             if (p.lexer.token != .t_comma) {
                 break;
             }
```
```diff
@@ -675,7 +678,7 @@ pub fn Parse(
             try p.lexer.next();
 
             const raw2 = p.lexer.raw();
-            const value = if (p.lexer.token == .t_identifier and strings.eqlComptime(raw2, "using")) value: {
+            var value = if (p.lexer.token == .t_identifier and strings.eqlComptime(raw2, "using")) value: {
                 // const using_loc = p.saveExprCommentsHere();
                 const using_range = p.lexer.range();
                 try p.lexer.next();
```
```diff
@@ -711,13 +714,15 @@ pub fn Parse(
                 if (p.lexer.token == .t_asterisk_asterisk) {
                     try p.lexer.unexpected();
                 }
-                const expr = p.newExpr(
-                    E.Await{ .value = try p.parseSuffix(value, .prefix, null, .none) },
+                try p.parseSuffix(&value, .prefix, null, .none);
+                var expr = p.newExpr(
+                    E.Await{ .value = value },
                     token_range.loc,
                 );
+                try p.parseSuffix(&expr, .lowest, null, .none);
                 return ExprOrLetStmt{
                     .stmt_or_expr = js_ast.StmtOrExpr{
-                        .expr = try p.parseSuffix(expr, .lowest, null, .none),
+                        .expr = expr,
                     },
                 };
             } else {
```
```diff
@@ -730,12 +735,13 @@ pub fn Parse(
 
         // Parse the remainder of this expression that starts with an identifier
         const ref = try p.storeNameInRef(raw);
-        const expr = p.newExpr(E.Identifier{ .ref = ref }, token_range.loc);
-        return ExprOrLetStmt{
+        var result = ExprOrLetStmt{
             .stmt_or_expr = js_ast.StmtOrExpr{
-                .expr = try p.parseSuffix(expr, .lowest, null, .none),
+                .expr = p.newExpr(E.Identifier{ .ref = ref }, token_range.loc),
             },
         };
+        try p.parseSuffix(&result.stmt_or_expr.expr, .lowest, null, .none);
+        return result;
     }
 
     pub fn parseBinding(p: *P, comptime opts: ParseBindingOptions) anyerror!Binding {
```
Four file diffs suppressed because they are too large.
```diff
@@ -24,7 +24,9 @@ pub fn ParseTypescript(
             // }
             //
             // This matches the behavior of the TypeScript compiler.
-            try decorators.append(try p.parseExprWithFlags(.new, Expr.EFlags.ts_decorator));
+            try decorators.ensureUnusedCapacity(1);
+            try p.parseExprWithFlags(.new, Expr.EFlags.ts_decorator, &decorators.unusedCapacitySlice()[0]);
+            decorators.items.len += 1;
         }
 
         return decorators.items;
```
```diff
@@ -25,15 +25,12 @@ pub fn VisitExpr(
             p.log.addError(p.source, expr.loc, "Invalid assignment target") catch unreachable;
         }
-
         // Output.print("\nVisit: {s} - {d}\n", .{ @tagName(expr.data), expr.loc.start });
-        switch (@as(Expr.Tag, expr.data)) {
-            inline else => |tag| {
-                if (@hasDecl(visitors, @tagName(tag))) {
-                    return @field(visitors, @tagName(tag))(p, expr, in);
-                }
-                return expr;
-            },
-        }
+        return switch (@as(Expr.Tag, expr.data)) {
+            inline else => |tag| if (comptime @hasDecl(visitors, @tagName(tag)))
+                @field(visitors, @tagName(tag))(p, expr, in)
+            else
+                expr,
+        };
     }
 
     const visitors = struct {
```
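The `VisitExpr` hunk folds the tag dispatch into a single `return switch` expression; the mechanism itself is unchanged: an `inline else` prong is instantiated once per union tag at compile time, and `@hasDecl`/`@field` pick a handler by tag name. A self-contained sketch of that dispatch style (hypothetical `Shape`/`handlers`, not Bun's visitor table):

```zig
const std = @import("std");

const Shape = union(enum) {
    circle: f64, // radius
    square: f64, // side length
    point: void,
};

// Handlers are looked up by tag name; tags without a handler fall through.
const handlers = struct {
    pub fn circle(radius: f64) f64 {
        return std.math.pi * radius * radius;
    }
    pub fn square(side: f64) f64 {
        return side * side;
    }
};

fn area(shape: Shape) f64 {
    return switch (shape) {
        // `inline else` instantiates this prong once per tag at compile time,
        // so @hasDecl/@field can select the matching handler by name.
        inline else => |payload, tag| if (comptime @hasDecl(handlers, @tagName(tag)))
            @field(handlers, @tagName(tag))(payload)
        else
            0.0,
    };
}

test "dispatch by tag name" {
    try std.testing.expectApproxEqAbs(@as(f64, 4.0), area(.{ .square = 2.0 }), 1e-9);
    try std.testing.expectEqual(@as(f64, 0.0), area(.point));
}
```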
```diff
@@ -1034,7 +1034,7 @@ pub const FetchTasklet = struct {
     pub fn get(
         allocator: std.mem.Allocator,
         globalThis: *jsc.JSGlobalObject,
-        fetch_options: FetchOptions,
+        fetch_options: *const FetchOptions,
         promise: jsc.JSPromise.Strong,
     ) !*FetchTasklet {
         var jsc_vm = globalThis.bunVM();
@@ -1270,7 +1270,7 @@ pub const FetchTasklet = struct {
     pub fn queue(
         allocator: std.mem.Allocator,
         global: *JSGlobalObject,
-        fetch_options: FetchOptions,
+        fetch_options: *const FetchOptions,
         promise: jsc.JSPromise.Strong,
     ) !*FetchTasklet {
         http.HTTPThread.init(&.{});
@@ -2625,7 +2625,7 @@ pub fn Bun__fetch_(
     _ = FetchTasklet.queue(
         allocator,
         globalThis,
-        .{
+        &.{
             .method = method,
             .url = url,
             .headers = headers orelse Headers{
```
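The fetch hunks make the same move at an API boundary: `FetchOptions` is now received as `*const FetchOptions`, and the call site passes `&.{ ... }` so the options literal is materialized once and only its address is handed to `queue`. A minimal sketch with a hypothetical `Options` struct:

```zig
const std = @import("std");

// Hypothetical options bag; the real FetchOptions carries many more fields.
const Options = struct {
    method: []const u8 = "GET",
    follow_redirects: bool = true,
    timeout_ms: u32 = 30_000,
};

// Taking `*const Options` means the (potentially large) struct is not
// copied into the callee's frame; callers can pass `&.{ ... }` to hand
// over the address of an anonymous literal.
fn totalTimeout(opts: *const Options, retries: u32) u64 {
    return @as(u64, opts.timeout_ms) * (retries + 1);
}

pub fn main() void {
    // `&.{ ... }` takes the address of an anonymous Options literal;
    // fields not mentioned keep their defaults.
    const worst_case = totalTimeout(&.{ .method = "POST", .timeout_ms = 5_000 }, 2);
    std.debug.print("worst-case wait: {d}ms\n", .{worst_case});
}
```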
File diff suppressed because it is too large.