This commit is contained in:
Dylan Conway
2024-12-02 14:57:49 -08:00
committed by snwy
parent 3e82010aa7
commit b5b033452f
4 changed files with 66 additions and 17 deletions

View File

@@ -273,6 +273,25 @@ fn NewLexer_(
// }
}
/// Rewind this lexer to a previously captured state (`original`)
/// without a plain bitwise copy. The comment lists and the temp
/// buffer are heap-backed; a raw struct copy would leave `this`
/// pointing at whatever buffers the (possibly stale) snapshot held.
/// Instead: snapshot the live list headers, overwrite the struct,
/// re-install the live headers, then truncate the lists back to the
/// lengths recorded in `original`, discarding any comments gathered
/// after the snapshot was taken.
pub fn restore(this: *LexerType, original: *const LexerType) void {
// Keep the *current* (live) list headers before they are clobbered.
const all_comments = this.all_comments;
const comments_to_preserve_before = this.comments_to_preserve_before;
const temp_buffer_u16 = this.temp_buffer_u16;
// Bitwise-restore every other field from the snapshot.
this.* = original.*;
// make sure pointers are valid
this.all_comments = all_comments;
this.comments_to_preserve_before = comments_to_preserve_before;
this.temp_buffer_u16 = temp_buffer_u16;
// The live lists may only have grown since the snapshot; shrinking
// below the snapshot length would mean the snapshot is newer than us.
bun.debugAssert(all_comments.items.len >= original.all_comments.items.len);
bun.debugAssert(comments_to_preserve_before.items.len >= original.comments_to_preserve_before.items.len);
// The temp buffer is expected to be drained on both sides of a restore.
bun.debugAssert(temp_buffer_u16.items.len == 0 and original.temp_buffer_u16.items.len == 0);
// Drop comments collected after the snapshot (truncate, keep capacity).
this.all_comments.items.len = original.all_comments.items.len;
this.comments_to_preserve_before.items.len = original.comments_to_preserve_before.items.len;
}
/// Look ahead at the next n codepoints without advancing the iterator.
/// If fewer than n codepoints are available, then return the remainder of the string.
fn peek(it: *LexerType, n: usize) string {
@@ -378,7 +397,7 @@ fn NewLexer_(
// 1-3 digit octal
var is_bad = false;
var value: i64 = c2 - '0';
var restore = iter;
var prev = iter;
_ = iterator.next(&iter) or {
if (value == 0) {
@@ -395,7 +414,7 @@ fn NewLexer_(
switch (c3) {
'0'...'7' => {
value = value * 8 + c3 - '0';
restore = iter;
prev = iter;
_ = iterator.next(&iter) or return lexer.syntaxError();
const c4 = iter.c;
@@ -405,14 +424,14 @@ fn NewLexer_(
if (temp < 256) {
value = temp;
} else {
iter = restore;
iter = prev;
}
},
'8', '9' => {
is_bad = true;
},
else => {
iter = restore;
iter = prev;
},
}
},
@@ -420,7 +439,7 @@ fn NewLexer_(
is_bad = true;
},
else => {
iter = restore;
iter = prev;
},
}

View File

@@ -780,7 +780,7 @@ pub const TypeScript = struct {
};
pub fn isTSArrowFnJSX(p: anytype) !bool {
var oldLexer = std.mem.toBytes(p.lexer);
const old_lexer = p.lexer;
try p.lexer.next();
// Look ahead to see if this should be an arrow function instead
@@ -800,7 +800,7 @@ pub const TypeScript = struct {
}
// Restore the lexer
p.lexer = std.mem.bytesToValue(@TypeOf(p.lexer), &oldLexer);
p.lexer.restore(&old_lexer);
return is_ts_arrow_fn;
}
@@ -870,11 +870,13 @@ pub const TypeScript = struct {
}
fn lookAheadNextTokenIsOpenParenOrLessThanOrDot(p: anytype) bool {
var old_lexer = std.mem.toBytes(p.lexer);
const old_lexer = p.lexer;
const old_log_disabled = p.lexer.is_log_disabled;
p.lexer.is_log_disabled = true;
defer p.lexer.is_log_disabled = old_log_disabled;
defer p.lexer = std.mem.bytesToValue(@TypeOf(p.lexer), &old_lexer);
defer {
p.lexer.restore(&old_lexer);
p.lexer.is_log_disabled = old_log_disabled;
}
p.lexer.next() catch {};
return switch (p.lexer.token) {
@@ -12813,7 +12815,7 @@ fn NewParser_(
pub const Backtracking = struct {
pub inline fn lexerBacktracker(p: *P, func: anytype, comptime ReturnType: type) ReturnType {
p.markTypeScriptOnly();
var old_lexer = std.mem.toBytes(p.lexer);
const old_lexer = p.lexer;
const old_log_disabled = p.lexer.is_log_disabled;
p.lexer.is_log_disabled = true;
defer p.lexer.is_log_disabled = old_log_disabled;
@@ -12838,7 +12840,7 @@ fn NewParser_(
};
if (backtrack) {
p.lexer = std.mem.bytesToValue(@TypeOf(p.lexer), &old_lexer);
p.lexer.restore(&old_lexer);
if (comptime FnReturnType == anyerror!bool) {
return false;
@@ -12858,7 +12860,7 @@ fn NewParser_(
pub inline fn lexerBacktrackerWithArgs(p: *P, func: anytype, args: anytype, comptime ReturnType: type) ReturnType {
p.markTypeScriptOnly();
var old_lexer = std.mem.toBytes(p.lexer);
const old_lexer = p.lexer;
const old_log_disabled = p.lexer.is_log_disabled;
p.lexer.is_log_disabled = true;
@@ -12879,7 +12881,7 @@ fn NewParser_(
};
if (backtrack) {
p.lexer = std.mem.bytesToValue(@TypeOf(p.lexer), &old_lexer);
p.lexer.restore(&old_lexer);
if (comptime FnReturnType == anyerror!bool) {
return false;
}
@@ -15426,7 +15428,10 @@ fn NewParser_(
p.lexer.preserve_all_comments_before = true;
try p.lexer.expect(.t_open_paren);
const comments = try p.lexer.comments_to_preserve_before.toOwnedSlice();
// const comments = try p.lexer.comments_to_preserve_before.toOwnedSlice();
p.lexer.comments_to_preserve_before.clearRetainingCapacity();
p.lexer.preserve_all_comments_before = false;
const value = try p.parseExpr(.comma);
@@ -15464,7 +15469,7 @@ fn NewParser_(
}
}
_ = comments; // TODO: leading_interior comments
// _ = comments; // TODO: leading_interior comments
return p.newExpr(E.Import{
.expr = value,

View File

@@ -0,0 +1,10 @@
var a = 0;
// 1
// 2
// 3
// 4
// 5
// 6
// 7
// 8
if (a < 9 /* 9 */) console.log("success!");

View File

@@ -1,5 +1,6 @@
import { describe, expect, it } from "bun:test";
import { hideFromStackTrace } from "harness";
import { hideFromStackTrace, bunExe, bunEnv } from "harness";
import { join } from "path";
describe("Bun.Transpiler", () => {
const transpiler = new Bun.Transpiler({
@@ -3426,3 +3427,17 @@ describe("await can only be used inside an async function message", () => {
assertError(`const foo = () => await bar();`, false);
});
});
// Regression test: bundling a fixture that has nine comments ahead of a
// TypeScript construct must not crash `bun build --minify-identifiers`.
it("does not crash with 9 comments and typescript type skipping", () => {
const fixture = join(import.meta.dir, "fixtures", "9-comments.ts");
const result = Bun.spawnSync({
cmd: [bunExe(), "build", "--minify-identifiers", fixture],
stdout: "pipe",
stderr: "pipe",
env: bunEnv,
});
// A crash would surface as stderr output and/or a nonzero exit code.
expect(result.stderr.toString()).toBe("");
expect(result.stdout.toString()).toContain("success!");
expect(result.exitCode).toBe(0);
});