From b5b033452f1651eb17ec0a0bf64ce886cac35291 Mon Sep 17 00:00:00 2001
From: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
Date: Mon, 2 Dec 2024 14:57:49 -0800
Subject: [PATCH] fix 14540 (#15498)

---
 src/js_lexer.zig                              | 29 +++++++++++++++----
 src/js_parser.zig                             | 27 ++++++++++-------
 .../bundler/transpiler/fixtures/9-comments.ts | 10 +++++++
 test/bundler/transpiler/transpiler.test.js    | 17 ++++++++++-
 4 files changed, 66 insertions(+), 17 deletions(-)
 create mode 100644 test/bundler/transpiler/fixtures/9-comments.ts

diff --git a/src/js_lexer.zig b/src/js_lexer.zig
index 0a4eb1a703..bcb354401f 100644
--- a/src/js_lexer.zig
+++ b/src/js_lexer.zig
@@ -273,6 +273,25 @@ fn NewLexer_(
             // }
         }
 
+        pub fn restore(this: *LexerType, original: *const LexerType) void {
+            const all_comments = this.all_comments;
+            const comments_to_preserve_before = this.comments_to_preserve_before;
+            const temp_buffer_u16 = this.temp_buffer_u16;
+            this.* = original.*;
+
+            // make sure pointers are valid
+            this.all_comments = all_comments;
+            this.comments_to_preserve_before = comments_to_preserve_before;
+            this.temp_buffer_u16 = temp_buffer_u16;
+
+            bun.debugAssert(all_comments.items.len >= original.all_comments.items.len);
+            bun.debugAssert(comments_to_preserve_before.items.len >= original.comments_to_preserve_before.items.len);
+            bun.debugAssert(temp_buffer_u16.items.len == 0 and original.temp_buffer_u16.items.len == 0);
+
+            this.all_comments.items.len = original.all_comments.items.len;
+            this.comments_to_preserve_before.items.len = original.comments_to_preserve_before.items.len;
+        }
+
         /// Look ahead at the next n codepoints without advancing the iterator.
         /// If fewer than n codepoints are available, then return the remainder of the string.
         fn peek(it: *LexerType, n: usize) string {
@@ -378,7 +397,7 @@ fn NewLexer_(
                     // 1-3 digit octal
                     var is_bad = false;
                     var value: i64 = c2 - '0';
-                    var restore = iter;
+                    var prev = iter;
 
                     _ = iterator.next(&iter) or {
                         if (value == 0) {
@@ -395,7 +414,7 @@ fn NewLexer_(
                     switch (c3) {
                         '0'...'7' => {
                             value = value * 8 + c3 - '0';
-                            restore = iter;
+                            prev = iter;
 
                             _ = iterator.next(&iter) or return lexer.syntaxError();
                             const c4 = iter.c;
@@ -405,14 +424,14 @@ fn NewLexer_(
                                     if (temp < 256) {
                                         value = temp;
                                     } else {
-                                        iter = restore;
+                                        iter = prev;
                                     }
                                 },
                                 '8', '9' => {
                                     is_bad = true;
                                 },
                                 else => {
-                                    iter = restore;
+                                    iter = prev;
                                 },
                             }
                         },
@@ -420,7 +439,7 @@ fn NewLexer_(
                             is_bad = true;
                         },
                         else => {
-                            iter = restore;
+                            iter = prev;
                         },
                     }
 
diff --git a/src/js_parser.zig b/src/js_parser.zig
index 8ed4033187..812ff3580f 100644
--- a/src/js_parser.zig
+++ b/src/js_parser.zig
@@ -780,7 +780,7 @@ pub const TypeScript = struct {
     };
 
     pub fn isTSArrowFnJSX(p: anytype) !bool {
-        var oldLexer = std.mem.toBytes(p.lexer);
+        const old_lexer = p.lexer;
         try p.lexer.next();
 
         // Look ahead to see if this should be an arrow function instead
@@ -800,7 +800,7 @@ pub const TypeScript = struct {
         }
 
         // Restore the lexer
-        p.lexer = std.mem.bytesToValue(@TypeOf(p.lexer), &oldLexer);
+        p.lexer.restore(&old_lexer);
 
         return is_ts_arrow_fn;
     }
@@ -870,11 +870,13 @@ pub const TypeScript = struct {
     }
 
     fn lookAheadNextTokenIsOpenParenOrLessThanOrDot(p: anytype) bool {
-        var old_lexer = std.mem.toBytes(p.lexer);
+        const old_lexer = p.lexer;
         const old_log_disabled = p.lexer.is_log_disabled;
         p.lexer.is_log_disabled = true;
-        defer p.lexer.is_log_disabled = old_log_disabled;
-        defer p.lexer = std.mem.bytesToValue(@TypeOf(p.lexer), &old_lexer);
+        defer {
+            p.lexer.restore(&old_lexer);
+            p.lexer.is_log_disabled = old_log_disabled;
+        }
 
         p.lexer.next() catch {};
         return switch (p.lexer.token) {
@@ -12813,7 +12815,7 @@ fn NewParser_(
         pub const Backtracking = struct {
             pub inline fn lexerBacktracker(p: *P, func: anytype, comptime ReturnType: type) ReturnType {
                 p.markTypeScriptOnly();
-                var old_lexer = std.mem.toBytes(p.lexer);
+                const old_lexer = p.lexer;
                 const old_log_disabled = p.lexer.is_log_disabled;
                 p.lexer.is_log_disabled = true;
                 defer p.lexer.is_log_disabled = old_log_disabled;
@@ -12838,7 +12840,7 @@ fn NewParser_(
                 };
 
                 if (backtrack) {
-                    p.lexer = std.mem.bytesToValue(@TypeOf(p.lexer), &old_lexer);
+                    p.lexer.restore(&old_lexer);
 
                     if (comptime FnReturnType == anyerror!bool) {
                         return false;
@@ -12858,7 +12860,7 @@ fn NewParser_(
 
             pub inline fn lexerBacktrackerWithArgs(p: *P, func: anytype, args: anytype, comptime ReturnType: type) ReturnType {
                 p.markTypeScriptOnly();
-                var old_lexer = std.mem.toBytes(p.lexer);
+                const old_lexer = p.lexer;
                 const old_log_disabled = p.lexer.is_log_disabled;
                 p.lexer.is_log_disabled = true;
 
@@ -12879,7 +12881,7 @@ fn NewParser_(
                 };
 
                 if (backtrack) {
-                    p.lexer = std.mem.bytesToValue(@TypeOf(p.lexer), &old_lexer);
+                    p.lexer.restore(&old_lexer);
                     if (comptime FnReturnType == anyerror!bool) {
                         return false;
                     }
@@ -15426,7 +15428,10 @@ fn NewParser_(
 
         p.lexer.preserve_all_comments_before = true;
         try p.lexer.expect(.t_open_paren);
-        const comments = try p.lexer.comments_to_preserve_before.toOwnedSlice();
+
+        // const comments = try p.lexer.comments_to_preserve_before.toOwnedSlice();
+        p.lexer.comments_to_preserve_before.clearRetainingCapacity();
+        p.lexer.preserve_all_comments_before = false;
 
         const value = try p.parseExpr(.comma);
 
@@ -15464,7 +15469,7 @@ fn NewParser_(
             }
         }
 
-        _ = comments; // TODO: leading_interior comments
+        // _ = comments; // TODO: leading_interior comments
 
         return p.newExpr(E.Import{
             .expr = value,
diff --git a/test/bundler/transpiler/fixtures/9-comments.ts b/test/bundler/transpiler/fixtures/9-comments.ts
new file mode 100644
index 0000000000..b730d705fc
--- /dev/null
+++ b/test/bundler/transpiler/fixtures/9-comments.ts
@@ -0,0 +1,10 @@
+var a = 0;
+// 1
+// 2
+// 3
+// 4
+// 5
+// 6
+// 7
+// 8
+if (a < 9 /* 9 */) console.log("success!");
diff --git a/test/bundler/transpiler/transpiler.test.js b/test/bundler/transpiler/transpiler.test.js
index 7bb5bb1987..8757b968ae 100644
--- a/test/bundler/transpiler/transpiler.test.js
+++ b/test/bundler/transpiler/transpiler.test.js
@@ -1,5 +1,6 @@
 import { describe, expect, it } from "bun:test";
-import { hideFromStackTrace } from "harness";
+import { hideFromStackTrace, bunExe, bunEnv } from "harness";
+import { join } from "path";
 
 describe("Bun.Transpiler", () => {
   const transpiler = new Bun.Transpiler({
@@ -3426,3 +3427,17 @@ describe("await can only be used inside an async function message", () => {
     assertError(`const foo = () => await bar();`, false);
   });
 });
+
+it("does not crash with 9 comments and typescript type skipping", () => {
+  const cmd = [bunExe(), "build", "--minify-identifiers", join(import.meta.dir, "fixtures", "9-comments.ts")];
+  const { stdout, stderr, exitCode } = Bun.spawnSync({
+    cmd,
+    stdout: "pipe",
+    stderr: "pipe",
+    env: bunEnv,
+  });
+
+  expect(stderr.toString()).toBe("");
+  expect(stdout.toString()).toContain("success!");
+  expect(exitCode).toBe(0);
+});