diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index ccbb0842a4..dcc5639bd0 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -4862,9 +4862,14 @@ pub const LinkerContext = struct { // does it have a JS AST? if (!(id < import_records_list.len)) continue; - _ = this.validateTLA(id); - + const tla_keywords = this.parse_graph.ast.items(.top_level_await_keyword); + const tla_checks = this.parse_graph.ast.items(.tla_check); + const input_files = this.parse_graph.input_files.items(.source); + const meta_flags = this.graph.meta.items(.flags); const import_records: []ImportRecord = import_records_list[id].slice(); + + _ = this.validateTLA(id, tla_keywords, tla_checks, input_files, import_records, meta_flags); + for (import_records) |record| { if (!record.source_index.isValid()) { continue; @@ -7548,26 +7553,35 @@ pub const LinkerContext = struct { return hasher.digest(); } - pub fn validateTLA(c: *LinkerContext, source_index: Index.Int) js_ast.TlaCheck { - var result_tla_check: *js_ast.TlaCheck = &c.parse_graph.ast.items(.tla_check).ptr[source_index]; - const result = c.parse_graph.ast.get(source_index); - const input_files = c.parse_graph.input_files.items(.source); + pub fn validateTLA( + c: *LinkerContext, + source_index: Index.Int, + tla_keywords: []Logger.Range, + tla_checks: []js_ast.TlaCheck, + input_files: []Logger.Source, + import_records: []ImportRecord, + meta_flags: []JSMeta.Flags, + ) js_ast.TlaCheck { + var result_tla_check: *js_ast.TlaCheck = &tla_checks[source_index]; + const result_top_level_await_keyword = tla_keywords[source_index]; if (result_tla_check.depth == 0) { result_tla_check.depth = 1; - if (result.top_level_await_keyword.len > 0) { + if (result_top_level_await_keyword.len > 0) { result_tla_check.parent = source_index; } - for (result.import_records.slice(), 0..) 
|record, import_record_index| { + var notes = std.ArrayList(Logger.Data).init(c.allocator); + defer notes.deinit(); + for (import_records, 0..) |record, import_record_index| { if (Index.isValid(record.source_index) and (record.kind == .require or record.kind == .stmt)) { - const parent = c.validateTLA(record.source_index.get()); + const parent = c.validateTLA(record.source_index.get(), tla_keywords, tla_checks, input_files, c.parse_graph.ast.items(.import_records)[record.source_index.get()].slice(), meta_flags); if (Index.isInvalid(Index.init(parent.parent))) { continue; } // Follow any import chains - if (record.kind == .stmt and (Index.isInvalid(Index.init(result.tla_check.parent)) or parent.depth < result.tla_check.depth)) { + if (record.kind == .stmt and (Index.isInvalid(Index.init(result_tla_check.parent)) or parent.depth < result_tla_check.depth)) { result_tla_check.depth = parent.depth + 1; result_tla_check.parent = record.source_index.get(); result_tla_check.import_record_index = @intCast(import_record_index); @@ -7576,16 +7590,16 @@ pub const LinkerContext = struct { // Require of a top-level await chain is forbidden if (record.kind == .require) { - var notes = std.ArrayList(Logger.Data).init(c.allocator); var tla_pretty_path: string = ""; var other_source_index = record.source_index.get(); // Build up a chain of notes for all of the imports while (true) { - const parent_result = c.parse_graph.ast.get(other_source_index); + const parent_result_tla_keyword = tla_keywords[other_source_index]; + const parent_tla_check = tla_checks[other_source_index]; const parent_source_index = other_source_index; - if (parent_result.top_level_await_keyword.len > 0) { + if (parent_result_tla_keyword.len > 0) { tla_pretty_path = input_files[other_source_index].path.pretty; notes.append(Logger.Data{ .text = std.fmt.allocPrint(c.allocator, "The top-level await in {s} is here:", .{tla_pretty_path}) catch bun.outOfMemory(), @@ -7593,14 +7607,14 @@ pub const LinkerContext = struct { break; } - if 
(!Index.isValid(Index.init(parent_result.tla_check.parent))) { + if (!Index.isValid(Index.init(parent_tla_check.parent))) { notes.append(Logger.Data{ .text = "unexpected invalid index", }) catch bun.outOfMemory(); break; } - other_source_index = parent_result.tla_check.parent; + other_source_index = parent_tla_check.parent; notes.append(Logger.Data{ .text = std.fmt.allocPrint(c.allocator, "The file {s} imports the file {s} here:", .{ @@ -7610,14 +7624,16 @@ pub const LinkerContext = struct { }) catch bun.outOfMemory(); } - const imported_pretty_path = input_files[source_index].path.pretty; + const source: *Logger.Source = &input_files[source_index]; + const imported_pretty_path = source.path.pretty; const text: string = if (strings.eql(imported_pretty_path, tla_pretty_path)) std.fmt.allocPrint(c.allocator, "This require call is not allowed because the imported file \"{s}\" contains a top-level await", .{imported_pretty_path}) catch bun.outOfMemory() else std.fmt.allocPrint(c.allocator, "This require call is not allowed because the transitive dependency \"{s}\" contains a top-level await", .{tla_pretty_path}) catch bun.outOfMemory(); - const source: Logger.Source = input_files[source_index]; - c.log.addRangeErrorWithNotes(&source, record.range, text, notes.items) catch bun.outOfMemory(); + c.log.addRangeErrorWithNotes(source, record.range, text, notes.items) catch bun.outOfMemory(); + + notes.resize(0) catch bun.outOfMemory(); } } } @@ -7626,7 +7642,7 @@ pub const LinkerContext = struct { // async. This happens when you call "import()" on this module and code // splitting is off. if (Index.isValid(Index.init(result_tla_check.parent))) { - c.graph.meta.items(.flags)[source_index].is_async_or_has_async_dependency = true; + meta_flags[source_index].is_async_or_has_async_dependency = true; } }