Compare commits


15 Commits

Author SHA1 Message Date
snoglobe
70b5e9900c fix log note 2024-09-30 17:42:02 -07:00
snoglobe
6b85ae4848 don't dealloc (c.allocator is an arena) 2024-09-30 13:45:34 -07:00
snoglobe
8984e218d1 tests 2024-09-30 13:45:34 -07:00
snwy
282fbd76cd Update src/bundler/bundle_v2.zig
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-09-27 20:16:18 -07:00
snoglobe
bb81cd0341 minor things i missed 2024-09-27 20:10:14 -07:00
snwy
d322a246b4 Update src/bundler/bundle_v2.zig
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-09-27 20:06:10 -07:00
snoglobe
42b2476ca2 more tests 2024-09-27 20:03:44 -07:00
snoglobe
b6add9c531 - don't get whole AST
- optimize usages of ast.items
- don't make stack copy of source
- pass slices to validateTLA
- don't leak memory on notes
2024-09-27 19:56:00 -07:00
snoglobe
6928acbb34 move input_files.items(.source) out of loops 2024-09-27 18:14:36 -07:00
snwy
202f1cd178 Merge branch 'main' into snoglobe/tla 2024-09-27 18:03:26 -07:00
snoglobe
66d6301e49 renaming + better loop 2024-09-27 18:01:19 -07:00
snoglobe
b3ea2efb0e fix tests, don't return empty chunk when handling error in link 2024-09-27 16:31:28 -07:00
snoglobe
a187e39457 un-todo relevant tests 2024-09-27 16:08:55 -07:00
snoglobe
79a5804e40 validateTLA now works 2024-09-27 16:05:47 -07:00
snoglobe
6932e09b0a top-level await 2024-09-25 18:20:37 -07:00
4 changed files with 239 additions and 6 deletions
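
For orientation, the new validateTLA pass makes the linker report an error when a require() sits at the top of a chain of static imports that reaches a module containing top-level await; a dynamic import() of the same chain stays legal. A minimal sketch of the rejected pattern (illustrative file names, mirroring the TopLevelAwaitForbiddenRequire-style tests below):

// entry.js
require('./a')

// a.js
import './b'

// b.js
export const x = await Promise.resolve(1) // top-level await here makes the require() in entry.js a build error

The allowed dynamic-import counterpart is exercised by the TopLevelAwaitAllowedImportWithoutSplitting and TopLevelAwaitAllowedImportWithSplitting tests further down.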

View File

@@ -4249,7 +4249,7 @@ pub const LinkerContext = struct {
// Stop now if there were errors
if (this.log.hasErrors()) {
return &[_]Chunk{};
return error.BuildFailed;
}
if (comptime FeatureFlags.help_catch_memory_issues) {
@@ -4846,6 +4846,10 @@ pub const LinkerContext = struct {
var named_imports: []js_ast.Ast.NamedImports = this.graph.ast.items(.named_imports);
var flags: []JSMeta.Flags = this.graph.meta.items(.flags);
const tla_keywords = this.parse_graph.ast.items(.top_level_await_keyword);
const tla_checks = this.parse_graph.ast.items(.tla_check);
const input_files = this.parse_graph.input_files.items(.source);
const export_star_import_records: [][]u32 = this.graph.ast.items(.export_star_import_records);
const exports_refs: []Ref = this.graph.ast.items(.exports_ref);
const module_refs: []Ref = this.graph.ast.items(.module_ref);
@@ -4863,6 +4867,9 @@ pub const LinkerContext = struct {
if (!(id < import_records_list.len)) continue;
const import_records: []ImportRecord = import_records_list[id].slice();
_ = this.validateTLA(id, tla_keywords, tla_checks, input_files, import_records, flags);
for (import_records) |record| {
if (!record.source_index.isValid()) {
continue;
@@ -7546,6 +7553,97 @@ pub const LinkerContext = struct {
return hasher.digest();
}
pub fn validateTLA(
c: *LinkerContext,
source_index: Index.Int,
tla_keywords: []Logger.Range,
tla_checks: []js_ast.TlaCheck,
input_files: []Logger.Source,
import_records: []ImportRecord,
meta_flags: []JSMeta.Flags,
) js_ast.TlaCheck {
var result_tla_check: *js_ast.TlaCheck = &tla_checks[source_index];
if (result_tla_check.depth == 0) {
result_tla_check.depth = 1;
if (tla_keywords[source_index].len > 0) {
result_tla_check.parent = source_index;
}
for (import_records, 0..) |record, import_record_index| {
if (Index.isValid(record.source_index) and (record.kind == .require or record.kind == .stmt)) {
const parent = c.validateTLA(record.source_index.get(), tla_keywords, tla_checks, input_files, import_records, meta_flags);
if (Index.isInvalid(Index.init(parent.parent))) {
continue;
}
// Follow any import chains
if (record.kind == .stmt and (Index.isInvalid(Index.init(result_tla_check.parent)) or parent.depth < result_tla_check.depth)) {
result_tla_check.depth = parent.depth + 1;
result_tla_check.parent = record.source_index.get();
result_tla_check.import_record_index = @intCast(import_record_index);
continue;
}
// Require of a top-level await chain is forbidden
if (record.kind == .require) {
var notes = std.ArrayList(Logger.Data).init(c.allocator);
var tla_pretty_path: string = "";
var other_source_index = record.source_index.get();
// Build up a chain of notes for all of the imports
while (true) {
const parent_result_tla_keyword = tla_keywords[other_source_index];
const parent_tla_check = tla_checks[other_source_index];
const parent_source_index = other_source_index;
if (parent_result_tla_keyword.len > 0) {
tla_pretty_path = input_files[other_source_index].path.pretty;
notes.append(Logger.rangeData(&input_files[other_source_index], parent_result_tla_keyword, std.fmt.allocPrint(c.allocator, "The top-level await in {s} is here:", .{tla_pretty_path}) catch bun.outOfMemory())) catch bun.outOfMemory();
break;
}
if (!Index.isValid(Index.init(parent_tla_check.parent))) {
notes.append(Logger.Data{
.text = "unexpected invalid index",
}) catch bun.outOfMemory();
break;
}
other_source_index = parent_tla_check.parent;
notes.append(Logger.Data{
.text = std.fmt.allocPrint(c.allocator, "The file {s} imports the file {s} here:", .{
input_files[parent_source_index].path.pretty,
input_files[other_source_index].path.pretty,
}) catch bun.outOfMemory(),
}) catch bun.outOfMemory();
}
const source: *const Logger.Source = &input_files[source_index];
const imported_pretty_path = source.path.pretty;
const text: string = if (strings.eql(imported_pretty_path, tla_pretty_path))
std.fmt.allocPrint(c.allocator, "This require call is not allowed because the imported file \"{s}\" contains a top-level await", .{imported_pretty_path}) catch bun.outOfMemory()
else
std.fmt.allocPrint(c.allocator, "This require call is not allowed because the transitive dependency \"{s}\" contains a top-level await", .{tla_pretty_path}) catch bun.outOfMemory();
c.log.addRangeErrorWithNotes(source, record.range, text, notes.items) catch bun.outOfMemory();
}
}
}
// Make sure that if we wrap this module in a closure, the closure is also
// async. This happens when you call "import()" on this module and code
// splitting is off.
if (Index.isValid(Index.init(result_tla_check.parent))) {
meta_flags[source_index].is_async_or_has_async_dependency = true;
}
}
return result_tla_check.*;
}
pub fn generateEntryPointTailJS(
c: *LinkerContext,
toCommonJSRef: Ref,
@@ -9484,6 +9582,8 @@ pub const LinkerContext = struct {
const trace = tracer(@src(), "generateChunksInParallel");
defer trace.end();
bun.assert(chunks.len > 0);
{
debug(" START {d} renamers", .{chunks.len});
defer debug(" DONE {d} renamers", .{chunks.len});
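
For readers who would rather not parse the Zig, here is a rough TypeScript sketch of what validateTLA computes. All names and data structures below are illustrative, not Bun's API: each module gets a record of how deep it sits in a chain of static imports that leads to a top-level await, and a require() of any module on such a chain is reported, with notes walking the chain back to the await.

// Illustrative model only -- not Bun's data structures or API.
type ImportKind = "stmt" | "require" | "dynamic";
interface ImportRecord { target: number; kind: ImportKind; }
interface Module { path: string; hasTopLevelAwait: boolean; imports: ImportRecord[]; }

const INVALID = -1;
interface TlaCheck { depth: number; parent: number; importRecordIndex: number; }

function validateTLA(modules: Module[], checks: TlaCheck[], errors: string[], index: number): TlaCheck {
  const check = checks[index];
  if (check.depth === 0) {
    check.depth = 1;
    if (modules[index].hasTopLevelAwait) check.parent = index;

    modules[index].imports.forEach((record, recordIndex) => {
      if (record.kind !== "stmt" && record.kind !== "require") return;
      const child = validateTLA(modules, checks, errors, record.target);
      if (child.parent === INVALID) return; // nothing below uses top-level await

      if (record.kind === "stmt") {
        // A static import just extends the chain toward the nearest top-level await.
        if (check.parent === INVALID || child.depth < check.depth) {
          check.depth = child.depth + 1;
          check.parent = record.target;
          check.importRecordIndex = recordIndex;
        }
        return;
      }

      // record.kind === "require": forbidden. Walk the chain to describe it,
      // mirroring the "The file X imports the file Y here:" notes above.
      const chain: string[] = [];
      let cursor = record.target;
      while (!modules[cursor].hasTopLevelAwait && checks[cursor].parent !== INVALID) {
        chain.push(`${modules[cursor].path} imports ${modules[checks[cursor].parent].path}`);
        cursor = checks[cursor].parent;
      }
      errors.push(
        `require of "${modules[record.target].path}" is not allowed: ` +
          `"${modules[cursor].path}" contains a top-level await` +
          (chain.length > 0 ? ` (${chain.join("; ")})` : ""),
      );
    });
    // In the real code, a valid parent also sets is_async_or_has_async_dependency,
    // so an import()-ed module is wrapped in an async closure when splitting is off.
  }
  return check;
}

Seeding every entry of checks with { depth: 0, parent: INVALID, importRecordIndex: INVALID } and calling validateTLA once per module id reproduces the shape of the linker loop above, which calls this.validateTLA(id, ...) before following each module's import records.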

View File

@@ -6915,6 +6915,12 @@ pub const Ast = struct {
}
};
pub const TlaCheck = struct {
depth: u32 = 0,
parent: Index.Int = Index.invalid.get(),
import_record_index: Index.Int = Index.invalid.get(),
};
/// Like Ast but slimmer and for bundling only.
///
/// On Linux, the hottest function in the bundler is:
@@ -6945,6 +6951,8 @@ pub const BundledAst = struct {
module_ref: Ref = Ref.None,
wrapper_ref: Ref = Ref.None,
require_ref: Ref = Ref.None,
top_level_await_keyword: logger.Range,
tla_check: TlaCheck = .{},
// These are used when bundling. They are filled in during the parser pass
// since we already have to traverse the AST then anyway and the parser pass
@@ -7014,6 +7022,7 @@ pub const BundledAst = struct {
.module_ref = this.module_ref,
.wrapper_ref = this.wrapper_ref,
.require_ref = this.require_ref,
.top_level_await_keyword = this.top_level_await_keyword,
// These are used when bundling. They are filled in during the parser pass
// since we already have to traverse the AST then anyway and the parser pass
@@ -7066,7 +7075,7 @@ pub const BundledAst = struct {
.module_ref = ast.module_ref,
.wrapper_ref = ast.wrapper_ref,
.require_ref = ast.require_ref,
.top_level_await_keyword = ast.top_level_await_keyword,
// These are used when bundling. They are filled in during the parser pass
// since we already have to traverse the AST then anyway and the parser pass
// is conveniently fully parallelized.

View File

@@ -23527,7 +23527,7 @@ fn NewParser_(
.uses_require_ref = p.runtime_imports.__require != null and
p.symbols.items[p.runtime_imports.__require.?.ref.inner_index].use_count_estimate > 0,
.commonjs_module_exports_assigned_deoptimized = p.commonjs_module_exports_assigned_deoptimized,
// .top_Level_await_keyword = p.top_level_await_keyword,
.top_level_await_keyword = p.top_level_await_keyword,
.commonjs_named_exports = p.commonjs_named_exports,
.has_commonjs_export_names = p.has_commonjs_export_names,

View File

@@ -3495,7 +3495,6 @@ describe("bundler", () => {
bundling: false,
});
itBundled("default/TopLevelAwaitForbiddenRequire", {
todo: true,
files: {
"/entry.js": /* js */ `
require('./a')
@@ -3513,12 +3512,12 @@ describe("bundler", () => {
"/entry.js": [
'This require call is not allowed because the transitive dependency "c.js" contains a top-level await',
'This require call is not allowed because the transitive dependency "c.js" contains a top-level await',
'This require call is not allowed because the transitive dependency "entry.js" contains a top-level await',
'This require call is not allowed because the transitive dependency "c.js" contains a top-level await',
'This require call is not allowed because the imported file "entry.js" contains a top-level await',
],
},
});
itBundled("default/TopLevelAwaitAllowedImportWithoutSplitting", {
todo: true,
files: {
"/entry.js": /* js */ `
import('./a')
@@ -3536,6 +3535,131 @@ describe("bundler", () => {
stdout: "0\n1",
},
});
itBundled("default/TopLevelAwaitImport", {
files: {
"/entry.js": /* js */ `
const { a } = await import('./a.js');
console.log(a);
`,
"/a.js": /* js */ `
async function five() {
return 5;
}
export const a = await five();
`,
},
format: "esm",
run: {
stdout: "5",
},
});
itBundled("default/TopLevelAwaitWithStaticImport", {
// Test static import of a module that uses top-level await
files: {
"/entry.js": `
import { a } from './a.js';
console.log('Entry', a);
`,
"/a.js": `
async function getValue() {
return await Promise.resolve('value from a');
}
export const a = await getValue();
console.log('a.js loaded');
`,
},
format: "esm",
run: {
stdout: "a.js loaded\nEntry value from a",
},
});
itBundled("default/TopLevelAwaitWithNestedDynamicImport", {
// Test nested dynamic imports with top-level await
files: {
"/entry.js": `
console.log('Start Entry');
const res = await import('./a.js');
console.log('Entry', res.a);
`,
"/a.js": `
console.log('Start a.js');
const { b } = await import('./b.js');
export const a = 'a.js plus ' + b;
`,
"/b.js": `
console.log('Start b.js');
export const b = 'value from b.js';
`,
},
format: "esm",
run: {
stdout: `Start Entry
Start a.js
Start b.js
Entry a.js plus value from b.js`,
},
});
itBundled("default/TopLevelAwaitWithNestedRequire", {
// Test nested dynamic imports with top-level await
files: {
"/entry.js": `
console.log('Start Entry');
const res = await import('./a.js');
console.log('Entry', res.a);
`,
"/a.js": `
console.log('Start a.js');
const { b } = require('./b.js');
export const a = 'a.js plus ' + b;
`,
"/b.js": `
console.log('Start b.js');
export const b = 'value from b.js';
`,
},
format: "esm",
run: {
stdout: `Start Entry
Start a.js
Start b.js
Entry a.js plus value from b.js`,
},
});
itBundled("default/TopLevelAwaitWithNestedImportAndRequire", {
// Test nested dynamic imports with top-level await
files: {
"/entry.js": `
console.log('Start Entry');
const res = await import('./a.js');
console.log('Entry', res.a);
`,
"/a.js": `
console.log('Start a.js');
const { b } = require('./b.js');
async function getValue() {
return 'value from a.js plus ' + b;
}
export const a = await getValue();
`,
"/b.js": `
console.log('Start b.js');
import { c } from './c.js';
export const b = 'value from b.js plus ' + c;
`,
"/c.js": `
console.log('Start c.js');
async function getValue() {
return 'value from c.js';
}
export const c = await getValue();
`,
},
format: "esm",
bundleErrors: {
"/a.js": ['This require call is not allowed because the transitive dependency "c.js" contains a top-level await'],
},
});
itBundled("default/TopLevelAwaitAllowedImportWithSplitting", {
files: {
"/entry.js": /* js */ `