Files
bun.sh/src/ast/parseImportExport.zig
pfg 05d0475c6c Update to zig 0.15.2 (#24204)
Fixes ENG-21287

Build times, from `bun run build && echo '//' >> src/main.zig && time
bun run build`

|Platform|0.14.1|0.15.2|Speedup|
|-|-|-|-|
|macos debug asan|126.90s|106.27s|1.19x|
|macos debug noasan|60.62s|50.85s|1.19x|
|linux debug asan|292.77s|241.45s|1.21x|
|linux debug noasan|146.58s|130.94s|1.12x|
|linux debug use_llvm=false|n/a|78.27s|1.87x|
|windows debug asan|177.13s|142.55s|1.24x|

Runtime performance:

- next build memory usage may have gone up by 5%. Otherwise seems the
same. Some code with writers may have gotten slower, especially one
instance of a counting writer and a few instances of unbuffered writers
that now have vtable overhead.
- File size reduced by 800kb (from 100.2mb to 99.4mb)

Improvements:

- `@export` hack is no longer needed for watch
- the native x86_64 backend builds faster for linux. To use it, set `use_llvm`
to false and `no_link_obj` to false. Also set `ASAN_OPTIONS=detect_leaks=0`,
otherwise it will spam the output with tens of thousands of lines of
debug info errors. You may need to use the zig lldb fork for debugging.
- zig test-obj, which we will be able to use for zig unit tests

Still an issue:

- false 'dependency loop' errors remain in watch mode
- watch mode crashes observed

Follow-up:

- [ ] search `comptime Writer: type` and `comptime W: type` and remove them
- [ ] remove format_mode in our zig fork
- [ ] remove deprecated.zig autoFormatLabelFallback
- [ ] remove deprecated.zig autoFormatLabel
- [ ] remove deprecated.BufferedWriter and BufferedReader
- [ ] remove override_no_export_cpp_apis as it is no longer needed
- [ ] css Parser(W) -> Parser, and remove all the comptime writer: type
params
- [ ] remove deprecated writer fully

Files that add lines:

```
649     src/deprecated.zig
167     scripts/pack-codegen-for-zig-team.ts
54      scripts/cleartrace-impl.js
46      scripts/cleartrace.ts
43      src/windows.zig
18      src/fs.zig
17      src/bun.js/ConsoleObject.zig
16      src/output.zig
12      src/bun.js/test/debug.zig
12      src/bun.js/node/node_fs.zig
8       src/env_loader.zig
7       src/css/printer.zig
7       src/cli/init_command.zig
7       src/bun.js/node.zig
6       src/string/escapeRegExp.zig
6       src/install/PnpmMatcher.zig
5       src/bun.js/webcore/Blob.zig
4       src/crash_handler.zig
4       src/bun.zig
3       src/install/lockfile/bun.lock.zig
3       src/cli/update_interactive_command.zig
3       src/cli/pack_command.zig
3       build.zig
2       src/Progress.zig
2       src/install/lockfile/lockfile_json_stringify_for_debugging.zig
2       src/css/small_list.zig
2       src/bun.js/webcore/prompt.zig
1       test/internal/ban-words.test.ts
1       test/internal/ban-limits.json
1       src/watcher/WatcherTrace.zig
1       src/transpiler.zig
1       src/shell/builtin/cp.zig
1       src/js_printer.zig
1       src/io/PipeReader.zig
1       src/install/bin.zig
1       src/css/selectors/selector.zig
1       src/cli/run_command.zig
1       src/bun.js/RuntimeTranspilerStore.zig
1       src/bun.js/bindings/JSRef.zig
1       src/bake/DevServer.zig
```

Files that remove lines:

```
-1      src/test/recover.zig
-1      src/sql/postgres/SocketMonitor.zig
-1      src/sql/mysql/MySQLRequestQueue.zig
-1      src/sourcemap/CodeCoverage.zig
-1      src/css/values/color_js.zig
-1      src/compile_target.zig
-1      src/bundler/linker_context/convertStmtsForChunk.zig
-1      src/bundler/bundle_v2.zig
-1      src/bun.js/webcore/blob/read_file.zig
-1      src/ast/base.zig
-2      src/sql/postgres/protocol/ArrayList.zig
-2      src/shell/builtin/mkdir.zig
-2      src/install/PackageManager/patchPackage.zig
-2      src/install/PackageManager/PackageManagerDirectories.zig
-2      src/fmt.zig
-2      src/css/declaration.zig
-2      src/css/css_parser.zig
-2      src/collections/baby_list.zig
-2      src/bun.js/bindings/ZigStackFrame.zig
-2      src/ast/E.zig
-3      src/StandaloneModuleGraph.zig
-3      src/deps/picohttp.zig
-3      src/deps/libuv.zig
-3      src/btjs.zig
-4      src/threading/Futex.zig
-4      src/shell/builtin/touch.zig
-4      src/meta.zig
-4      src/install/lockfile.zig
-4      src/css/selectors/parser.zig
-5      src/shell/interpreter.zig
-5      src/css/error.zig
-5      src/bun.js/web_worker.zig
-5      src/bun.js.zig
-6      src/cli/test_command.zig
-6      src/bun.js/VirtualMachine.zig
-6      src/bun.js/uuid.zig
-6      src/bun.js/bindings/JSValue.zig
-9      src/bun.js/test/pretty_format.zig
-9      src/bun.js/api/BunObject.zig
-14     src/install/install_binding.zig
-14     src/fd.zig
-14     src/bun.js/node/path.zig
-14     scripts/pack-codegen-for-zig-team.sh
-17     src/bun.js/test/diff_format.zig
```

`git diff --numstat origin/main...HEAD | awk '{ print ($1-$2)"\t"$3 }' |
sort -rn`

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
Co-authored-by: Meghan Denny <meghan@bun.com>
Co-authored-by: taylor.fish <contact@taylor.fish>
2025-11-10 14:38:26 -08:00

438 lines
19 KiB
Zig

/// Instantiates the import/export parsing routines for one parser
/// configuration. The three comptime flags select the matching variant of
/// `js_parser.NewParser_`, so each TypeScript/JSX/scan-only combination gets
/// its own specialized copy of these functions.
pub fn ParseImportExport(
    comptime parser_feature__typescript: bool,
    comptime parser_feature__jsx: JSXTransformType,
    comptime parser_feature__scan_only: bool,
) type {
    return struct {
        const P = js_parser.NewParser_(parser_feature__typescript, parser_feature__jsx, parser_feature__scan_only);
        const is_typescript_enabled = P.is_typescript_enabled;
        const only_scan_imports_and_do_not_visit = P.only_scan_imports_and_do_not_visit;

        /// Parses a dynamic `import(...)` expression or `import.meta`.
        /// Note: The caller has already parsed the "import" keyword.
        pub fn parseImportExpr(noalias p: *P, loc: logger.Loc, level: Level) anyerror!Expr {
            // Parse an "import.meta" expression
            if (p.lexer.token == .t_dot) {
                p.esm_import_keyword = js_lexer.rangeOfIdentifier(p.source, loc);
                try p.lexer.next();
                if (p.lexer.isContextualKeyword("meta")) {
                    try p.lexer.next();
                    p.has_import_meta = true;
                    return p.newExpr(E.ImportMeta{}, loc);
                } else {
                    try p.lexer.expectedString("\"meta\"");
                }
            }

            // A bare `import(...)` call is only valid at call precedence or
            // below; anything tighter needs explicit parentheses.
            if (level.gt(.call)) {
                const r = js_lexer.rangeOfIdentifier(p.source, loc);
                p.log.addRangeError(p.source, r, "Cannot use an \"import\" expression here without parentheses") catch unreachable;
            }

            // allow "in" inside call arguments;
            // saved and restored around the argument list
            const old_allow_in = p.allow_in;
            p.allow_in = true;

            p.lexer.preserve_all_comments_before = true;
            try p.lexer.expect(.t_open_paren);
            // const comments = try p.lexer.comments_to_preserve_before.toOwnedSlice();
            p.lexer.comments_to_preserve_before.clearRetainingCapacity();
            p.lexer.preserve_all_comments_before = false;

            // The module specifier expression (parsed at comma precedence so a
            // trailing "," ends it rather than forming a sequence expression).
            const value = try p.parseExpr(.comma);

            // Optional second argument, e.g. import attributes/assertions.
            var import_options = Expr.empty;
            if (p.lexer.token == .t_comma) {
                // "import('./foo.json', )"
                try p.lexer.next();

                if (p.lexer.token != .t_close_paren) {
                    // "import('./foo.json', { assert: { type: 'json' } })"
                    import_options = try p.parseExpr(.comma);

                    if (p.lexer.token == .t_comma) {
                        // "import('./foo.json', { assert: { type: 'json' } }, )"
                        try p.lexer.next();
                    }
                }
            }

            try p.lexer.expect(.t_close_paren);

            p.allow_in = old_allow_in;

            if (comptime only_scan_imports_and_do_not_visit) {
                // Scan-only mode: register the dynamic import record immediately
                // when the specifier is a present, UTF-8 string literal, since
                // the visit pass that would otherwise handle it is skipped.
                if (value.data == .e_string and value.data.e_string.isUTF8() and value.data.e_string.isPresent()) {
                    const import_record_index = p.addImportRecord(.dynamic, value.loc, value.data.e_string.slice(p.allocator));

                    return p.newExpr(E.Import{
                        .expr = value,
                        // .leading_interior_comments = comments,
                        .import_record_index = import_record_index,
                        .options = import_options,
                    }, loc);
                }
            }

            // _ = comments; // TODO: leading_interior comments
            // maxInt(u32) is the sentinel for "no import record assigned yet"
            // (presumably filled in later during the visit pass — the scan-only
            // branch above shows what a real index looks like).
            return p.newExpr(E.Import{
                .expr = value,
                // .leading_interior_comments = comments,
                .import_record_index = std.math.maxInt(u32),
                .options = import_options,
            }, loc);
        }

        /// Parses the braced list of a named import:
        /// `import { a, b as c, type d } from 'mod'`.
        /// The caller has already consumed everything before the "{".
        /// Returns the clause items plus formatting/type-only metadata.
        pub fn parseImportClause(
            p: *P,
        ) !ImportClause {
            var items = ListManaged(js_ast.ClauseItem).init(p.allocator);
            try p.lexer.expect(.t_open_brace);
            var is_single_line = !p.lexer.has_newline_before;
            // this variable should not exist if we're not in a typescript file
            // (without TypeScript the `if` has no else branch, so the variable
            // is void; every write to it sits behind a comptime-false condition
            // and is pruned, so it never needs a bool)
            var had_type_only_imports = if (comptime is_typescript_enabled)
                false;

            while (p.lexer.token != .t_close_brace) {
                // The alias may be a keyword;
                // remember whether it was a plain identifier so we can require
                // an "as" alias for keywords below.
                const isIdentifier = p.lexer.token == .t_identifier;
                const alias_loc = p.lexer.loc();
                const alias = try p.parseClauseAlias("import");
                var name = LocRef{ .loc = alias_loc, .ref = try p.storeNameInRef(alias) };
                var original_name = alias;
                try p.lexer.next();

                // TypeScript: "type" begins a type-only import specifier unless
                // it is immediately followed by "," or "}" (then it is just an
                // import named "type").
                const probably_type_only_import = if (comptime is_typescript_enabled)
                    strings.eqlComptime(alias, "type") and
                        p.lexer.token != .t_comma and
                        p.lexer.token != .t_close_brace
                else
                    false;

                // "import { type xx } from 'mod'"
                // "import { type xx as yy } from 'mod'"
                // "import { type 'xx' as yy } from 'mod'"
                // "import { type as } from 'mod'"
                // "import { type as as } from 'mod'"
                // "import { type as as as } from 'mod'"
                if (probably_type_only_import) {
                    if (p.lexer.isContextualKeyword("as")) {
                        try p.lexer.next();
                        if (p.lexer.isContextualKeyword("as")) {
                            original_name = p.lexer.identifier;
                            name = LocRef{ .loc = p.lexer.loc(), .ref = try p.storeNameInRef(original_name) };
                            try p.lexer.next();

                            if (p.lexer.token == .t_identifier) {
                                // "import { type as as as } from 'mod'"
                                // "import { type as as foo } from 'mod'"
                                // type-only: record nothing, just flag it
                                had_type_only_imports = true;
                                try p.lexer.next();
                            } else {
                                // "import { type as as } from 'mod'"
                                // value import of "as" renamed to "as"
                                try items.append(.{
                                    .alias = alias,
                                    .alias_loc = alias_loc,
                                    .name = name,
                                    .original_name = original_name,
                                });
                            }
                        } else if (p.lexer.token == .t_identifier) {
                            had_type_only_imports = true;

                            // "import { type as xxx } from 'mod'"
                            original_name = p.lexer.identifier;
                            name = LocRef{ .loc = p.lexer.loc(), .ref = try p.storeNameInRef(original_name) };
                            try p.lexer.expect(.t_identifier);

                            if (isEvalOrArguments(original_name)) {
                                // NOTE(review): this message omits the quotes
                                // around {s} that the sibling branch below uses —
                                // possibly unintentional; confirm before changing.
                                const r = p.source.rangeOfString(name.loc);
                                try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot use {s} as an identifier here", .{original_name});
                            }

                            try items.append(.{
                                .alias = alias,
                                .alias_loc = alias_loc,
                                .name = name,
                                .original_name = original_name,
                            });
                        }
                    } else {
                        const is_identifier = p.lexer.token == .t_identifier;

                        // "import { type xx } from 'mod'"
                        // "import { type xx as yy } from 'mod'"
                        // "import { type if as yy } from 'mod'"
                        // "import { type 'xx' as yy } from 'mod'"
                        // type-only: parse and discard the specifier
                        _ = try p.parseClauseAlias("import");
                        try p.lexer.next();

                        if (p.lexer.isContextualKeyword("as")) {
                            try p.lexer.next();
                            try p.lexer.expect(.t_identifier);
                        } else if (!is_identifier) {
                            // An import where the name is a keyword must have an alias
                            try p.lexer.expectedString("\"as\"");
                        }
                        had_type_only_imports = true;
                    }
                } else {
                    if (p.lexer.isContextualKeyword("as")) {
                        try p.lexer.next();
                        original_name = p.lexer.identifier;
                        name = LocRef{ .loc = alias_loc, .ref = try p.storeNameInRef(original_name) };
                        try p.lexer.expect(.t_identifier);
                    } else if (!isIdentifier) {
                        // An import where the name is a keyword must have an alias
                        try p.lexer.expectedString("\"as\"");
                    }

                    // Reject forbidden names
                    if (isEvalOrArguments(original_name)) {
                        const r = js_lexer.rangeOfIdentifier(p.source, name.loc);
                        try p.log.addRangeErrorFmt(p.source, r, p.allocator, "Cannot use \"{s}\" as an identifier here", .{original_name});
                    }

                    try items.append(js_ast.ClauseItem{
                        .alias = alias,
                        .alias_loc = alias_loc,
                        .name = name,
                        .original_name = original_name,
                    });
                }

                if (p.lexer.token != .t_comma) {
                    break;
                }

                // Newlines on either side of a separating comma force
                // multi-line formatting.
                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }
                try p.lexer.next();
                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }
            }

            if (p.lexer.has_newline_before) {
                is_single_line = false;
            }

            try p.lexer.expect(.t_close_brace);
            return ImportClause{
                .items = items.items,
                .is_single_line = is_single_line,
                // the comptime select mirrors the void-variable trick above
                .had_type_only_imports = if (comptime is_typescript_enabled)
                    had_type_only_imports
                else
                    false,
            };
        }

        /// Parses the braced list of a named export:
        /// `export { a, b as c, type d }` (possibly followed by `from 'mod'`).
        /// Keywords are tentatively accepted as names; if no "from" follows,
        /// the first keyword seen is reported as a syntax error at the end.
        pub fn parseExportClause(p: *P) !ExportClauseResult {
            var items = ListManaged(js_ast.ClauseItem).initCapacity(p.allocator, 1) catch unreachable;
            try p.lexer.expect(.t_open_brace);
            var is_single_line = !p.lexer.has_newline_before;
            // .start == 0 doubles as "no keyword seen yet"
            var first_non_identifier_loc = logger.Loc{ .start = 0 };
            var had_type_only_exports = false;

            while (p.lexer.token != .t_close_brace) {
                var alias = try p.parseClauseAlias("export");
                var alias_loc = p.lexer.loc();

                const name = LocRef{
                    .loc = alias_loc,
                    .ref = p.storeNameInRef(alias) catch unreachable,
                };
                const original_name = alias;

                // The name can actually be a keyword if we're really an "export from"
                // statement. However, we won't know until later. Allow keywords as
                // identifiers for now and throw an error later if there's no "from".
                //
                // // This is fine
                // export { default } from 'path'
                //
                // // This is a syntax error
                // export { default }
                //
                if (p.lexer.token != .t_identifier and first_non_identifier_loc.start == 0) {
                    first_non_identifier_loc = p.lexer.loc();
                }
                try p.lexer.next();

                if (comptime is_typescript_enabled) {
                    // Same "type" disambiguation as in parseImportClause.
                    if (strings.eqlComptime(alias, "type") and p.lexer.token != .t_comma and p.lexer.token != .t_close_brace) {
                        if (p.lexer.isContextualKeyword("as")) {
                            try p.lexer.next();
                            if (p.lexer.isContextualKeyword("as")) {
                                alias = try p.parseClauseAlias("export");
                                alias_loc = p.lexer.loc();
                                try p.lexer.next();

                                if (p.lexer.token != .t_comma and p.lexer.token != .t_close_brace) {
                                    // "export { type as as as }"
                                    // "export { type as as foo }"
                                    // "export { type as as 'foo' }"
                                    // type-only: parse and discard
                                    _ = p.parseClauseAlias("export") catch "";
                                    had_type_only_exports = true;
                                    try p.lexer.next();
                                } else {
                                    // "export { type as as }"
                                    // value export of "type" renamed to "as"
                                    items.append(js_ast.ClauseItem{
                                        .alias = alias,
                                        .alias_loc = alias_loc,
                                        .name = name,
                                        .original_name = original_name,
                                    }) catch unreachable;
                                }
                            } else if (p.lexer.token != .t_comma and p.lexer.token != .t_close_brace) {
                                // "export { type as xxx }"
                                // "export { type as 'xxx' }"
                                // value export of "type" renamed to xxx
                                alias = try p.parseClauseAlias("export");
                                alias_loc = p.lexer.loc();
                                try p.lexer.next();

                                items.append(js_ast.ClauseItem{
                                    .alias = alias,
                                    .alias_loc = alias_loc,
                                    .name = name,
                                    .original_name = original_name,
                                }) catch unreachable;
                            } else {
                                // "export { type as }" — type-only export of "as"
                                had_type_only_exports = true;
                            }
                        } else {
                            // The name can actually be a keyword if we're really an "export from"
                            // statement. However, we won't know until later. Allow keywords as
                            // identifiers for now and throw an error later if there's no "from".
                            //
                            // // This is fine
                            // export { default } from 'path'
                            //
                            // // This is a syntax error
                            // export { default }
                            //
                            if (p.lexer.token != .t_identifier and first_non_identifier_loc.start == 0) {
                                first_non_identifier_loc = p.lexer.loc();
                            }

                            // "export { type xx }"
                            // "export { type xx as yy }"
                            // "export { type xx as if }"
                            // "export { type default } from 'path'"
                            // "export { type default as if } from 'path'"
                            // "export { type xx as 'yy' }"
                            // "export { type 'xx' } from 'mod'"
                            // type-only: parse and discard name (and alias)
                            _ = p.parseClauseAlias("export") catch "";
                            try p.lexer.next();

                            if (p.lexer.isContextualKeyword("as")) {
                                try p.lexer.next();
                                _ = p.parseClauseAlias("export") catch "";
                                try p.lexer.next();
                            }

                            had_type_only_exports = true;
                        }
                    } else {
                        // Ordinary export specifier with optional "as" rename.
                        if (p.lexer.isContextualKeyword("as")) {
                            try p.lexer.next();
                            alias = try p.parseClauseAlias("export");
                            alias_loc = p.lexer.loc();

                            try p.lexer.next();
                        }

                        items.append(js_ast.ClauseItem{
                            .alias = alias,
                            .alias_loc = alias_loc,
                            .name = name,
                            .original_name = original_name,
                        }) catch unreachable;
                    }
                } else {
                    // Non-TypeScript: ordinary specifier with optional rename.
                    if (p.lexer.isContextualKeyword("as")) {
                        try p.lexer.next();
                        alias = try p.parseClauseAlias("export");
                        alias_loc = p.lexer.loc();

                        try p.lexer.next();
                    }

                    items.append(js_ast.ClauseItem{
                        .alias = alias,
                        .alias_loc = alias_loc,
                        .name = name,
                        .original_name = original_name,
                    }) catch unreachable;
                }

                // we're done if there's no comma
                if (p.lexer.token != .t_comma) {
                    break;
                }

                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }
                try p.lexer.next();
                if (p.lexer.has_newline_before) {
                    is_single_line = false;
                }
            }

            if (p.lexer.has_newline_before) {
                is_single_line = false;
            }
            try p.lexer.expect(.t_close_brace);

            // Throw an error here if we found a keyword earlier and this isn't an
            // "export from" statement after all
            if (first_non_identifier_loc.start != 0 and !p.lexer.isContextualKeyword("from")) {
                const r = js_lexer.rangeOfIdentifier(p.source, first_non_identifier_loc);
                try p.lexer.addRangeError(r, "Expected identifier but found \"{s}\"", .{p.source.textForRange(r)}, true);
                return error.SyntaxError;
            }

            return ExportClauseResult{
                .clauses = items.items,
                .is_single_line = is_single_line,
                .had_type_only_exports = had_type_only_exports,
            };
        }
    };
}
const std = @import("std");
const ListManaged = std.array_list.Managed;
const bun = @import("bun");
const assert = bun.assert;
const js_lexer = bun.js_lexer;
const logger = bun.logger;
const strings = bun.strings;
const js_ast = bun.ast;
const E = js_ast.E;
const Expr = js_ast.Expr;
const LocRef = js_ast.LocRef;
const Op = js_ast.Op;
const Level = js_ast.Op.Level;
const js_parser = bun.js_parser;
const ExportClauseResult = js_parser.ExportClauseResult;
const ImportClause = js_parser.ImportClause;
const JSXTransformType = js_parser.JSXTransformType;
const isEvalOrArguments = js_parser.isEvalOrArguments;
const options = js_parser.options;