Fixes ENG-21287
Build times, measured with `bun run build && echo '//' >> src/main.zig && time bun run build`:
|Platform|Zig 0.14.1|Zig 0.15.2|Speedup|
|-|-|-|-|
|macos debug asan|126.90s|106.27s|1.19x|
|macos debug noasan|60.62s|50.85s|1.19x|
|linux debug asan|292.77s|241.45s|1.21x|
|linux debug noasan|146.58s|130.94s|1.12x|
|linux debug use_llvm=false|n/a|78.27s|1.87x|
|windows debug asan|177.13s|142.55s|1.24x|
Runtime performance:
- `next build` memory usage may have gone up by 5%; otherwise it seems the same. Some code that uses writers may have gotten slower, especially one instance of a counting writer and a few instances of unbuffered writers that now have vtable overhead (see the sketch after this list).
- File size reduced by 800 KB (from 100.2 MB to 99.4 MB)
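For context on the writer note above, here is a minimal sketch (not code from this PR) of the difference, assuming Zig 0.15's `std.Io.Writer`: the old pattern is generic over the writer type and calls it directly, while the new one funnels every writer through a single vtable-based interface, so an unbuffered writer pays an indirect call per write unless a buffer amortizes it.

```zig
const std = @import("std");

// Old pattern (Zig 0.14): generic over the writer type; each instantiation is
// monomorphized and calls the concrete writer directly.
fn writeNameOld(writer: anytype, name: []const u8) !void {
    try writer.writeAll("name = ");
    try writer.writeAll(name);
    try writer.writeAll("\n");
}

// New pattern (Zig 0.15): one non-generic function taking *std.Io.Writer.
// Writes land in the writer's buffer; only when it fills does the call go
// through the vtable, which is the overhead an unbuffered writer pays on
// every single write.
fn writeNameNew(w: *std.Io.Writer, name: []const u8) std.Io.Writer.Error!void {
    try w.writeAll("name = ");
    try w.writeAll(name);
    try w.writeAll("\n");
}

test writeNameNew {
    var buf: [64]u8 = undefined;
    var w = std.Io.Writer.fixed(&buf);
    try writeNameNew(&w, "bun");
    try std.testing.expectEqualStrings("name = bun\n", w.buffered());
}
```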
Improvements:
- the `@export` hack is no longer needed for watch mode
- the native x86_64 backend makes Linux debug builds faster (the `use_llvm=false` row above). To use it, set `use_llvm` to false and `no_link_obj` to false, and set `ASAN_OPTIONS=detect_leaks=0`, otherwise the output is spammed with tens of thousands of lines of debug info errors. Debugging may require the Zig lldb fork. A sketch of what `use_llvm` controls follows after this list.
- `zig test-obj`, which we will be able to use for Zig unit tests
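As a rough illustration of the `use_llvm` knob, here is a hypothetical, minimal `build.zig`; Bun's actual build scripts wire these options differently, and the option name here is an assumption:

```zig
const std = @import("std");

pub fn build(b: *std.Build) void {
    // Hypothetical option name for this sketch only.
    const use_llvm = b.option(bool, "use_llvm", "Use the LLVM backend (false selects the self-hosted backend)") orelse true;

    const exe = b.addExecutable(.{
        .name = "example",
        .root_module = b.createModule(.{
            .root_source_file = b.path("src/main.zig"),
            .target = b.standardTargetOptions(.{}),
            .optimize = b.standardOptimizeOption(.{}),
        }),
    });

    // With use_llvm = false on x86_64 Linux, Zig's native backend compiles the
    // object code, which is what makes the debug builds in the table faster.
    exe.use_llvm = use_llvm;

    b.installArtifact(exe);
}
```

In this sketch it would be invoked as `zig build -Duse_llvm=false`.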
Still an issue:
- false 'dependency loop' errors remain in watch mode
- watch mode crashes observed
Follow-up:
- [ ] search `comptime Writer: type` and `comptime W: type` and remove them (see the sketch after this list)
- [ ] remove format_mode in our zig fork
- [ ] remove deprecated.zig autoFormatLabelFallback
- [ ] remove deprecated.zig autoFormatLabel
- [ ] remove deprecated.BufferedWriter and BufferedReader
- [ ] remove override_no_export_cpp_apis as it is no longer needed
- [ ] css Parser(W) -> Parser, and remove all the comptime writer: type
params
- [ ] remove deprecated writer fully
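For the `Parser(W) -> Parser` and `comptime Writer: type` items, the shape of the change is roughly the following (hypothetical names, assuming Zig 0.15's `std.Io.Writer`; this is not the actual css code):

```zig
const std = @import("std");

// Before: the type is generic over the writer, so every writer type stamps
// out its own copy of the parser and everything it calls.
fn GenericParser(comptime W: type) type {
    return struct {
        dest: *W,

        pub fn writeIdent(self: *@This(), ident: []const u8) !void {
            try self.dest.writeAll(ident);
        }
    };
}

// After: one concrete type holding a *std.Io.Writer; any writer is passed
// through the same vtable-based interface.
const Parser = struct {
    dest: *std.Io.Writer,

    pub fn writeIdent(self: *Parser, ident: []const u8) std.Io.Writer.Error!void {
        try self.dest.writeAll(ident);
    }
};
```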
Files that add lines:
```
649 src/deprecated.zig
167 scripts/pack-codegen-for-zig-team.ts
54 scripts/cleartrace-impl.js
46 scripts/cleartrace.ts
43 src/windows.zig
18 src/fs.zig
17 src/bun.js/ConsoleObject.zig
16 src/output.zig
12 src/bun.js/test/debug.zig
12 src/bun.js/node/node_fs.zig
8 src/env_loader.zig
7 src/css/printer.zig
7 src/cli/init_command.zig
7 src/bun.js/node.zig
6 src/string/escapeRegExp.zig
6 src/install/PnpmMatcher.zig
5 src/bun.js/webcore/Blob.zig
4 src/crash_handler.zig
4 src/bun.zig
3 src/install/lockfile/bun.lock.zig
3 src/cli/update_interactive_command.zig
3 src/cli/pack_command.zig
3 build.zig
2 src/Progress.zig
2 src/install/lockfile/lockfile_json_stringify_for_debugging.zig
2 src/css/small_list.zig
2 src/bun.js/webcore/prompt.zig
1 test/internal/ban-words.test.ts
1 test/internal/ban-limits.json
1 src/watcher/WatcherTrace.zig
1 src/transpiler.zig
1 src/shell/builtin/cp.zig
1 src/js_printer.zig
1 src/io/PipeReader.zig
1 src/install/bin.zig
1 src/css/selectors/selector.zig
1 src/cli/run_command.zig
1 src/bun.js/RuntimeTranspilerStore.zig
1 src/bun.js/bindings/JSRef.zig
1 src/bake/DevServer.zig
```
Files that remove lines:
```
-1 src/test/recover.zig
-1 src/sql/postgres/SocketMonitor.zig
-1 src/sql/mysql/MySQLRequestQueue.zig
-1 src/sourcemap/CodeCoverage.zig
-1 src/css/values/color_js.zig
-1 src/compile_target.zig
-1 src/bundler/linker_context/convertStmtsForChunk.zig
-1 src/bundler/bundle_v2.zig
-1 src/bun.js/webcore/blob/read_file.zig
-1 src/ast/base.zig
-2 src/sql/postgres/protocol/ArrayList.zig
-2 src/shell/builtin/mkdir.zig
-2 src/install/PackageManager/patchPackage.zig
-2 src/install/PackageManager/PackageManagerDirectories.zig
-2 src/fmt.zig
-2 src/css/declaration.zig
-2 src/css/css_parser.zig
-2 src/collections/baby_list.zig
-2 src/bun.js/bindings/ZigStackFrame.zig
-2 src/ast/E.zig
-3 src/StandaloneModuleGraph.zig
-3 src/deps/picohttp.zig
-3 src/deps/libuv.zig
-3 src/btjs.zig
-4 src/threading/Futex.zig
-4 src/shell/builtin/touch.zig
-4 src/meta.zig
-4 src/install/lockfile.zig
-4 src/css/selectors/parser.zig
-5 src/shell/interpreter.zig
-5 src/css/error.zig
-5 src/bun.js/web_worker.zig
-5 src/bun.js.zig
-6 src/cli/test_command.zig
-6 src/bun.js/VirtualMachine.zig
-6 src/bun.js/uuid.zig
-6 src/bun.js/bindings/JSValue.zig
-9 src/bun.js/test/pretty_format.zig
-9 src/bun.js/api/BunObject.zig
-14 src/install/install_binding.zig
-14 src/fd.zig
-14 src/bun.js/node/path.zig
-14 scripts/pack-codegen-for-zig-team.sh
-17 src/bun.js/test/diff_format.zig
```
`git diff --numstat origin/main...HEAD | awk '{ print ($1-$2)"\t"$3 }' |
sort -rn`
---------
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
Co-authored-by: Meghan Denny <meghan@bun.com>
Co-authored-by: taylor.fish <contact@taylor.fish>
The file below (424 lines, 16 KiB, Zig):
```zig
// This file is the old linker, used by Bun.Transpiler.

pub const CSSResolveError = error{ResolveMessage};

pub const OnImportCallback = *const fn (resolve_result: *const Resolver.Result, import_record: *ImportRecord, origin: URL) void;

pub const Linker = struct {
    const HashedFileNameMap = std.AutoHashMap(u64, string);
    const ThisLinker = @This();
    allocator: std.mem.Allocator,
    options: *Options.BundleOptions,
    fs: *Fs.FileSystem,
    log: *logger.Log,
    resolve_queue: *ResolveQueue,
    resolver: *ResolverType,
    resolve_results: *_transpiler.ResolveResults,
    any_needs_runtime: bool = false,
    runtime_import_record: ?ImportRecord = null,
    hashed_filenames: HashedFileNameMap,
    import_counter: usize = 0,
    tagged_resolutions: TaggedResolution = TaggedResolution{},

    plugin_runner: ?*PluginRunner = null,

    pub const runtime_source_path = "bun:wrap";

    pub const TaggedResolution = struct {
        react_refresh: ?Resolver.Result = null,

        // These tags cannot safely be used
        // Projects may use different JSX runtimes across folders
        // jsx_import: ?Resolver.Result = null,
        // jsx_classic: ?Resolver.Result = null,
    };

    pub fn init(
        allocator: std.mem.Allocator,
        log: *logger.Log,
        resolve_queue: *ResolveQueue,
        options: *Options.BundleOptions,
        resolver: *ResolverType,
        resolve_results: *_transpiler.ResolveResults,
        fs: *Fs.FileSystem,
    ) ThisLinker {
        relative_paths_list = ImportPathsList.init(allocator);

        return ThisLinker{
            .allocator = allocator,
            .options = options,
            .fs = fs,
            .log = log,
            .resolve_queue = resolve_queue,
            .resolver = resolver,
            .resolve_results = resolve_results,
            .hashed_filenames = HashedFileNameMap.init(allocator),
        };
    }

    pub fn getModKey(
        this: *ThisLinker,
        file_path: Fs.Path,
        fd: ?FileDescriptorType,
    ) !Fs.FileSystem.RealFS.ModKey {
        var file: std.fs.File = if (fd) |_fd| _fd.stdFile() else try std.fs.cwd().openFile(file_path.text, .{ .mode = .read_only });
        Fs.FileSystem.setMaxFd(file.handle);
        const modkey = try Fs.FileSystem.RealFS.ModKey.generate(&this.fs.fs, file_path.text, file);

        if (fd == null)
            file.close();
        return modkey;
    }

    pub fn getHashedFilename(
        this: *ThisLinker,
        file_path: Fs.Path,
        fd: ?FileDescriptorType,
    ) !string {
        if (Transpiler.isCacheEnabled) {
            const hashed = bun.hash(file_path.text);
            const hashed_result = try this.hashed_filenames.getOrPut(hashed);
            if (hashed_result.found_existing) {
                return hashed_result.value_ptr.*;
            }
        }

        const modkey = try this.getModKey(file_path, fd);
        const hash_name = modkey.hashName(file_path.text);

        if (Transpiler.isCacheEnabled) {
            const hashed = bun.hash(file_path.text);
            try this.hashed_filenames.put(hashed, try this.allocator.dupe(u8, hash_name));
        }

        return hash_name;
    }

    // This modifies the Ast in-place!
    // But more importantly, this does the following:
    // - Wrap CommonJS files
    pub fn link(
        linker: *ThisLinker,
        file_path: Fs.Path,
        result: *_transpiler.ParseResult,
        origin: URL,
        comptime import_path_format: Options.BundleOptions.ImportPathFormat,
        comptime ignore_runtime: bool,
        comptime is_bun: bool,
    ) !void {
        const source_dir = file_path.sourceDir();
        var externals = std.array_list.Managed(u32).init(linker.allocator);
        var had_resolve_errors = false;

        const is_deferred = result.pending_imports.len > 0;

        // Step 1. Resolve imports & requires
        switch (result.loader) {
            .jsx, .js, .ts, .tsx => {
                for (result.ast.import_records.slice(), 0..) |*import_record, record_i| {
                    if (import_record.is_unused or
                        (is_bun and is_deferred and !result.isPendingImport(@intCast(record_i)))) continue;

                    const record_index = record_i;
                    if (comptime !ignore_runtime) {
                        if (strings.eqlComptime(import_record.path.namespace, "runtime")) {
                            if (import_path_format == .absolute_url) {
                                import_record.path = Fs.Path.initWithNamespace(try origin.joinAlloc(linker.allocator, "", "", "bun:wrap", "", ""), "bun");
                            } else {
                                import_record.path = try linker.generateImportPath(
                                    source_dir,
                                    Linker.runtime_source_path,
                                    false,
                                    "bun",
                                    origin,
                                    import_path_format,
                                );
                            }

                            result.ast.runtime_import_record_id = @intCast(record_index);
                            result.ast.needs_runtime = true;
                            continue;
                        }
                    }

                    if (comptime is_bun) {
                        if (jsc.ModuleLoader.HardcodedModule.Alias.get(import_record.path.text, linker.options.target, .{ .rewrite_jest_for_tests = linker.options.rewrite_jest_for_tests })) |replacement| {
                            if (replacement.tag == .builtin and import_record.kind.isCommonJS())
                                continue;
                            import_record.path.text = replacement.path;
                            import_record.tag = replacement.tag;
                            import_record.is_external_without_side_effects = true;
                            continue;
                        }
                        if (strings.startsWith(import_record.path.text, "node:")) {
                            // if a module is not found here, it is not found at all
                            // so we can just disable it
                            had_resolve_errors = try whenModuleNotFound(linker, import_record, result, is_bun);

                            if (had_resolve_errors) return error.ResolveMessage;
                            continue;
                        }

                        if (strings.hasPrefixComptime(import_record.path.text, "bun:")) {
                            import_record.path = Fs.Path.init(import_record.path.text["bun:".len..]);
                            import_record.path.namespace = "bun";

                            // don't link bun
                            continue;
                        }

                        // Resolve dynamic imports lazily for perf
                        if (import_record.kind == .dynamic) {
                            continue;
                        }
                    }

                    if (linker.plugin_runner) |runner| {
                        if (PluginRunner.couldBePlugin(import_record.path.text)) {
                            if (try runner.onResolve(
                                import_record.path.text,
                                file_path.text,
                                linker.log,
                                import_record.range.loc,
                                if (is_bun)
                                    jsc.JSGlobalObject.BunPluginTarget.bun
                                else if (linker.options.target == .browser)
                                    jsc.JSGlobalObject.BunPluginTarget.browser
                                else
                                    jsc.JSGlobalObject.BunPluginTarget.node,
                            )) |path| {
                                import_record.path = try linker.generateImportPath(
                                    source_dir,
                                    path.text,
                                    false,
                                    path.namespace,
                                    origin,
                                    import_path_format,
                                );
                                import_record.print_namespace_in_path = true;
                                continue;
                            }
                        }
                    }
                }
            },

            else => {},
        }
        if (had_resolve_errors) return error.ResolveMessage;
        externals.clearAndFree();
    }

    fn whenModuleNotFound(
        linker: *ThisLinker,
        import_record: *ImportRecord,
        result: *_transpiler.ParseResult,
        comptime is_bun: bool,
    ) !bool {
        if (import_record.handles_import_errors) {
            import_record.path.is_disabled = true;
            return false;
        }

        if (comptime is_bun) {
            // make these happen at runtime
            if (import_record.kind == .require or import_record.kind == .require_resolve) {
                return false;
            }
        }

        if (import_record.path.text.len > 0 and Resolver.isPackagePath(import_record.path.text)) {
            if (linker.options.target == .browser and Options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
                try linker.log.addResolveError(
                    &result.source,
                    import_record.range,
                    linker.allocator,
                    "Could not resolve: \"{s}\". Try setting --target=\"node\"",
                    .{import_record.path.text},
                    import_record.kind,
                    error.ModuleNotFound,
                );
            } else {
                try linker.log.addResolveError(
                    &result.source,
                    import_record.range,
                    linker.allocator,
                    "Could not resolve: \"{s}\". Maybe you need to \"bun install\"?",
                    .{import_record.path.text},
                    import_record.kind,
                    error.ModuleNotFound,
                );
            }
        } else {
            try linker.log.addResolveError(
                &result.source,
                import_record.range,
                linker.allocator,
                "Could not resolve: \"{s}\"",
                .{
                    import_record.path.text,
                },
                import_record.kind,
                error.ModuleNotFound,
            );
        }
        return true;
    }

    const ImportPathsList = allocators.BSSStringList(512, 128);
    pub var relative_paths_list: *ImportPathsList = undefined;

    pub fn generateImportPath(
        linker: *ThisLinker,
        source_dir: string,
        source_path: string,
        use_hashed_name: bool,
        namespace: string,
        origin: URL,
        comptime import_path_format: Options.BundleOptions.ImportPathFormat,
    ) !Fs.Path {
        switch (import_path_format) {
            .absolute_path => {
                if (strings.eqlComptime(namespace, "node")) {
                    return Fs.Path.initWithNamespace(source_path, "node");
                }

                if (strings.eqlComptime(namespace, "bun") or strings.eqlComptime(namespace, "file") or namespace.len == 0) {
                    const relative_name = linker.fs.relative(source_dir, source_path);
                    return Fs.Path.initWithPretty(source_path, relative_name);
                } else {
                    return Fs.Path.initWithNamespace(source_path, namespace);
                }
            },
            .relative => {
                var relative_name = linker.fs.relative(source_dir, source_path);

                var pretty: string = undefined;
                if (use_hashed_name) {
                    var basepath = Fs.Path.init(source_path);
                    const basename = try linker.getHashedFilename(basepath, null);
                    const dir = basepath.name.dirWithTrailingSlash();
                    var _pretty = try linker.allocator.alloc(u8, dir.len + basename.len + basepath.name.ext.len);
                    bun.copy(u8, _pretty, dir);
                    var remaining_pretty = _pretty[dir.len..];
                    bun.copy(u8, remaining_pretty, basename);
                    remaining_pretty = remaining_pretty[basename.len..];
                    bun.copy(u8, remaining_pretty, basepath.name.ext);
                    pretty = _pretty;
                    relative_name = try linker.allocator.dupe(u8, relative_name);
                } else {
                    if (relative_name.len > 1 and !(relative_name[0] == std.fs.path.sep or relative_name[0] == '.')) {
                        pretty = try strings.concat(linker.allocator, &.{ "./", relative_name });
                    } else {
                        pretty = try linker.allocator.dupe(u8, relative_name);
                    }

                    relative_name = pretty;
                }

                return Fs.Path.initWithPretty(pretty, relative_name);
            },

            .absolute_url => {
                if (strings.eqlComptime(namespace, "node")) {
                    if (comptime Environment.isDebug) bun.assert(strings.eqlComptime(source_path[0..5], "node:"));

                    return Fs.Path.init(try std.fmt.allocPrint(
                        linker.allocator,
                        // assumption: already starts with "node:"
                        "{s}/{s}",
                        .{
                            strings.withoutTrailingSlash(origin.href),
                            strings.withoutLeadingSlash(source_path),
                        },
                    ));
                } else {
                    var absolute_pathname = Fs.PathName.init(source_path);

                    if (!linker.options.preserve_extensions) {
                        if (linker.options.out_extensions.get(absolute_pathname.ext)) |ext| {
                            absolute_pathname.ext = ext;
                        }
                    }

                    var base = linker.fs.relativeTo(source_path);
                    if (strings.lastIndexOfChar(base, '.')) |dot| {
                        base = base[0..dot];
                    }

                    const dirname = std.fs.path.dirname(base) orelse "";

                    var basename = std.fs.path.basename(base);

                    if (use_hashed_name) {
                        const basepath = Fs.Path.init(source_path);

                        basename = try linker.getHashedFilename(basepath, null);
                    }

                    return Fs.Path.init(try origin.joinAlloc(
                        linker.allocator,
                        "",
                        dirname,
                        basename,
                        absolute_pathname.ext,
                        source_path,
                    ));
                }
            },

            else => unreachable,
        }
    }

    pub fn resolveResultHashKey(linker: *ThisLinker, resolve_result: *const Resolver.Result) u64 {
        const path = resolve_result.pathConst() orelse unreachable;
        var hash_key = path.text;

        // Shorter hash key is faster to hash
        if (strings.startsWith(path.text, linker.fs.top_level_dir)) {
            hash_key = path.text[linker.fs.top_level_dir.len..];
        }

        return bun.hash(hash_key);
    }

    pub fn enqueueResolveResult(linker: *ThisLinker, resolve_result: *const Resolver.Result) !bool {
        const hash_key = linker.resolveResultHashKey(resolve_result);

        const get_or_put_entry = try linker.resolve_results.getOrPut(hash_key);

        if (!get_or_put_entry.found_existing) {
            try linker.resolve_queue.writeItem(resolve_result.*);
        }

        return !get_or_put_entry.found_existing;
    }
};

const string = []const u8;

const Fs = @import("./fs.zig");
const Options = @import("./options.zig");
const std = @import("std");
const URL = @import("./url.zig").URL;

const _import_record = @import("./import_record.zig");
const ImportRecord = _import_record.ImportRecord;

const Resolver = @import("./resolver/resolver.zig");
const ResolverType = Resolver.Resolver;

const bun = @import("bun");
const Environment = bun.Environment;
const FileDescriptorType = bun.FileDescriptor;
const allocators = bun.allocators;
const jsc = bun.jsc;
const logger = bun.logger;
const strings = bun.strings;

const _transpiler = bun.transpiler;
const PluginRunner = bun.transpiler.PluginRunner;
const ResolveQueue = _transpiler.ResolveQueue;
const Transpiler = _transpiler.Transpiler;
```