mirror of
https://github.com/oven-sh/bun
synced 2026-02-13 20:39:05 +00:00
Fixes ENG-21287
Build times, from `bun run build && echo '//' >> src/main.zig && time
bun run build`
|Platform|0.14.1|0.15.2|Speedup|
|-|-|-|-|
|macos debug asan|126.90s|106.27s|1.19x|
|macos debug noasan|60.62s|50.85s|1.19x|
|linux debug asan|292.77s|241.45s|1.21x|
|linux debug noasan|146.58s|130.94s|1.12x|
|linux debug use_llvm=false|n/a|78.27s|1.87x|
|windows debug asan|177.13s|142.55s|1.24x|
Runtime performance:
- next build memory usage may have gone up by 5%. Otherwise seems the
same. Some code with writers may have gotten slower, especially one
instance of a counting writer and a few instances of unbuffered writers
that now have vtable overhead.
- File size reduced by 800kb (from 100.2mb to 99.4mb)
Improvements:
- `@export` hack is no longer needed for watch
- the native x86_64 backend builds faster for Linux. To use it, set `use_llvm`
to false and `no_link_obj` to false. Also set `ASAN_OPTIONS=detect_leaks=0`,
otherwise it will spam the output with tens of thousands of lines of
debug-info errors. You may need to use the Zig lldb fork for debugging.
- zig test-obj, which we will be able to use for zig unit tests
Still an issue:
- false 'dependency loop' errors remain in watch mode
- watch mode crashes observed
Follow-up:
- [ ] search `comptime Writer: type` and `comptime W: type` and remove
- [ ] remove format_mode in our zig fork
- [ ] remove deprecated.zig autoFormatLabelFallback
- [ ] remove deprecated.zig autoFormatLabel
- [ ] remove deprecated.BufferedWriter and BufferedReader
- [ ] remove override_no_export_cpp_apis as it is no longer needed
- [ ] css Parser(W) -> Parser, and remove all the comptime writer: type
params
- [ ] remove deprecated writer fully
Files that add lines:
```
649 src/deprecated.zig
167 scripts/pack-codegen-for-zig-team.ts
54 scripts/cleartrace-impl.js
46 scripts/cleartrace.ts
43 src/windows.zig
18 src/fs.zig
17 src/bun.js/ConsoleObject.zig
16 src/output.zig
12 src/bun.js/test/debug.zig
12 src/bun.js/node/node_fs.zig
8 src/env_loader.zig
7 src/css/printer.zig
7 src/cli/init_command.zig
7 src/bun.js/node.zig
6 src/string/escapeRegExp.zig
6 src/install/PnpmMatcher.zig
5 src/bun.js/webcore/Blob.zig
4 src/crash_handler.zig
4 src/bun.zig
3 src/install/lockfile/bun.lock.zig
3 src/cli/update_interactive_command.zig
3 src/cli/pack_command.zig
3 build.zig
2 src/Progress.zig
2 src/install/lockfile/lockfile_json_stringify_for_debugging.zig
2 src/css/small_list.zig
2 src/bun.js/webcore/prompt.zig
1 test/internal/ban-words.test.ts
1 test/internal/ban-limits.json
1 src/watcher/WatcherTrace.zig
1 src/transpiler.zig
1 src/shell/builtin/cp.zig
1 src/js_printer.zig
1 src/io/PipeReader.zig
1 src/install/bin.zig
1 src/css/selectors/selector.zig
1 src/cli/run_command.zig
1 src/bun.js/RuntimeTranspilerStore.zig
1 src/bun.js/bindings/JSRef.zig
1 src/bake/DevServer.zig
```
Files that remove lines:
```
-1 src/test/recover.zig
-1 src/sql/postgres/SocketMonitor.zig
-1 src/sql/mysql/MySQLRequestQueue.zig
-1 src/sourcemap/CodeCoverage.zig
-1 src/css/values/color_js.zig
-1 src/compile_target.zig
-1 src/bundler/linker_context/convertStmtsForChunk.zig
-1 src/bundler/bundle_v2.zig
-1 src/bun.js/webcore/blob/read_file.zig
-1 src/ast/base.zig
-2 src/sql/postgres/protocol/ArrayList.zig
-2 src/shell/builtin/mkdir.zig
-2 src/install/PackageManager/patchPackage.zig
-2 src/install/PackageManager/PackageManagerDirectories.zig
-2 src/fmt.zig
-2 src/css/declaration.zig
-2 src/css/css_parser.zig
-2 src/collections/baby_list.zig
-2 src/bun.js/bindings/ZigStackFrame.zig
-2 src/ast/E.zig
-3 src/StandaloneModuleGraph.zig
-3 src/deps/picohttp.zig
-3 src/deps/libuv.zig
-3 src/btjs.zig
-4 src/threading/Futex.zig
-4 src/shell/builtin/touch.zig
-4 src/meta.zig
-4 src/install/lockfile.zig
-4 src/css/selectors/parser.zig
-5 src/shell/interpreter.zig
-5 src/css/error.zig
-5 src/bun.js/web_worker.zig
-5 src/bun.js.zig
-6 src/cli/test_command.zig
-6 src/bun.js/VirtualMachine.zig
-6 src/bun.js/uuid.zig
-6 src/bun.js/bindings/JSValue.zig
-9 src/bun.js/test/pretty_format.zig
-9 src/bun.js/api/BunObject.zig
-14 src/install/install_binding.zig
-14 src/fd.zig
-14 src/bun.js/node/path.zig
-14 scripts/pack-codegen-for-zig-team.sh
-17 src/bun.js/test/diff_format.zig
```
`git diff --numstat origin/main...HEAD | awk '{ print ($1-$2)"\t"$3 }' |
sort -rn`
---------
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
Co-authored-by: Meghan Denny <meghan@bun.com>
Co-authored-by: taylor.fish <contact@taylor.fish>
381 lines
16 KiB
Zig
381 lines
16 KiB
Zig
pub fn installHoistedPackages(
|
|
this: *PackageManager,
|
|
ctx: Command.Context,
|
|
workspace_filters: []const WorkspaceFilter,
|
|
install_root_dependencies: bool,
|
|
log_level: PackageManager.Options.LogLevel,
|
|
packages_to_install: ?[]const PackageID,
|
|
) !PackageInstall.Summary {
|
|
bun.analytics.Features.hoisted_bun_install += 1;
|
|
|
|
const original_trees = this.lockfile.buffers.trees;
|
|
const original_tree_dep_ids = this.lockfile.buffers.hoisted_dependencies;
|
|
|
|
try this.lockfile.filter(this.log, this, install_root_dependencies, workspace_filters, packages_to_install);
|
|
|
|
defer {
|
|
this.lockfile.buffers.trees = original_trees;
|
|
this.lockfile.buffers.hoisted_dependencies = original_tree_dep_ids;
|
|
}
|
|
|
|
var root_node: *Progress.Node = undefined;
|
|
var download_node: Progress.Node = undefined;
|
|
var install_node: Progress.Node = undefined;
|
|
var scripts_node: Progress.Node = undefined;
|
|
const options = &this.options;
|
|
var progress = &this.progress;
|
|
|
|
if (log_level.showProgress()) {
|
|
progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
|
|
root_node = progress.start("", 0);
|
|
download_node = root_node.start(ProgressStrings.download(), 0);
|
|
|
|
install_node = root_node.start(ProgressStrings.install(), this.lockfile.buffers.hoisted_dependencies.items.len);
|
|
scripts_node = root_node.start(ProgressStrings.script(), 0);
|
|
this.downloads_node = &download_node;
|
|
this.scripts_node = &scripts_node;
|
|
}
|
|
|
|
defer {
|
|
if (log_level.showProgress()) {
|
|
progress.root.end();
|
|
progress.* = .{};
|
|
}
|
|
}
|
|
|
|
// If there was already a valid lockfile and so we did not resolve, i.e. there was zero network activity
|
|
// the packages could still not be in the cache dir
|
|
// this would be a common scenario in a CI environment
|
|
// or if you just cloned a repo
|
|
// we want to check lazily though
|
|
// no need to download packages you've already installed!!
|
|
var new_node_modules = false;
|
|
const cwd = bun.FD.cwd();
|
|
const node_modules_folder = brk: {
|
|
// Attempt to open the existing node_modules folder
|
|
switch (bun.sys.openatOSPath(cwd, bun.OSPathLiteral("node_modules"), bun.O.DIRECTORY | bun.O.RDONLY, 0o755)) {
|
|
.result => |fd| break :brk std.fs.Dir{ .fd = fd.cast() },
|
|
.err => {},
|
|
}
|
|
|
|
new_node_modules = true;
|
|
|
|
// Attempt to create a new node_modules folder
|
|
if (bun.sys.mkdir("node_modules", 0o755).asErr()) |err| {
|
|
if (err.errno != @intFromEnum(bun.sys.E.EXIST)) {
|
|
Output.err(err, "could not create the <b>\"node_modules\"<r> directory", .{});
|
|
Global.crash();
|
|
}
|
|
}
|
|
break :brk bun.openDir(cwd.stdDir(), "node_modules") catch |err| {
|
|
Output.err(err, "could not open the <b>\"node_modules\"<r> directory", .{});
|
|
Global.crash();
|
|
};
|
|
};
|
|
|
|
var skip_delete = new_node_modules;
|
|
var skip_verify_installed_version_number = new_node_modules;
|
|
|
|
if (options.enable.force_install) {
|
|
skip_verify_installed_version_number = true;
|
|
skip_delete = false;
|
|
}
|
|
|
|
var summary = PackageInstall.Summary{};
|
|
|
|
{
|
|
var iterator = Lockfile.Tree.Iterator(.node_modules).init(this.lockfile);
|
|
if (comptime Environment.isPosix) {
|
|
Bin.Linker.ensureUmask();
|
|
}
|
|
var installer: PackageInstaller = brk: {
|
|
const completed_trees, const tree_ids_to_trees_the_id_depends_on = trees: {
|
|
const trees = this.lockfile.buffers.trees.items;
|
|
const completed_trees = try Bitset.initEmpty(this.allocator, trees.len);
|
|
var tree_ids_to_trees_the_id_depends_on = try Bitset.List.initEmpty(this.allocator, trees.len, trees.len);
|
|
|
|
{
|
|
// For each tree id, traverse through it's parents and mark all visited tree
|
|
// ids as dependents for the current tree parent
|
|
var deps = try Bitset.initEmpty(this.allocator, trees.len);
|
|
defer deps.deinit(this.allocator);
|
|
for (trees) |_curr| {
|
|
var curr = _curr;
|
|
tree_ids_to_trees_the_id_depends_on.set(curr.id, curr.id);
|
|
|
|
while (curr.parent != Lockfile.Tree.invalid_id) {
|
|
deps.set(curr.id);
|
|
tree_ids_to_trees_the_id_depends_on.setUnion(curr.parent, deps);
|
|
curr = trees[curr.parent];
|
|
}
|
|
|
|
deps.setAll(false);
|
|
}
|
|
}
|
|
|
|
if (comptime Environment.allow_assert) {
|
|
if (trees.len > 0) {
|
|
// last tree should only depend on one other
|
|
bun.assertWithLocation(tree_ids_to_trees_the_id_depends_on.at(trees.len - 1).count() == 1, @src());
|
|
// and it should be itself
|
|
bun.assertWithLocation(tree_ids_to_trees_the_id_depends_on.at(trees.len - 1).isSet(trees.len - 1), @src());
|
|
|
|
// root tree should always depend on all trees
|
|
bun.assertWithLocation(tree_ids_to_trees_the_id_depends_on.at(0).count() == trees.len, @src());
|
|
}
|
|
|
|
// a tree should always depend on itself
|
|
for (0..trees.len) |j| {
|
|
bun.assertWithLocation(tree_ids_to_trees_the_id_depends_on.at(j).isSet(j), @src());
|
|
}
|
|
}
|
|
|
|
break :trees .{
|
|
completed_trees,
|
|
tree_ids_to_trees_the_id_depends_on,
|
|
};
|
|
};
|
|
|
|
// These slices potentially get resized during iteration
|
|
// so we want to make sure they're not accessible to the rest of this function
|
|
// to make mistakes harder
|
|
var parts = this.lockfile.packages.slice();
|
|
|
|
break :brk PackageInstaller{
|
|
.manager = this,
|
|
.options = &this.options,
|
|
.metas = parts.items(.meta),
|
|
.bins = parts.items(.bin),
|
|
.root_node_modules_folder = node_modules_folder,
|
|
.names = parts.items(.name),
|
|
.pkg_name_hashes = parts.items(.name_hash),
|
|
.resolutions = parts.items(.resolution),
|
|
.pkg_dependencies = parts.items(.dependencies),
|
|
.lockfile = this.lockfile,
|
|
.node = &install_node,
|
|
.node_modules = .{
|
|
.path = std.array_list.Managed(u8).fromOwnedSlice(
|
|
this.allocator,
|
|
try this.allocator.dupe(
|
|
u8,
|
|
strings.withoutTrailingSlash(FileSystem.instance.top_level_dir),
|
|
),
|
|
),
|
|
.tree_id = 0,
|
|
},
|
|
.progress = progress,
|
|
.skip_verify_installed_version_number = skip_verify_installed_version_number,
|
|
.skip_delete = skip_delete,
|
|
.summary = &summary,
|
|
.force_install = options.enable.force_install,
|
|
.successfully_installed = try Bitset.initEmpty(
|
|
this.allocator,
|
|
this.lockfile.packages.len,
|
|
),
|
|
.command_ctx = ctx,
|
|
.tree_ids_to_trees_the_id_depends_on = tree_ids_to_trees_the_id_depends_on,
|
|
.completed_trees = completed_trees,
|
|
.trees = trees: {
|
|
const trees = bun.handleOom(this.allocator.alloc(TreeContext, this.lockfile.buffers.trees.items.len));
|
|
for (0..this.lockfile.buffers.trees.items.len) |i| {
|
|
trees[i] = .{
|
|
.binaries = Bin.PriorityQueue.init(this.allocator, .{
|
|
.dependencies = &this.lockfile.buffers.dependencies,
|
|
.string_buf = &this.lockfile.buffers.string_bytes,
|
|
}),
|
|
};
|
|
}
|
|
break :trees trees;
|
|
},
|
|
.trusted_dependencies_from_update_requests = this.findTrustedDependenciesFromUpdateRequests(),
|
|
.seen_bin_links = bun.StringHashMap(void).init(this.allocator),
|
|
};
|
|
};
|
|
|
|
try installer.node_modules.path.append(std.fs.path.sep);
|
|
|
|
defer installer.deinit();
|
|
|
|
while (iterator.next(&installer.completed_trees)) |node_modules| {
|
|
installer.node_modules.path.items.len = strings.withoutTrailingSlash(FileSystem.instance.top_level_dir).len + 1;
|
|
try installer.node_modules.path.appendSlice(node_modules.relative_path);
|
|
installer.node_modules.tree_id = node_modules.tree_id;
|
|
var remaining = node_modules.dependencies;
|
|
installer.current_tree_id = node_modules.tree_id;
|
|
|
|
// cache line is 64 bytes on ARM64 and x64
|
|
// PackageIDs are 4 bytes
|
|
// Hence, we can fit up to 64 / 4 = 16 package IDs in a cache line
|
|
const unroll_count = comptime 64 / @sizeOf(PackageID);
|
|
|
|
while (remaining.len > unroll_count) {
|
|
comptime var i: usize = 0;
|
|
inline while (i < unroll_count) : (i += 1) {
|
|
installer.installPackage(remaining[i], log_level);
|
|
}
|
|
remaining = remaining[unroll_count..];
|
|
|
|
// We want to minimize how often we call this function
|
|
// That's part of why we unroll this loop
|
|
if (this.pendingTaskCount() > 0) {
|
|
try this.runTasks(
|
|
*PackageInstaller,
|
|
&installer,
|
|
.{
|
|
.onExtract = PackageInstaller.installEnqueuedPackagesAfterExtraction,
|
|
.onResolve = {},
|
|
.onPackageManifestError = {},
|
|
.onPackageDownloadError = {},
|
|
},
|
|
true,
|
|
log_level,
|
|
);
|
|
if (!installer.options.do.install_packages) return error.InstallFailed;
|
|
}
|
|
this.tickLifecycleScripts();
|
|
this.reportSlowLifecycleScripts();
|
|
}
|
|
|
|
for (remaining) |dependency_id| {
|
|
installer.installPackage(dependency_id, log_level);
|
|
}
|
|
|
|
try this.runTasks(
|
|
*PackageInstaller,
|
|
&installer,
|
|
.{
|
|
.onExtract = PackageInstaller.installEnqueuedPackagesAfterExtraction,
|
|
.onResolve = {},
|
|
.onPackageManifestError = {},
|
|
.onPackageDownloadError = {},
|
|
},
|
|
true,
|
|
log_level,
|
|
);
|
|
if (!installer.options.do.install_packages) return error.InstallFailed;
|
|
|
|
this.tickLifecycleScripts();
|
|
this.reportSlowLifecycleScripts();
|
|
}
|
|
|
|
while (this.pendingTaskCount() > 0 and installer.options.do.install_packages) {
|
|
const Closure = struct {
|
|
installer: *PackageInstaller,
|
|
err: ?anyerror = null,
|
|
manager: *PackageManager,
|
|
|
|
pub fn isDone(closure: *@This()) bool {
|
|
const pm = closure.manager;
|
|
closure.manager.runTasks(
|
|
*PackageInstaller,
|
|
closure.installer,
|
|
.{
|
|
.onExtract = PackageInstaller.installEnqueuedPackagesAfterExtraction,
|
|
.onResolve = {},
|
|
.onPackageManifestError = {},
|
|
.onPackageDownloadError = {},
|
|
},
|
|
true,
|
|
pm.options.log_level,
|
|
) catch |err| {
|
|
closure.err = err;
|
|
};
|
|
|
|
if (closure.err != null) {
|
|
return true;
|
|
}
|
|
|
|
closure.manager.reportSlowLifecycleScripts();
|
|
|
|
if (PackageManager.verbose_install and closure.manager.pendingTaskCount() > 0) {
|
|
const pending_task_count = closure.manager.pendingTaskCount();
|
|
if (pending_task_count > 0 and PackageManager.hasEnoughTimePassedBetweenWaitingMessages()) {
|
|
Output.prettyErrorln("<d>[PackageManager]<r> waiting for {d} tasks\n", .{pending_task_count});
|
|
}
|
|
}
|
|
|
|
return closure.manager.pendingTaskCount() == 0 and closure.manager.hasNoMorePendingLifecycleScripts();
|
|
}
|
|
};
|
|
|
|
var closure = Closure{
|
|
.installer = &installer,
|
|
.manager = this,
|
|
};
|
|
|
|
// Whenever the event loop wakes up, we need to call `runTasks`
|
|
// If we call sleep() instead of sleepUntil(), it will wait forever until there are no more lifecycle scripts
|
|
// which means it will not call runTasks until _all_ current lifecycle scripts have finished running
|
|
this.sleepUntil(&closure, &Closure.isDone);
|
|
|
|
if (closure.err) |err| {
|
|
return err;
|
|
}
|
|
} else {
|
|
this.tickLifecycleScripts();
|
|
this.reportSlowLifecycleScripts();
|
|
}
|
|
|
|
for (installer.trees) |tree| {
|
|
if (comptime Environment.allow_assert) {
|
|
bun.assert(tree.pending_installs.items.len == 0);
|
|
}
|
|
const force = true;
|
|
installer.installAvailablePackages(log_level, force);
|
|
}
|
|
|
|
// .monotonic is okay because this value is only accessed on this thread.
|
|
this.finished_installing.store(true, .monotonic);
|
|
if (log_level.showProgress()) {
|
|
scripts_node.activate();
|
|
}
|
|
|
|
if (!installer.options.do.install_packages) return error.InstallFailed;
|
|
|
|
summary.successfully_installed = installer.successfully_installed;
|
|
|
|
// need to make sure bins are linked before completing any remaining scripts.
|
|
// this can happen if a package fails to download
|
|
installer.linkRemainingBins(log_level);
|
|
installer.completeRemainingScripts(log_level);
|
|
|
|
// .monotonic is okay because this value is only accessed on this thread.
|
|
while (this.pending_lifecycle_script_tasks.load(.monotonic) > 0) {
|
|
this.reportSlowLifecycleScripts();
|
|
|
|
this.sleep();
|
|
}
|
|
|
|
if (log_level.showProgress()) {
|
|
scripts_node.end();
|
|
}
|
|
}
|
|
|
|
return summary;
|
|
}
|
|
|
|
const std = @import("std");
|
|
|
|
const bun = @import("bun");
|
|
const Environment = bun.Environment;
|
|
const Global = bun.Global;
|
|
const Output = bun.Output;
|
|
const Progress = bun.Progress;
|
|
const strings = bun.strings;
|
|
const Bitset = bun.bit_set.DynamicBitSetUnmanaged;
|
|
const Command = bun.cli.Command;
|
|
const FileSystem = bun.fs.FileSystem;
|
|
|
|
const install = bun.install;
|
|
const Bin = install.Bin;
|
|
const Lockfile = install.Lockfile;
|
|
const PackageID = install.PackageID;
|
|
const PackageInstall = install.PackageInstall;
|
|
|
|
const PackageManager = install.PackageManager;
|
|
const ProgressStrings = PackageManager.ProgressStrings;
|
|
const WorkspaceFilter = PackageManager.WorkspaceFilter;
|
|
|
|
const PackageInstaller = PackageManager.PackageInstaller;
|
|
const TreeContext = PackageInstaller.TreeContext;
|