Compare commits

...

1 Commit

Author SHA1 Message Date
Dylan Conway
9ccc94bfc4 delete stale packages 2025-01-05 18:02:32 -08:00
8 changed files with 473 additions and 60 deletions

View File

@@ -15,7 +15,7 @@ const PackageID = Install.PackageID;
const DependencyID = Install.DependencyID;
const PackageManager = Install.PackageManager;
const Lockfile = @import("../install/lockfile.zig");
const NodeModulesFolder = Lockfile.Tree.Iterator(.node_modules).Next;
const NodeModulesFolder = Lockfile.Tree.Iterator(Lockfile, .node_modules).Next;
const Path = @import("../resolver/resolve_path.zig");
const String = @import("../install/semver.zig").String;
const ArrayIdentityContext = bun.ArrayIdentityContext;
@@ -301,7 +301,7 @@ pub const PackageManagerCommand = struct {
Output.flush();
Output.disableBuffering();
const lockfile = load_lockfile.ok.lockfile;
var iterator = Lockfile.Tree.Iterator(.node_modules).init(lockfile);
var iterator = Lockfile.Tree.Iterator(Lockfile, .node_modules).init(lockfile);
var max_depth: usize = 0;

View File

@@ -71,7 +71,7 @@ pub const UntrustedCommand = struct {
var untrusted_deps: std.AutoArrayHashMapUnmanaged(DependencyID, Lockfile.Package.Scripts.List) = .{};
defer untrusted_deps.deinit(ctx.allocator);
var tree_iterator = Lockfile.Tree.Iterator(.node_modules).init(pm.lockfile);
var tree_iterator = Lockfile.Tree.Iterator(Lockfile, .node_modules).init(pm.lockfile);
const top_level_without_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir);
var abs_node_modules_path: std.ArrayListUnmanaged(u8) = .{};
@@ -225,7 +225,7 @@ pub const TrustCommand = struct {
// Instead of running them right away, we group scripts by depth in the node_modules
// file structure, then run them starting at max depth. This ensures lifecycle scripts are run
// in the correct order as they would during a normal install
var tree_iter = Lockfile.Tree.Iterator(.node_modules).init(pm.lockfile);
var tree_iter = Lockfile.Tree.Iterator(Lockfile, .node_modules).init(pm.lockfile);
const top_level_without_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir);
var abs_node_modules_path: std.ArrayListUnmanaged(u8) = .{};

View File

@@ -372,7 +372,7 @@ pub const Stringifier = struct {
var tree_sort_buf: std.ArrayListUnmanaged(TreeSortCtx.Item) = .{};
defer tree_sort_buf.deinit(allocator);
var pkgs_iter = BinaryLockfile.Tree.Iterator(.pkg_path).init(lockfile);
var pkgs_iter = BinaryLockfile.Tree.Iterator(BinaryLockfile, .pkg_path).init(lockfile);
// find trusted and patched dependencies. also overrides
while (pkgs_iter.next({})) |node| {

View File

@@ -59,14 +59,17 @@ behavior: Behavior = .{},
/// "name" must be ASC so that later, when we rebuild the lockfile
/// we insert it back in reverse order without an extra sorting pass
/// Reports whether `lhs` sorts before `rhs`: by `Behavior` first, then by
/// name (ascending) when the behaviors tie.
///
/// FIX: the previous text contained both the old implementation (inline
/// behavior compare + `strings.cmpStringsAsc` return) and the new
/// `order(...)` return — two consecutive `return` statements left behind by
/// a botched merge. Only the new, delegated implementation is kept so the
/// comparison logic lives in exactly one place (`order`).
pub fn isLessThan(string_buf: []const u8, lhs: Dependency, rhs: Dependency) bool {
    const lhs_name = lhs.name.slice(string_buf);
    const rhs_name = rhs.name.slice(string_buf);
    return order(lhs_name, lhs.behavior, rhs_name, rhs.behavior) == .lt;
}
/// Total ordering for dependencies: compare the `Behavior` values first and
/// fall back to a lexicographic name comparison only when the behaviors tie.
pub fn order(l_name: string, l_behavior: Behavior, r_name: string, r_behavior: Behavior) std.math.Order {
    const by_behavior = l_behavior.cmp(r_behavior);
    if (by_behavior != .eq) {
        // behaviors differ — that alone decides the ordering
        return by_behavior;
    }
    return strings.order(l_name, r_name);
}
pub fn countWithDifferentBuffers(this: *const Dependency, name_buf: []const u8, version_buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) void {

View File

@@ -65,6 +65,7 @@ threadlocal var initialized_store = false;
const Futex = @import("../futex.zig");
pub const Lockfile = @import("./lockfile.zig");
pub const MiniLockfile = @import("./mini_lockfile.zig").MiniLockfile;
pub const PatchedDep = Lockfile.PatchedDep;
const Walker = @import("../walker_skippable.zig");
@@ -2114,10 +2115,10 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
destination_dir.deleteTree(bun.span(this.destination_dir_subpath)) catch {};
}
pub fn uninstallBeforeInstall(this: *@This(), destination_dir: std.fs.Dir) void {
pub fn uninstallBeforeInstall(node_modules_path: string, destination_dir: std.fs.Dir, subpath: stringZ) void {
var rand_path_buf: [48]u8 = undefined;
const temp_path = std.fmt.bufPrintZ(&rand_path_buf, ".old-{}", .{std.fmt.fmtSliceHexUpper(std.mem.asBytes(&bun.fastRandom()))}) catch unreachable;
switch (bun.sys.renameat(bun.toFD(destination_dir), this.destination_dir_subpath, bun.toFD(destination_dir), temp_path)) {
switch (bun.sys.renameat(bun.toFD(destination_dir), subpath, bun.toFD(destination_dir), temp_path)) {
.err => {
// if it fails, that means the directory doesn't exist or was inaccessible
},
@@ -2152,8 +2153,14 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
var unintall_task: *@This() = @fieldParentPtr("task", task);
var debug_timer = bun.Output.DebugTimer.start();
defer {
_ = PackageManager.get().decrementPendingTasks();
PackageManager.get().wake();
var pm = PackageManager.get();
if (pm.options.log_level.showProgress()) {
if (pm.prune_node) |node| {
node.completeOne();
}
}
_ = pm.decrementPendingTasks();
pm.wake();
}
defer unintall_task.deinit();
@@ -2191,7 +2198,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
pub usingnamespace bun.New(@This());
};
var task = UninstallTask.new(.{
.absolute_path = bun.default_allocator.dupeZ(u8, bun.path.joinAbsString(FileSystem.instance.top_level_dir, &.{ this.node_modules.path.items, temp_path }, .auto)) catch bun.outOfMemory(),
.absolute_path = bun.default_allocator.dupeZ(u8, bun.path.joinAbsString(FileSystem.instance.top_level_dir, &.{ node_modules_path, temp_path }, .auto)) catch bun.outOfMemory(),
});
PackageManager.get().thread_pool.schedule(bun.ThreadPool.Batch.from(&task.task));
_ = PackageManager.get().incrementPendingTasks(1);
@@ -2252,7 +2259,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
const dest_path = this.destination_dir_subpath;
// If this fails, we don't care.
// we'll catch it the next error
if (!skip_delete and !strings.eqlComptime(dest_path, ".")) this.uninstallBeforeInstall(destination_dir);
if (!skip_delete and !strings.eqlComptime(dest_path, ".")) uninstallBeforeInstall(this.node_modules.path.items, destination_dir, this.destination_dir_subpath);
const subdir = std.fs.path.dirname(dest_path);
@@ -2418,7 +2425,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
pub fn installImpl(this: *@This(), skip_delete: bool, destination_dir: std.fs.Dir, method_: Method, resolution_tag: Resolution.Tag) Result {
// If this fails, we don't care.
// we'll catch it the next error
if (!skip_delete and !strings.eqlComptime(this.destination_dir_subpath, ".")) this.uninstallBeforeInstall(destination_dir);
if (!skip_delete and !strings.eqlComptime(this.destination_dir_subpath, ".")) uninstallBeforeInstall(this.node_modules.path.items, destination_dir, this.destination_dir_subpath);
var supported_method_to_use = method_;
@@ -2654,6 +2661,7 @@ pub const PackageManager = struct {
progress: Progress = .{},
downloads_node: ?*Progress.Node = null,
scripts_node: ?*Progress.Node = null,
prune_node: ?*Progress.Node = null,
progress_name_buf: [768]u8 = undefined,
progress_name_buf_dynamic: []u8 = &[_]u8{},
cpu_count: u32 = 0,
@@ -7764,6 +7772,11 @@ pub const PackageManager = struct {
const script_with_emoji: string = script_emoji ++ script_no_emoji_;
pub const script_emoji: string = " ⚙️ ";
pub const prune_no_emoji_ = "Pruning";
const prune_no_emoji: string = prune_no_emoji_ ++ "\n";
const prune_with_emoji: string = prune_emoji ++ prune_no_emoji_;
pub const prune_emoji: string = " 💥 ";
pub inline fn download() string {
return if (Output.isEmojiEnabled()) download_with_emoji else download_no_emoji;
}
@@ -7783,6 +7796,10 @@ pub const PackageManager = struct {
pub inline fn script() string {
return if (Output.isEmojiEnabled()) script_with_emoji else script_no_emoji;
}
pub inline fn prune() string {
return if (Output.isEmojiEnabled()) prune_with_emoji else prune_no_emoji;
}
};
pub const PackageJSONEditor = struct {
@@ -11111,7 +11128,7 @@ pub const PackageManager = struct {
}
}
fn nodeModulesFolderForDependencyIDs(iterator: *Lockfile.Tree.Iterator(.node_modules), ids: []const IdPair) !?Lockfile.Tree.Iterator(.node_modules).Next {
fn nodeModulesFolderForDependencyIDs(iterator: *Lockfile.Tree.Iterator(Lockfile, .node_modules), ids: []const IdPair) !?Lockfile.Tree.Iterator(Lockfile, .node_modules).Next {
while (iterator.next(null)) |node_modules| {
for (ids) |id| {
_ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, id[0]) orelse continue;
@@ -11121,7 +11138,7 @@ pub const PackageManager = struct {
return null;
}
fn nodeModulesFolderForDependencyID(iterator: *Lockfile.Tree.Iterator(.node_modules), dependency_id: DependencyID) !?Lockfile.Tree.Iterator(.node_modules).Next {
fn nodeModulesFolderForDependencyID(iterator: *Lockfile.Tree.Iterator(Lockfile, .node_modules), dependency_id: DependencyID) !?Lockfile.Tree.Iterator(Lockfile, .node_modules).Next {
while (iterator.next(null)) |node_modules| {
_ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, dependency_id) orelse continue;
return node_modules;
@@ -11134,11 +11151,11 @@ pub const PackageManager = struct {
fn pkgInfoForNameAndVersion(
lockfile: *Lockfile,
iterator: *Lockfile.Tree.Iterator(.node_modules),
iterator: *Lockfile.Tree.Iterator(Lockfile, .node_modules),
pkg_maybe_version_to_patch: []const u8,
name: []const u8,
version: ?[]const u8,
) struct { PackageID, Lockfile.Tree.Iterator(.node_modules).Next } {
) struct { PackageID, Lockfile.Tree.Iterator(Lockfile, .node_modules).Next } {
var sfb = std.heap.stackFallback(@sizeOf(IdPair) * 4, lockfile.allocator);
var pairs = std.ArrayList(IdPair).initCapacity(sfb.get(), 8) catch bun.outOfMemory();
defer pairs.deinit();
@@ -11312,7 +11329,7 @@ pub const PackageManager = struct {
const arg_kind: PatchArgKind = PatchArgKind.fromArg(argument);
var folder_path_buf: bun.PathBuffer = undefined;
var iterator = Lockfile.Tree.Iterator(.node_modules).init(manager.lockfile);
var iterator = Lockfile.Tree.Iterator(Lockfile, .node_modules).init(manager.lockfile);
var resolution_buf: [1024]u8 = undefined;
var win_normalizer: if (bun.Environment.isWindows) bun.PathBuffer else struct {} = undefined;
@@ -11745,7 +11762,7 @@ pub const PackageManager = struct {
};
defer root_node_modules.close();
var iterator = Lockfile.Tree.Iterator(.node_modules).init(lockfile);
var iterator = Lockfile.Tree.Iterator(Lockfile, .node_modules).init(lockfile);
var resolution_buf: [1024]u8 = undefined;
const _cache_dir: std.fs.Dir, const _cache_dir_subpath: stringZ, const _changes_dir: []const u8, const _pkg: Package = switch (arg_kind) {
.path => result: {
@@ -12461,7 +12478,7 @@ pub const PackageManager = struct {
destination_dir_subpath_buf: bun.PathBuffer = undefined,
folder_path_buf: bun.PathBuffer = undefined,
successfully_installed: Bitset,
tree_iterator: *Lockfile.Tree.Iterator(.node_modules),
tree_iterator: *Lockfile.Tree.Iterator(Lockfile, .node_modules),
command_ctx: Command.Context,
current_tree_id: Lockfile.Tree.Id = Lockfile.Tree.invalid_id,
@@ -12623,7 +12640,9 @@ pub const PackageManager = struct {
this.seen_bin_links.clearRetainingCapacity();
this.node_modules.path.items.len = strings.withoutTrailingSlash(FileSystem.instance.top_level_dir).len + 1;
const rel_path, _ = Lockfile.Tree.relativePathAndDepth(
lockfile,
lockfile.buffers.string_bytes.items,
lockfile.buffers.trees.items,
lockfile.buffers.dependencies.items,
@intCast(tree_id),
&node_modules_rel_path_buf,
&depth_buf,
@@ -13900,6 +13919,163 @@ pub const PackageManager = struct {
)));
}
/// Diffs the tree recorded by the previous install (the hidden
/// `node_modules/.bun.lockb` mini lockfile) against the current lockfile and
/// queues async deletion of packages that no longer exist in the new tree.
/// Waits for all queued uninstall tasks to drain before returning, then (via
/// `defer`) re-saves the mini lockfile to reflect the current state.
pub fn deleteStalePackages(
    this: *PackageManager,
    node_modules_folder: bun.FD,
    prune_node: *Progress.Node,
    comptime log_level: Options.LogLevel,
) OOM!void {
    const mini_lockfile: MiniLockfile = try MiniLockfile.loadFromDir(this.allocator, node_modules_folder) orelse {
        // First install (or unreadable hidden lockfile): nothing to prune,
        // just record the current state for the next install to diff against.
        try MiniLockfile.saveToDisk(node_modules_folder, this.lockfile);
        return;
    };
    defer {
        MiniLockfile.saveToDisk(node_modules_folder, this.lockfile) catch bun.outOfMemory();
        mini_lockfile.deinit(this.allocator);
    }

    // Scratch buffer shared by all deletions; the helper below carves
    // NUL-terminated names/paths out of it. It is fully rewritten per call,
    // so reuse across iterations is safe.
    var path_buf: bun.PathBuffer = undefined;

    const original_pending_task_count = this.pendingTaskCount();
    if (comptime Environment.isDebug) {
        bun.assert(original_pending_task_count == 0);
    }

    const Prune = struct {
        /// Queues an async delete of `existing_dep_name`, which lives in the
        /// tree at `existing_relative_path` (relative to the root
        /// node_modules). This logic was previously duplicated verbatim in
        /// two branches.
        fn deleteStaleDep(
            buf: *bun.PathBuffer,
            root: bun.FD,
            node: *Progress.Node,
            comptime level: Options.LogLevel,
            existing_relative_path: []const u8,
            existing_dep_name: []const u8,
        ) void {
            var remain: []u8 = buf;
            @memcpy(remain[0..existing_dep_name.len], existing_dep_name);
            remain[existing_dep_name.len] = 0;
            const existing_dep_name_z = remain[0..existing_dep_name.len :0];
            remain = remain[existing_dep_name.len + 1 ..];

            const trimmed_existing_path = strings.withoutLeadingPathSeparator(strings.withoutPrefixComptime(existing_relative_path, "node_modules"));
            if (trimmed_existing_path.len == 0) {
                // dep lives directly inside the root node_modules
                if (comptime level.showProgress()) {
                    node.setEstimatedTotalItems(node.unprotected_estimated_total_items + 1);
                }
                PackageInstall.uninstallBeforeInstall(existing_relative_path, root.asDir(), existing_dep_name_z);
                return;
            }

            @memcpy(remain[0..trimmed_existing_path.len], trimmed_existing_path);
            remain[trimmed_existing_path.len] = 0;
            const trimmed_existing_path_z = remain[0..trimmed_existing_path.len :0];
            remain = remain[trimmed_existing_path.len + 1 ..];

            const dir = bun.openDir(root.asDir(), trimmed_existing_path_z) catch {
                // probably already deleted
                return;
            };
            if (comptime level.showProgress()) {
                node.setEstimatedTotalItems(node.unprotected_estimated_total_items + 1);
            }
            PackageInstall.uninstallBeforeInstall(existing_relative_path, dir, existing_dep_name_z);
        }
    };

    // Both iterators yield trees sorted by relative path, and each tree's
    // dependency list is sorted by `Dependency.order`, so stale entries can
    // be found with a single sorted-merge pass.
    var new_iter = Lockfile.Tree.Iterator(Lockfile, .node_modules).init(this.lockfile);
    var curr = new_iter.next(null);
    var existing_iter = Lockfile.Tree.Iterator(MiniLockfile, .node_modules).init(&mini_lockfile);
    while (existing_iter.next(null)) |existing| {
        // FIX: skip trees that only exist in the new lockfile so `existing`
        // is compared against the first new tree that is >= it. Previously
        // the `.gt` branch advanced BOTH iterators in one step, which could
        // skip the diff of a matching tree entirely and leave its stale
        // packages undeleted.
        while (curr) |new_tree| {
            if (strings.order(existing.relative_path, new_tree.relative_path) != .gt) break;
            curr = new_iter.next(null);
        }

        const new = curr orelse {
            // New tree list exhausted. The stale directories themselves are
            // removed when their parent packages are deleted in an earlier
            // tree, so only the progress estimate is updated here.
            if (comptime log_level.showProgress()) {
                prune_node.setEstimatedTotalItems(prune_node.unprotected_estimated_total_items + existing.dependencies.len);
            }
            continue;
        };

        if (strings.order(existing.relative_path, new.relative_path) == .lt) {
            // Tree only exists in the old lockfile; as above, its directory
            // is removed via its parent package's deletion.
            if (comptime log_level.showProgress()) {
                prune_node.setEstimatedTotalItems(prune_node.unprotected_estimated_total_items + existing.dependencies.len);
            }
            continue;
        }

        // Equal paths: same tree in both lockfiles — merge-compare the two
        // sorted dependency lists.
        const deps = this.lockfile.buffers.dependencies.items;
        const new_string_bytes = this.lockfile.buffers.string_bytes.items;
        const dep_names = mini_lockfile.dep_names;
        const dep_behaviors = mini_lockfile.dep_behaviors;
        var new_i: usize = 0;
        for (0..existing.dependencies.len) |existing_i| {
            const existing_dep_name = dep_names[existing.dependencies[existing_i]].slice(mini_lockfile.string_bytes);
            const existing_dep_behavior = dep_behaviors[existing.dependencies[existing_i]];

            // FIX: same skipped-comparison bug as at tree level — advance
            // past new-only deps while keeping the current existing dep, so
            // it is compared against the first new dep that is >= it.
            while (new_i < new.dependencies.len) {
                const new_dep = deps[new.dependencies[new_i]];
                const new_dep_name = new_dep.name.slice(new_string_bytes);
                if (Dependency.order(existing_dep_name, existing_dep_behavior, new_dep_name, new_dep.behavior) != .gt) break;
                new_i += 1;
            }

            // FIX: bound `new_i` by this tree's dependency list. The old
            // check compared against `this.lockfile.buffers.dependencies.items.len`
            // (the whole dependency buffer), which almost never fired and
            // let `new.dependencies[new_i]` index out of bounds once the
            // new tree's deps were exhausted.
            if (new_i >= new.dependencies.len) {
                // new tree exhausted: everything left in the existing tree is stale
                Prune.deleteStaleDep(&path_buf, node_modules_folder, prune_node, log_level, existing.relative_path, existing_dep_name);
                continue;
            }

            const new_dep = deps[new.dependencies[new_i]];
            const new_dep_name = new_dep.name.slice(new_string_bytes);
            switch (Dependency.order(existing_dep_name, existing_dep_behavior, new_dep_name, new_dep.behavior)) {
                .eq => {
                    // present in both: keep it
                    new_i += 1;
                },
                .lt => {
                    // only in the old tree: delete
                    Prune.deleteStaleDep(&path_buf, node_modules_folder, prune_node, log_level, existing.relative_path, existing_dep_name);
                },
                // impossible: the skip loop above stopped at the first new
                // dep that is not less than the existing one
                .gt => unreachable,
            }
        }
        curr = new_iter.next(null);
    }

    // Wait for every uninstall task queued above to drain before the
    // install phase starts touching node_modules.
    while (this.pendingTaskCount() > original_pending_task_count) {
        this.sleep();
    }

    if (comptime log_level.showProgress()) {
        prune_node.end();
    }
}
pub fn installPackages(
this: *PackageManager,
ctx: Command.Context,
@@ -13921,6 +14097,7 @@ pub const PackageManager = struct {
var download_node: Progress.Node = undefined;
var install_node: Progress.Node = undefined;
var scripts_node: Progress.Node = undefined;
var prune_node: Progress.Node = undefined;
const options = &this.options;
var progress = &this.progress;
@@ -13931,8 +14108,10 @@ pub const PackageManager = struct {
install_node = root_node.start(ProgressStrings.install(), this.lockfile.buffers.hoisted_dependencies.items.len);
scripts_node = root_node.start(ProgressStrings.script(), 0);
prune_node = root_node.start(ProgressStrings.prune(), 0);
this.downloads_node = &download_node;
this.scripts_node = &scripts_node;
this.prune_node = &prune_node;
}
defer {
@@ -13950,26 +14129,37 @@ pub const PackageManager = struct {
// no need to download packages you've already installed!!
var new_node_modules = false;
const cwd = std.fs.cwd();
const node_modules_folder = brk: {
const node_modules_folder = node_modules_folder: {
// Attempt to open the existing node_modules folder
switch (bun.sys.openatOSPath(bun.toFD(cwd), bun.OSPathLiteral("node_modules"), bun.O.DIRECTORY | bun.O.RDONLY, 0o755)) {
.result => |fd| break :brk std.fs.Dir{ .fd = fd.cast() },
.result => |fd| {
try this.deleteStalePackages(fd, &prune_node, log_level);
break :node_modules_folder std.fs.Dir{ .fd = fd.cast() };
},
.err => {},
}
new_node_modules = true;
// root node_modules is clean, make sure node_modules for each workspace
// is clean
// Attempt to create a new node_modules folder
bun.sys.mkdir("node_modules", 0o755).unwrap() catch |err| {
if (err != error.EEXIST) {
Output.prettyErrorln("<r><red>error<r>: <b><red>{s}<r> creating <b>node_modules<r> folder", .{@errorName(err)});
Output.errGeneric("<b><red>{s}<r> creating <b>node_modules<r> folder", .{@errorName(err)});
Global.crash();
}
};
break :brk bun.openDir(cwd, "node_modules") catch |err| {
Output.prettyErrorln("<r><red>error<r>: <b><red>{s}<r> opening <b>node_modules<r> folder", .{@errorName(err)});
const dir = bun.openDir(cwd, "node_modules") catch |err| {
Output.errGeneric("<b><red>{s}<r> opening <b>node_modules<r> folder", .{@errorName(err)});
Global.crash();
};
MiniLockfile.saveToDisk(bun.toFD(dir), this.lockfile) catch bun.outOfMemory();
break :node_modules_folder dir;
};
var skip_delete = new_node_modules;
@@ -13983,7 +14173,7 @@ pub const PackageManager = struct {
var summary = PackageInstall.Summary{};
{
var iterator = Lockfile.Tree.Iterator(.node_modules).init(this.lockfile);
var iterator = Lockfile.Tree.Iterator(Lockfile, .node_modules).init(this.lockfile);
if (comptime Environment.isPosix) {
Bin.Linker.ensureUmask();
}

View File

@@ -16,6 +16,7 @@ const JSAst = bun.JSAst;
const TextLockfile = @import("./bun.lock.zig");
const OOM = bun.OOM;
const WorkspaceFilter = PackageManager.WorkspaceFilter;
const MiniLockfile = Install.MiniLockfile;
const JSLexer = bun.js_lexer;
const logger = bun.logger;
@@ -462,20 +463,32 @@ pub const Tree = struct {
pub const List = std.ArrayListUnmanaged(Tree);
pub const Id = u32;
pub fn folderName(this: *const Tree, deps: []const Dependency, buf: string) string {
fn folderName(this: *const Tree, deps_or_names: anytype, buf: string) string {
const dep_id = this.dependency_id;
if (dep_id == invalid_dependency_id) return "";
return deps[dep_id].name.slice(buf);
switch (comptime std.meta.Child(@TypeOf(deps_or_names))) {
Dependency => {
return deps_or_names[dep_id].name.slice(buf);
},
String => {
return deps_or_names[dep_id].slice(buf);
},
else => {
@compileError("unexpected type");
},
}
}
/// Serializes this tree into its fixed 20-byte on-disk representation
/// (id, dependency_id, parent, dependencies.off, dependencies.len — each a
/// 4-byte little-endian integer, in that order).
///
/// FIX: a botched merge left both the old implementation (`var out =
/// External{}` plus direct `@bitCast` slice assignments) and the new
/// writer-based implementation in the body, double-initializing and
/// double-writing `out`. Only the new implementation is kept.
pub fn toExternal(this: Tree) External {
    var out: External = undefined;
    // comptime-known length check; the branch disappears when it holds
    if (out.len != 20) @compileError("Tree.External is not 20 bytes");
    var stream = std.io.fixedBufferStream(&out);
    var writer = stream.writer();
    // writes cannot fail: the buffer is exactly as large as the payload
    writer.writeInt(Id, this.id, .little) catch unreachable;
    writer.writeInt(DependencyID, this.dependency_id, .little) catch unreachable;
    writer.writeInt(Id, this.parent, .little) catch unreachable;
    writer.writeInt(u32, this.dependencies.off, .little) catch unreachable;
    writer.writeInt(u32, this.dependencies.len, .little) catch unreachable;
    return out;
}
@@ -523,16 +536,16 @@ pub const Tree = struct {
pkg_path,
};
pub fn Iterator(comptime path_style: IteratorPathStyle) type {
pub fn Iterator(comptime LockfileType: type, comptime path_style: IteratorPathStyle) type {
return struct {
tree_id: Id,
path_buf: bun.PathBuffer = undefined,
lockfile: *const Lockfile,
lockfile: *const LockfileType,
depth_stack: DepthBuf = undefined,
pub fn init(lockfile: *const Lockfile) @This() {
pub fn init(lockfile: *const LockfileType) @This() {
var iter: @This() = .{
.tree_id = 0,
.lockfile = lockfile,
@@ -563,7 +576,21 @@ pub const Tree = struct {
};
pub fn next(this: *@This(), completed_trees: if (path_style == .node_modules) ?*Bitset else void) ?Next {
const trees = this.lockfile.buffers.trees.items;
const trees, const hoisted_dependencies, const string_bytes, const deps_or_names = switch (LockfileType) {
Lockfile => .{
this.lockfile.buffers.trees.items,
this.lockfile.buffers.hoisted_dependencies.items,
this.lockfile.buffers.string_bytes.items,
this.lockfile.buffers.dependencies.items,
},
MiniLockfile => .{
this.lockfile.trees,
this.lockfile.hoisted_dependencies,
this.lockfile.string_bytes,
this.lockfile.dep_names,
},
else => @compileError("unexpected lockfile type"),
};
if (this.tree_id >= trees.len) return null;
@@ -579,10 +606,12 @@ pub const Tree = struct {
const current_tree_id = this.tree_id;
const tree = trees[current_tree_id];
const tree_dependencies = tree.dependencies.get(this.lockfile.buffers.hoisted_dependencies.items);
const tree_dependencies = tree.dependencies.get(hoisted_dependencies);
const relative_path, const depth = relativePathAndDepth(
this.lockfile,
string_bytes,
trees,
deps_or_names,
current_tree_id,
&this.path_buf,
&this.depth_stack,
@@ -603,13 +632,14 @@ pub const Tree = struct {
/// Returns relative path and the depth of the tree
pub fn relativePathAndDepth(
lockfile: *const Lockfile,
string_buf: string,
trees: []const Tree,
deps_or_names: anytype,
tree_id: Id,
path_buf: *bun.PathBuffer,
depth_buf: *DepthBuf,
comptime path_style: IteratorPathStyle,
) struct { stringZ, usize } {
const trees = lockfile.buffers.trees.items;
var depth: usize = 0;
const tree = trees[tree_id];
@@ -623,8 +653,6 @@ pub const Tree = struct {
depth_buf[0] = 0;
if (tree.id > 0) {
const dependencies = lockfile.buffers.dependencies.items;
const buf = lockfile.buffers.string_bytes.items;
var depth_buf_len: usize = 1;
while (parent_id > 0 and parent_id < trees.len) {
@@ -648,7 +676,7 @@ pub const Tree = struct {
}
const id = depth_buf[depth_buf_len];
const name = trees[id].folderName(dependencies, buf);
const name = trees[id].folderName(deps_or_names, string_buf);
@memcpy(path_buf[path_written..][0..name.len], name);
path_written += name.len;
@@ -6297,6 +6325,13 @@ const Buffers = struct {
comptime assertNoUninitializedPadding(@TypeOf(array));
const bytes = std.mem.sliceAsBytes(array);
const item_info = @typeInfo(std.meta.Child(ArrayList));
if (item_info == .Struct) {
if (item_info.Struct.layout == .auto) {
@compileError("attempt to serialize non-extern struct");
}
}
const start_pos = try stream.getPos();
try writer.writeInt(u64, 0xDEADBEEF, .little);
try writer.writeInt(u64, 0xDEADBEEF, .little);
@@ -6398,8 +6433,9 @@ const Buffers = struct {
} else {
const list = @field(buffers, name);
const items = list.items;
const Type = @TypeOf(items);
if (comptime Type == Tree) {
const ArrayType = @TypeOf(items);
const ItemType = std.meta.Child(ArrayType);
if (comptime ItemType == Tree) {
// We duplicate it here so that alignment bytes are zeroed out
var clone = try std.ArrayListUnmanaged(Tree.External).initCapacity(allocator, list.items.len);
for (list.items) |item| {
@@ -6407,14 +6443,14 @@ const Buffers = struct {
}
defer clone.deinit(allocator);
try writeArray(StreamType, stream, Writer, writer, Tree.External, clone.items);
try writeArray(StreamType, stream, Writer, writer, []const Tree.External, clone.items);
} else {
// We duplicate it here so that alignment bytes are zeroed out
var clone = try std.ArrayListUnmanaged(std.meta.Child(Type)).initCapacity(allocator, list.items.len);
var clone = try std.ArrayListUnmanaged(ItemType).initCapacity(allocator, list.items.len);
clone.appendSliceAssumeCapacity(items);
defer clone.deinit(allocator);
try writeArray(StreamType, stream, Writer, writer, Type, clone.items);
try writeArray(StreamType, stream, Writer, writer, ArrayType, clone.items);
}
}
@@ -7014,7 +7050,15 @@ pub fn eql(l: *const Lockfile, r: *const Lockfile, cut_off_pkg_id: usize, alloca
var i: usize = 0;
for (l.buffers.trees.items) |l_tree| {
const rel_path, _ = Tree.relativePathAndDepth(l, l_tree.id, &path_buf, &depth_buf, .pkg_path);
const rel_path, _ = Tree.relativePathAndDepth(
l.buffers.string_bytes.items,
l.buffers.trees.items,
l.buffers.dependencies.items,
l_tree.id,
&path_buf,
&depth_buf,
.pkg_path,
);
const tree_path = try allocator.dupe(u8, rel_path);
for (l_tree.dependencies.get(l_hoisted_deps)) |l_dep_id| {
if (l_dep_id == invalid_dependency_id) continue;
@@ -7031,7 +7075,15 @@ pub fn eql(l: *const Lockfile, r: *const Lockfile, cut_off_pkg_id: usize, alloca
i = 0;
for (r.buffers.trees.items) |r_tree| {
const rel_path, _ = Tree.relativePathAndDepth(r, r_tree.id, &path_buf, &depth_buf, .pkg_path);
const rel_path, _ = Tree.relativePathAndDepth(
r.buffers.string_bytes.items,
r.buffers.trees.items,
r.buffers.dependencies.items,
r_tree.id,
&path_buf,
&depth_buf,
.pkg_path,
);
const tree_path = try allocator.dupe(u8, rel_path);
for (r_tree.dependencies.get(r_hoisted_deps)) |r_dep_id| {
if (r_dep_id == invalid_dependency_id) continue;
@@ -7511,7 +7563,9 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void {
try w.write(tree_id);
const relative_path, const depth = Lockfile.Tree.relativePathAndDepth(
this,
this.buffers.string_bytes.items,
this.buffers.trees.items,
this.buffers.dependencies.items,
@intCast(tree_id),
&path_buf,
&depth_buf,

View File

@@ -0,0 +1,158 @@
const std = @import("std");
const bun = @import("root").bun;
const string = bun.string;
const String = bun.Semver.String;
const Lockfile = bun.install.Lockfile;
const OOM = bun.OOM;
const File = bun.sys.File;
const Output = bun.Output;
const DependencyID = bun.install.DependencyID;
const Dependency = bun.install.Dependency;
/// A stripped-down snapshot of a lockfile's hoisted dependency tree, saved
/// as `node_modules/.bun.lockb` after an install. On the next install it is
/// diffed against the new lockfile to find stale packages to delete.
/// All slices are owned by the struct and freed by `deinit`.
pub const MiniLockfile = struct {
    version: Version,
    dep_names: []const String,
    dep_behaviors: []const Dependency.Behavior,
    trees: []const Lockfile.Tree,
    string_bytes: []const u8,
    hoisted_dependencies: []const DependencyID,

    pub const Version = enum(u32) {
        v0,
        _,

        pub const current = Version.v0;
    };

    /// Frees every slice allocated by `deserialize`.
    /// FIX: previously only `string_bytes` was freed, leaking `trees`,
    /// `dep_names`, `dep_behaviors`, and `hoisted_dependencies`.
    pub fn deinit(mini_lockfile: *const @This(), allocator: std.mem.Allocator) void {
        allocator.free(mini_lockfile.hoisted_dependencies);
        allocator.free(mini_lockfile.trees);
        allocator.free(mini_lockfile.dep_behaviors);
        allocator.free(mini_lockfile.dep_names);
        allocator.free(mini_lockfile.string_bytes);
    }

    /// Loads and parses `.bun.lockb` from `dir`. Returns null when the file
    /// is missing/unreadable, or when it is corrupt (in which case it is
    /// deleted so the next install starts fresh). Caller owns the result and
    /// must call `deinit`.
    pub fn loadFromDir(allocator: std.mem.Allocator, dir: bun.FD) OOM!?@This() {
        const file = File.openatOSPath(dir, bun.OSPathLiteral(".bun.lockb"), bun.O.RDONLY, 0).unwrap() catch {
            return null;
        };
        defer file.close();

        const bytes = file.readToEnd(allocator).unwrap() catch {
            return null;
        };
        // FIX: `deserialize` copies everything it needs out of the raw file
        // contents, so free them on every path (previously leaked).
        defer allocator.free(bytes);

        var stream = std.io.fixedBufferStream(bytes);
        return deserialize(allocator, stream.reader()) catch |err| {
            // for any error, delete and ignore
            Output.warn("invalid hidden lockfile: {s}", .{@errorName(err)});
            Output.flush();
            _ = bun.sys.unlinkat(dir, bun.OSPathLiteral(".bun.lockb"));
            return null;
        };
    }

    /// Parses the binary format written by `serialize`. Any structural
    /// mismatch returns an error; the caller treats that as a corrupt file.
    fn deserialize(allocator: std.mem.Allocator, reader: anytype) !@This() {
        const version_num = try reader.readInt(u32, .little);
        const version = try std.meta.intToEnum(Version, version_num);

        const hoisted_dependencies_len = try reader.readInt(usize, .little);
        const hoisted_dependencies = try allocator.alloc(DependencyID, hoisted_dependencies_len);
        // FIX: errdefers added throughout so earlier allocations do not leak
        // when a later read fails (the caller swallows the error and never
        // sees partial results).
        errdefer allocator.free(hoisted_dependencies);
        for (hoisted_dependencies) |*hoisted_dependency| {
            hoisted_dependency.* = try reader.readInt(DependencyID, .little);
        }

        const trees_len = try reader.readInt(usize, .little);
        const trees = try allocator.alloc(Lockfile.Tree, trees_len);
        errdefer allocator.free(trees);
        var external_tree: Lockfile.Tree.External = undefined;
        for (trees) |*tree| {
            const len = try reader.readAll(&external_tree);
            if (len != Lockfile.Tree.external_size) {
                return error.InvalidTree;
            }
            tree.* = Lockfile.Tree.toTree(external_tree);
        }

        const deps_len = try reader.readInt(usize, .little);
        const dep_behaviors = try allocator.alloc(Dependency.Behavior, deps_len);
        errdefer allocator.free(dep_behaviors);
        for (dep_behaviors) |*dep_behavior| {
            dep_behavior.* = @bitCast(try reader.readInt(u8, .little));
        }

        const dep_names = try allocator.alloc(String, deps_len);
        errdefer allocator.free(dep_names);
        for (dep_names) |*dep_name| {
            const len = try reader.readAll(&dep_name.bytes);
            if (len != String.max_inline_len) {
                return error.InvalidExternalString;
            }
        }

        const string_buf_len = try reader.readInt(usize, .little);
        const string_buf = try allocator.alloc(u8, string_buf_len);
        errdefer allocator.free(string_buf);
        const len = try reader.readAll(string_buf);
        if (len != string_buf_len) {
            return error.InvalidStringBuf;
        }

        return .{
            .version = version,
            .trees = trees,
            .dep_names = dep_names,
            .dep_behaviors = dep_behaviors,
            .string_bytes = string_buf,
            .hoisted_dependencies = hoisted_dependencies,
        };
    }

    /// Serializes `lockfile` and writes it to `.bun.lockb` in `dir`.
    /// Write failures are warned about (and the partial file deleted) rather
    /// than propagated — the hidden lockfile is best-effort.
    pub fn saveToDisk(dir: bun.FD, lockfile: *const Lockfile) OOM!void {
        var bytes = std.ArrayList(u8).init(lockfile.allocator);
        defer bytes.deinit();
        try serialize(lockfile, bytes.writer());

        // FIX: O.TRUNC added — without it, rewriting with fewer bytes than
        // the previous save leaves stale trailing data in the file.
        const file = File.openat(dir, ".bun.lockb", bun.O.CREAT | bun.O.WRONLY | bun.O.TRUNC, 0o644).unwrap() catch |err| {
            Output.warn("failed to create hidden lockfile 'node_modules/.bun.lockb': {s}", .{@errorName(err)});
            Output.flush();
            return;
        };
        // FIX: close on every path — previously the fd leaked when writeAll
        // failed (close only ran on the success path).
        defer file.close();

        file.writeAll(bytes.items).unwrap() catch |err| {
            Output.warn("failed to write hidden lockfile 'node_modules/.bun.lockb': {s}", .{@errorName(err)});
            Output.flush();
            _ = bun.sys.unlinkat(dir, bun.OSPathLiteral(".bun.lockb"));
            return;
        };
    }

    /// Writes the binary format read by `deserialize`: version, hoisted
    /// dependency ids, externalized trees, dep behaviors, inline dep names,
    /// then the string buffer those names point into.
    /// NOTE(review): lengths are written as native-width `usize` and the
    /// hoisted ids via `sliceAsBytes` (native endianness) — the file is only
    /// valid on the machine that wrote it; confirm that is the intent.
    fn serialize(lockfile: *const Lockfile, writer: anytype) OOM!void {
        try writer.writeInt(u32, @intFromEnum(Version.current), .little);

        try writer.writeInt(usize, lockfile.buffers.hoisted_dependencies.items.len, .little);
        try writer.writeAll(std.mem.sliceAsBytes(lockfile.buffers.hoisted_dependencies.items));

        try writer.writeInt(usize, lockfile.buffers.trees.items.len, .little);
        for (lockfile.buffers.trees.items) |tree| {
            try writer.writeAll(&Lockfile.Tree.toExternal(tree));
        }

        // re-intern dependency names into a fresh, deduplicated string buffer
        var bytes: std.ArrayListUnmanaged(u8) = .{};
        defer bytes.deinit(lockfile.allocator);
        var pool = String.Builder.StringPool.init(lockfile.allocator);
        defer pool.deinit();
        var new_string_buf = String.Buf.init(lockfile.allocator, &bytes, &pool);

        try writer.writeInt(usize, lockfile.buffers.dependencies.items.len, .little);
        for (lockfile.buffers.dependencies.items) |dep| {
            try writer.writeInt(u8, @bitCast(dep.behavior), .little);
        }
        for (lockfile.buffers.dependencies.items) |dep| {
            const name = try new_string_buf.append(dep.name.slice(lockfile.buffers.string_bytes.items));
            try writer.writeAll(&name.bytes);
        }

        try writer.writeInt(usize, bytes.items.len, .little);
        try writer.writeAll(bytes.items);
    }
};

View File

@@ -43,7 +43,15 @@ pub const String = extern struct {
allocator: std.mem.Allocator,
pool: *Builder.StringPool,
pub fn init(lockfile: *const Lockfile) Buf {
pub fn init(allocator: std.mem.Allocator, bytes: *std.ArrayListUnmanaged(u8), pool: *Builder.StringPool) Buf {
return .{
.bytes = bytes,
.allocator = allocator,
.pool = pool,
};
}
pub fn initFromLockfile(lockfile: *const Lockfile) Buf {
return .{
.bytes = &lockfile.buffers.string_bytes,
.allocator = lockfile.allocator,