diff --git a/src/install/PackageManager/install_with_manager.zig b/src/install/PackageManager/install_with_manager.zig index a39f120192..9047d9f04b 100644 --- a/src/install/PackageManager/install_with_manager.zig +++ b/src/install/PackageManager/install_with_manager.zig @@ -782,7 +782,7 @@ pub fn installWithManager( (did_meta_hash_change or had_any_diffs or manager.update_requests.len > 0 or - (load_result == .ok and load_result.ok.serializer_result.packages_need_update) or + (load_result == .ok and (load_result.ok.serializer_result.packages_need_update or load_result.ok.serializer_result.migrated_from_lockb_v2)) or manager.lockfile.isEmpty() or manager.options.enable.force_save_lockfile)); diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index 468a7f0b7c..2b9b51d96a 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -1585,9 +1585,11 @@ pub const FormatVersion = enum(u32) { // bun v0.1.7+ // This change added tarball URLs to npm-resolved packages v2 = 2, + // Changed semver major/minor/patch to each use u64 instead of u32 + v3 = 3, _, - pub const current = FormatVersion.v2; + pub const current = FormatVersion.v3; }; pub const PackageIDSlice = ExternalSlice(PackageID); @@ -1607,7 +1609,7 @@ pub const Buffers = @import("./lockfile/Buffers.zig"); pub const Serializer = @import("./lockfile/bun.lockb.zig"); pub const CatalogMap = @import("./lockfile/CatalogMap.zig"); pub const OverrideMap = @import("./lockfile/OverrideMap.zig"); -pub const Package = @import("./lockfile/Package.zig").Package; +pub const Package = @import("./lockfile/Package.zig").Package(u64); pub const Tree = @import("./lockfile/Tree.zig"); pub fn deinit(this: *Lockfile) void { diff --git a/src/install/lockfile/Package.zig b/src/install/lockfile/Package.zig index 418f7d4ab2..9c49745d83 100644 --- a/src/install/lockfile/Package.zig +++ b/src/install/lockfile/Package.zig @@ -1,651 +1,623 @@ -pub const Package = extern struct { - name: String = .{}, - name_hash: PackageNameHash = 0, +pub fn Package(comptime SemverIntType: type) type { + return extern struct { + name: String = .{}, + name_hash: PackageNameHash = 0, - /// How this package has been resolved - /// When .tag is uninitialized, that means the package is not resolved yet. - resolution: Resolution = .{}, + /// How this package has been resolved + /// When .tag is uninitialized, that means the package is not resolved yet. + resolution: Resolution = .{}, - /// dependencies & resolutions must be the same length - /// resolutions[i] is the resolved package ID for dependencies[i] - /// if resolutions[i] is an invalid package ID, then dependencies[i] is not resolved - dependencies: DependencySlice = .{}, + /// dependencies & resolutions must be the same length + /// resolutions[i] is the resolved package ID for dependencies[i] + /// if resolutions[i] is an invalid package ID, then dependencies[i] is not resolved + dependencies: DependencySlice = .{}, - /// The resolved package IDs for this package's dependencies. Instead of storing this - /// on the `Dependency` struct within `.dependencies`, it is stored on the package itself - /// so we can access it faster. - /// - /// Each index in this array corresponds to the same index in dependencies. - /// Each value in this array corresponds to the resolved package ID for that dependency. - /// - /// So this is how you say "what package ID for lodash does this package actually resolve to?" 
- /// - /// By default, the underlying buffer is filled with "invalid_id" to indicate this package ID - /// was not resolved - resolutions: PackageIDSlice = .{}, + /// The resolved package IDs for this package's dependencies. Instead of storing this + /// on the `Dependency` struct within `.dependencies`, it is stored on the package itself + /// so we can access it faster. + /// + /// Each index in this array corresponds to the same index in dependencies. + /// Each value in this array corresponds to the resolved package ID for that dependency. + /// + /// So this is how you say "what package ID for lodash does this package actually resolve to?" + /// + /// By default, the underlying buffer is filled with "invalid_id" to indicate this package ID + /// was not resolved + resolutions: PackageIDSlice = .{}, - meta: Meta = Meta.init(), - bin: Bin = .{}, + meta: Meta = Meta.init(), + bin: Bin = .{}, - /// If any of these scripts run, they will run in order: - /// 1. preinstall - /// 2. install - /// 3. postinstall - /// 4. preprepare - /// 5. prepare - /// 6. postprepare - scripts: Package.Scripts = .{}, + /// If any of these scripts run, they will run in order: + /// 1. preinstall + /// 2. install + /// 3. postinstall + /// 4. preprepare + /// 5. prepare + /// 6. postprepare + scripts: Scripts = .{}, - pub const Scripts = @import("./Package/Scripts.zig").Scripts; - pub const Meta = @import("./Package/Meta.zig").Meta; - pub const WorkspaceMap = @import("./Package/WorkspaceMap.zig"); + const PackageType = @This(); - pub const DependencyGroup = struct { - prop: string, - field: string, - behavior: Behavior, + const Resolution = ResolutionType(SemverIntType); - pub const dependencies = DependencyGroup{ .prop = "dependencies", .field = "dependencies", .behavior = .{ .prod = true } }; - pub const dev = DependencyGroup{ .prop = "devDependencies", .field = "dev_dependencies", .behavior = .{ .dev = true } }; - pub const optional = DependencyGroup{ .prop = "optionalDependencies", .field = "optional_dependencies", .behavior = .{ .optional = true } }; - pub const peer = DependencyGroup{ .prop = "peerDependencies", .field = "peer_dependencies", .behavior = .{ .peer = true } }; - pub const workspaces = DependencyGroup{ .prop = "workspaces", .field = "workspaces", .behavior = .{ .workspace = true } }; - }; + pub const Scripts = @import("./Package/Scripts.zig").Scripts; + pub const Meta = @import("./Package/Meta.zig").Meta; + pub const WorkspaceMap = @import("./Package/WorkspaceMap.zig"); - pub inline fn isDisabled(this: *const Package, cpu: Npm.Architecture, os: Npm.OperatingSystem) bool { - return this.meta.isDisabled(cpu, os); - } + pub const DependencyGroup = struct { + prop: string, + field: string, + behavior: Behavior, - pub const Alphabetizer = struct { - names: []const String, - buf: []const u8, - resolutions: []const Resolution, - - pub fn isAlphabetical(ctx: Alphabetizer, lhs: PackageID, rhs: PackageID) bool { - return switch (ctx.names[lhs].order(&ctx.names[rhs], ctx.buf, ctx.buf)) { - .eq => ctx.resolutions[lhs].order(&ctx.resolutions[rhs], ctx.buf, ctx.buf) == .lt, - .lt => true, - .gt => false, - }; - } - }; - - const debug = Output.scoped(.Lockfile, .hidden); - - pub fn clone( - this: *const Package, - pm: *PackageManager, - old: *Lockfile, - new: *Lockfile, - package_id_mapping: []PackageID, - cloner: *Cloner, - ) !PackageID { - const old_string_buf = old.buffers.string_bytes.items; - const old_extern_string_buf = old.buffers.extern_strings.items; - var builder_ = new.stringBuilder(); - var 
builder = &builder_; - debug("Clone: {s}@{any} ({s}, {d} dependencies)", .{ - this.name.slice(old_string_buf), - this.resolution.fmt(old_string_buf, .auto), - @tagName(this.resolution.tag), - this.dependencies.len, - }); - - builder.count(this.name.slice(old_string_buf)); - this.resolution.count(old_string_buf, *Lockfile.StringBuilder, builder); - this.meta.count(old_string_buf, *Lockfile.StringBuilder, builder); - this.scripts.count(old_string_buf, *Lockfile.StringBuilder, builder); - for (old.patched_dependencies.values()) |patched_dep| builder.count(patched_dep.path.slice(old.buffers.string_bytes.items)); - const new_extern_string_count = this.bin.count(old_string_buf, old_extern_string_buf, *Lockfile.StringBuilder, builder); - const old_dependencies: []const Dependency = this.dependencies.get(old.buffers.dependencies.items); - const old_resolutions: []const PackageID = this.resolutions.get(old.buffers.resolutions.items); - - for (old_dependencies) |dependency| { - dependency.count(old_string_buf, *Lockfile.StringBuilder, builder); - } - - try builder.allocate(); - - // should be unnecessary, but Just In Case - try new.buffers.dependencies.ensureUnusedCapacity(new.allocator, old_dependencies.len); - try new.buffers.resolutions.ensureUnusedCapacity(new.allocator, old_dependencies.len); - try new.buffers.extern_strings.ensureUnusedCapacity(new.allocator, new_extern_string_count); - - const prev_len = @as(u32, @truncate(new.buffers.dependencies.items.len)); - const end = prev_len + @as(u32, @truncate(old_dependencies.len)); - const max_package_id = @as(PackageID, @truncate(old.packages.len)); - - new.buffers.dependencies.items = new.buffers.dependencies.items.ptr[0..end]; - new.buffers.resolutions.items = new.buffers.resolutions.items.ptr[0..end]; - - new.buffers.extern_strings.items.len += new_extern_string_count; - const new_extern_strings = new.buffers.extern_strings.items[new.buffers.extern_strings.items.len - new_extern_string_count ..]; - - const dependencies: []Dependency = new.buffers.dependencies.items[prev_len..end]; - const resolutions: []PackageID = new.buffers.resolutions.items[prev_len..end]; - - const id = @as(PackageID, @truncate(new.packages.len)); - const new_package = try new.appendPackageWithID( - .{ - .name = builder.appendWithHash( - String, - this.name.slice(old_string_buf), - this.name_hash, - ), - .bin = this.bin.clone( - old_string_buf, - old_extern_string_buf, - new.buffers.extern_strings.items, - new_extern_strings, - *Lockfile.StringBuilder, - builder, - ), - .name_hash = this.name_hash, - .meta = this.meta.clone( - id, - old_string_buf, - *Lockfile.StringBuilder, - builder, - ), - .resolution = this.resolution.clone( - old_string_buf, - *Lockfile.StringBuilder, - builder, - ), - .scripts = this.scripts.clone( - old_string_buf, - *Lockfile.StringBuilder, - builder, - ), - .dependencies = .{ .off = prev_len, .len = end - prev_len }, - .resolutions = .{ .off = prev_len, .len = end - prev_len }, - }, - id, - ); - - package_id_mapping[this.meta.id] = new_package.meta.id; - - if (cloner.manager.preinstall_state.items.len > 0) { - cloner.manager.preinstall_state.items[new_package.meta.id] = cloner.old_preinstall_state.items[this.meta.id]; - } - - for (old_dependencies, dependencies) |old_dep, *new_dep| { - new_dep.* = try old_dep.clone( - pm, - old_string_buf, - *Lockfile.StringBuilder, - builder, - ); - } - - builder.clamp(); - - cloner.trees_count += @as(u32, @intFromBool(old_resolutions.len > 0)); - - for (old_resolutions, resolutions, 0..) 
|old_resolution, *resolution, i| { - if (old_resolution >= max_package_id) { - resolution.* = invalid_package_id; - continue; - } - - const mapped = package_id_mapping[old_resolution]; - if (mapped < max_package_id) { - resolution.* = mapped; - } else { - try cloner.clone_queue.append(.{ - .old_resolution = old_resolution, - .parent = new_package.meta.id, - .resolve_id = new_package.resolutions.off + @as(PackageID, @intCast(i)), - }); - } - } - - return new_package.meta.id; - } - - pub fn fromPackageJSON( - lockfile: *Lockfile, - pm: *PackageManager, - package_json: *PackageJSON, - comptime features: Features, - ) !Package { - var package = Package{}; - - // var string_buf = package_json; - - var string_builder = lockfile.stringBuilder(); - - var total_dependencies_count: u32 = 0; - // var bin_extern_strings_count: u32 = 0; - - // --- Counting - { - string_builder.count(package_json.name); - string_builder.count(package_json.version); - const dependencies = package_json.dependencies.map.values(); - for (dependencies) |dep| { - if (dep.behavior.isEnabled(features)) { - dep.count(package_json.dependencies.source_buf, @TypeOf(&string_builder), &string_builder); - total_dependencies_count += 1; - } - } - } - - // string_builder.count(manifest.str(&package_version_ptr.tarball_url)); - - try string_builder.allocate(); - defer string_builder.clamp(); - // var extern_strings_list = &lockfile.buffers.extern_strings; - var dependencies_list = &lockfile.buffers.dependencies; - var resolutions_list = &lockfile.buffers.resolutions; - try dependencies_list.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); - try resolutions_list.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); - // try extern_strings_list.ensureUnusedCapacity(lockfile.allocator, bin_extern_strings_count); - // extern_strings_list.items.len += bin_extern_strings_count; - - // -- Cloning - { - const package_name: ExternalString = string_builder.append(ExternalString, package_json.name); - package.name_hash = package_name.hash; - package.name = package_name.value; - - package.resolution = .{ - .tag = .root, - .value = .{ .root = {} }, - }; - - const total_len = dependencies_list.items.len + total_dependencies_count; - if (comptime Environment.allow_assert) assert(dependencies_list.items.len == resolutions_list.items.len); - - var dependencies: []Dependency = dependencies_list.items.ptr[dependencies_list.items.len..total_len]; - @memset(dependencies, Dependency{}); - - const package_dependencies = package_json.dependencies.map.values(); - const source_buf = package_json.dependencies.source_buf; - for (package_dependencies) |dep| { - if (!dep.behavior.isEnabled(features)) continue; - - dependencies[0] = try dep.clone(pm, source_buf, @TypeOf(&string_builder), &string_builder); - dependencies = dependencies[1..]; - if (dependencies.len == 0) break; - } - - // We lose the bin info here - // package.bin = package_version.bin.clone(string_buf, manifest.extern_strings_bin_entries, extern_strings_list.items, extern_strings_slice, @TypeOf(&string_builder), &string_builder); - // and the integriy hash - // package.meta.integrity = package_version.integrity; - - package.meta.arch = package_json.arch; - package.meta.os = package_json.os; - - package.dependencies.off = @as(u32, @truncate(dependencies_list.items.len)); - package.dependencies.len = total_dependencies_count - @as(u32, @truncate(dependencies.len)); - package.resolutions.off = package.dependencies.off; - package.resolutions.len = package.dependencies.len; 
- - const new_length = package.dependencies.len + dependencies_list.items.len; - - @memset(resolutions_list.items.ptr[package.dependencies.off .. package.dependencies.off + package.dependencies.len], invalid_package_id); - - dependencies_list.items = dependencies_list.items.ptr[0..new_length]; - resolutions_list.items = resolutions_list.items.ptr[0..new_length]; - - return package; - } - } - - pub fn fromNPM( - pm: *PackageManager, - allocator: Allocator, - lockfile: *Lockfile, - log: *logger.Log, - manifest: *const Npm.PackageManifest, - version: Semver.Version, - package_version_ptr: *const Npm.PackageVersion, - string_buf: []const u8, - comptime features: Features, - ) !Package { - var package = Package{}; - - const package_version = package_version_ptr.*; - - const dependency_groups = comptime brk: { - var out_groups: [ - @as(usize, @intFromBool(features.dependencies)) + - @as(usize, @intFromBool(features.dev_dependencies)) + - @as(usize, @intFromBool(features.optional_dependencies)) + - @as(usize, @intFromBool(features.peer_dependencies)) - ]DependencyGroup = undefined; - var out_group_i: usize = 0; - - if (features.dependencies) { - out_groups[out_group_i] = DependencyGroup.dependencies; - out_group_i += 1; - } - if (features.dev_dependencies) { - out_groups[out_group_i] = DependencyGroup.dev; - out_group_i += 1; - } - - if (features.optional_dependencies) { - out_groups[out_group_i] = DependencyGroup.optional; - out_group_i += 1; - } - - if (features.peer_dependencies) { - out_groups[out_group_i] = DependencyGroup.peer; - out_group_i += 1; - } - - break :brk out_groups; + pub const dependencies = DependencyGroup{ .prop = "dependencies", .field = "dependencies", .behavior = .{ .prod = true } }; + pub const dev = DependencyGroup{ .prop = "devDependencies", .field = "dev_dependencies", .behavior = .{ .dev = true } }; + pub const optional = DependencyGroup{ .prop = "optionalDependencies", .field = "optional_dependencies", .behavior = .{ .optional = true } }; + pub const peer = DependencyGroup{ .prop = "peerDependencies", .field = "peer_dependencies", .behavior = .{ .peer = true } }; + pub const workspaces = DependencyGroup{ .prop = "workspaces", .field = "workspaces", .behavior = .{ .workspace = true } }; }; - var string_builder = lockfile.stringBuilder(); + pub inline fn isDisabled(this: *const @This(), cpu: Npm.Architecture, os: Npm.OperatingSystem) bool { + return this.meta.isDisabled(cpu, os); + } - var total_dependencies_count: u32 = 0; - var bin_extern_strings_count: u32 = 0; + pub const Alphabetizer = struct { + names: []const String, + buf: []const u8, + resolutions: []const Resolution, - // --- Counting - { - string_builder.count(manifest.name()); - version.count(string_buf, @TypeOf(&string_builder), &string_builder); + pub fn isAlphabetical(ctx: Alphabetizer, lhs: PackageID, rhs: PackageID) bool { + return switch (ctx.names[lhs].order(&ctx.names[rhs], ctx.buf, ctx.buf)) { + .eq => ctx.resolutions[lhs].order(&ctx.resolutions[rhs], ctx.buf, ctx.buf) == .lt, + .lt => true, + .gt => false, + }; + } + }; - inline for (dependency_groups) |group| { - const map: ExternalStringMap = @field(package_version, group.field); - const keys = map.name.get(manifest.external_strings); - const version_strings = map.value.get(manifest.external_strings_for_versions); - total_dependencies_count += map.value.len; + const debug = Output.scoped(.Lockfile, .hidden); - if (comptime Environment.isDebug) assert(keys.len == version_strings.len); + pub fn clone( + this: *const @This(), + pm: *PackageManager, 
+ old: *Lockfile, + new: *Lockfile, + package_id_mapping: []PackageID, + cloner: *Cloner, + ) !PackageID { + const old_string_buf = old.buffers.string_bytes.items; + const old_extern_string_buf = old.buffers.extern_strings.items; + var builder_ = new.stringBuilder(); + var builder = &builder_; + debug("Clone: {s}@{any} ({s}, {d} dependencies)", .{ + this.name.slice(old_string_buf), + this.resolution.fmt(old_string_buf, .auto), + @tagName(this.resolution.tag), + this.dependencies.len, + }); - for (keys, version_strings) |key, ver| { - string_builder.count(key.slice(string_buf)); - string_builder.count(ver.slice(string_buf)); + builder.count(this.name.slice(old_string_buf)); + this.resolution.count(old_string_buf, *Lockfile.StringBuilder, builder); + this.meta.count(old_string_buf, *Lockfile.StringBuilder, builder); + this.scripts.count(old_string_buf, *Lockfile.StringBuilder, builder); + for (old.patched_dependencies.values()) |patched_dep| builder.count(patched_dep.path.slice(old.buffers.string_bytes.items)); + const new_extern_string_count = this.bin.count(old_string_buf, old_extern_string_buf, *Lockfile.StringBuilder, builder); + const old_dependencies: []const Dependency = this.dependencies.get(old.buffers.dependencies.items); + const old_resolutions: []const PackageID = this.resolutions.get(old.buffers.resolutions.items); + + for (old_dependencies) |dependency| { + dependency.count(old_string_buf, *Lockfile.StringBuilder, builder); + } + + try builder.allocate(); + + // should be unnecessary, but Just In Case + try new.buffers.dependencies.ensureUnusedCapacity(new.allocator, old_dependencies.len); + try new.buffers.resolutions.ensureUnusedCapacity(new.allocator, old_dependencies.len); + try new.buffers.extern_strings.ensureUnusedCapacity(new.allocator, new_extern_string_count); + + const prev_len = @as(u32, @truncate(new.buffers.dependencies.items.len)); + const end = prev_len + @as(u32, @truncate(old_dependencies.len)); + const max_package_id = @as(PackageID, @truncate(old.packages.len)); + + new.buffers.dependencies.items = new.buffers.dependencies.items.ptr[0..end]; + new.buffers.resolutions.items = new.buffers.resolutions.items.ptr[0..end]; + + new.buffers.extern_strings.items.len += new_extern_string_count; + const new_extern_strings = new.buffers.extern_strings.items[new.buffers.extern_strings.items.len - new_extern_string_count ..]; + + const dependencies: []Dependency = new.buffers.dependencies.items[prev_len..end]; + const resolutions: []PackageID = new.buffers.resolutions.items[prev_len..end]; + + const id = @as(PackageID, @truncate(new.packages.len)); + const new_package = try new.appendPackageWithID( + .{ + .name = builder.appendWithHash( + String, + this.name.slice(old_string_buf), + this.name_hash, + ), + .bin = this.bin.clone( + old_string_buf, + old_extern_string_buf, + new.buffers.extern_strings.items, + new_extern_strings, + *Lockfile.StringBuilder, + builder, + ), + .name_hash = this.name_hash, + .meta = this.meta.clone( + id, + old_string_buf, + *Lockfile.StringBuilder, + builder, + ), + .resolution = this.resolution.clone( + old_string_buf, + *Lockfile.StringBuilder, + builder, + ), + .scripts = this.scripts.clone( + old_string_buf, + *Lockfile.StringBuilder, + builder, + ), + .dependencies = .{ .off = prev_len, .len = end - prev_len }, + .resolutions = .{ .off = prev_len, .len = end - prev_len }, + }, + id, + ); + + package_id_mapping[this.meta.id] = new_package.meta.id; + + if (cloner.manager.preinstall_state.items.len > 0) { + 
cloner.manager.preinstall_state.items[new_package.meta.id] = cloner.old_preinstall_state.items[this.meta.id]; + } + + for (old_dependencies, dependencies) |old_dep, *new_dep| { + new_dep.* = try old_dep.clone( + pm, + old_string_buf, + *Lockfile.StringBuilder, + builder, + ); + } + + builder.clamp(); + + cloner.trees_count += @as(u32, @intFromBool(old_resolutions.len > 0)); + + for (old_resolutions, resolutions, 0..) |old_resolution, *resolution, i| { + if (old_resolution >= max_package_id) { + resolution.* = invalid_package_id; + continue; + } + + const mapped = package_id_mapping[old_resolution]; + if (mapped < max_package_id) { + resolution.* = mapped; + } else { + try cloner.clone_queue.append(.{ + .old_resolution = old_resolution, + .parent = new_package.meta.id, + .resolve_id = new_package.resolutions.off + @as(PackageID, @intCast(i)), + }); } } - bin_extern_strings_count = package_version.bin.count(string_buf, manifest.extern_strings_bin_entries, @TypeOf(&string_builder), &string_builder); + return new_package.meta.id; } - string_builder.count(manifest.str(&package_version_ptr.tarball_url)); + pub fn fromPackageJSON( + lockfile: *Lockfile, + pm: *PackageManager, + package_json: *PackageJSON, + comptime features: Features, + ) !@This() { + var package = @This(){}; - try string_builder.allocate(); - defer string_builder.clamp(); - var extern_strings_list = &lockfile.buffers.extern_strings; - var dependencies_list = &lockfile.buffers.dependencies; - var resolutions_list = &lockfile.buffers.resolutions; - try dependencies_list.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); - try resolutions_list.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); - try extern_strings_list.ensureUnusedCapacity(lockfile.allocator, bin_extern_strings_count); - extern_strings_list.items.len += bin_extern_strings_count; - const extern_strings_slice = extern_strings_list.items[extern_strings_list.items.len - bin_extern_strings_count ..]; + // var string_buf = package_json; - // -- Cloning - { - const package_name: ExternalString = string_builder.appendWithHash(ExternalString, manifest.name(), manifest.pkg.name.hash); - package.name_hash = package_name.hash; - package.name = package_name.value; - package.resolution = Resolution{ - .value = .{ - .npm = .{ - .version = version.append( - manifest.string_buf, - @TypeOf(&string_builder), - &string_builder, - ), - .url = string_builder.append(String, manifest.str(&package_version_ptr.tarball_url)), - }, - }, - .tag = .npm, + var string_builder = lockfile.stringBuilder(); + + var total_dependencies_count: u32 = 0; + // var bin_extern_strings_count: u32 = 0; + + // --- Counting + { + string_builder.count(package_json.name); + string_builder.count(package_json.version); + const dependencies = package_json.dependencies.map.values(); + for (dependencies) |dep| { + if (dep.behavior.isEnabled(features)) { + dep.count(package_json.dependencies.source_buf, @TypeOf(&string_builder), &string_builder); + total_dependencies_count += 1; + } + } + } + + // string_builder.count(manifest.str(&package_version_ptr.tarball_url)); + + try string_builder.allocate(); + defer string_builder.clamp(); + // var extern_strings_list = &lockfile.buffers.extern_strings; + var dependencies_list = &lockfile.buffers.dependencies; + var resolutions_list = &lockfile.buffers.resolutions; + try dependencies_list.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); + try resolutions_list.ensureUnusedCapacity(lockfile.allocator, 
total_dependencies_count);
+            // try extern_strings_list.ensureUnusedCapacity(lockfile.allocator, bin_extern_strings_count);
+            // extern_strings_list.items.len += bin_extern_strings_count;
+
+            // -- Cloning
+            {
+                const package_name: ExternalString = string_builder.append(ExternalString, package_json.name);
+                package.name_hash = package_name.hash;
+                package.name = package_name.value;
+
+                package.resolution = .{
+                    .tag = .root,
+                    .value = .{ .root = {} },
+                };
+
+                const total_len = dependencies_list.items.len + total_dependencies_count;
+                if (comptime Environment.allow_assert) assert(dependencies_list.items.len == resolutions_list.items.len);
+
+                var dependencies: []Dependency = dependencies_list.items.ptr[dependencies_list.items.len..total_len];
+                @memset(dependencies, Dependency{});
+
+                const package_dependencies = package_json.dependencies.map.values();
+                const source_buf = package_json.dependencies.source_buf;
+                for (package_dependencies) |dep| {
+                    if (!dep.behavior.isEnabled(features)) continue;
+
+                    dependencies[0] = try dep.clone(pm, source_buf, @TypeOf(&string_builder), &string_builder);
+                    dependencies = dependencies[1..];
+                    if (dependencies.len == 0) break;
+                }
+
+                // We lose the bin info here
+                // package.bin = package_version.bin.clone(string_buf, manifest.extern_strings_bin_entries, extern_strings_list.items, extern_strings_slice, @TypeOf(&string_builder), &string_builder);
+                // and the integrity hash
+                // package.meta.integrity = package_version.integrity;
+
+                package.meta.arch = package_json.arch;
+                package.meta.os = package_json.os;
+
+                package.dependencies.off = @as(u32, @truncate(dependencies_list.items.len));
+                package.dependencies.len = total_dependencies_count - @as(u32, @truncate(dependencies.len));
+                package.resolutions.off = package.dependencies.off;
+                package.resolutions.len = package.dependencies.len;
+
+                const new_length = package.dependencies.len + dependencies_list.items.len;
+
+                @memset(resolutions_list.items.ptr[package.dependencies.off .. 
package.dependencies.off + package.dependencies.len], invalid_package_id); + + dependencies_list.items = dependencies_list.items.ptr[0..new_length]; + resolutions_list.items = resolutions_list.items.ptr[0..new_length]; + + return package; + } + } + + pub fn fromNPM( + pm: *PackageManager, + allocator: Allocator, + lockfile: *Lockfile, + log: *logger.Log, + manifest: *const Npm.PackageManifest, + version: Semver.Version, + package_version_ptr: *const Npm.PackageVersion, + string_buf: []const u8, + comptime features: Features, + ) !@This() { + var package = @This(){}; + + const package_version = package_version_ptr.*; + + const dependency_groups = comptime brk: { + var out_groups: [ + @as(usize, @intFromBool(features.dependencies)) + + @as(usize, @intFromBool(features.dev_dependencies)) + + @as(usize, @intFromBool(features.optional_dependencies)) + + @as(usize, @intFromBool(features.peer_dependencies)) + ]DependencyGroup = undefined; + var out_group_i: usize = 0; + + if (features.dependencies) { + out_groups[out_group_i] = DependencyGroup.dependencies; + out_group_i += 1; + } + if (features.dev_dependencies) { + out_groups[out_group_i] = DependencyGroup.dev; + out_group_i += 1; + } + + if (features.optional_dependencies) { + out_groups[out_group_i] = DependencyGroup.optional; + out_group_i += 1; + } + + if (features.peer_dependencies) { + out_groups[out_group_i] = DependencyGroup.peer; + out_group_i += 1; + } + + break :brk out_groups; }; - const total_len = dependencies_list.items.len + total_dependencies_count; - if (comptime Environment.allow_assert) assert(dependencies_list.items.len == resolutions_list.items.len); + var string_builder = lockfile.stringBuilder(); - var dependencies = dependencies_list.items.ptr[dependencies_list.items.len..total_len]; - @memset(dependencies, .{}); + var total_dependencies_count: u32 = 0; + var bin_extern_strings_count: u32 = 0; - total_dependencies_count = 0; - inline for (dependency_groups) |group| { - const map: ExternalStringMap = @field(package_version, group.field); - const keys = map.name.get(manifest.external_strings); - const version_strings = map.value.get(manifest.external_strings_for_versions); + // --- Counting + { + string_builder.count(manifest.name()); + version.count(string_buf, @TypeOf(&string_builder), &string_builder); - if (comptime Environment.isDebug) assert(keys.len == version_strings.len); - const is_peer = comptime strings.eqlComptime(group.field, "peer_dependencies"); + inline for (dependency_groups) |group| { + const map: ExternalStringMap = @field(package_version, group.field); + const keys = map.name.get(manifest.external_strings); + const version_strings = map.value.get(manifest.external_strings_for_versions); + total_dependencies_count += map.value.len; - list: for (keys, version_strings, 0..) |key, version_string_, i| { - // Duplicate peer & dev dependencies are promoted to whichever appeared first - // In practice, npm validates this so it shouldn't happen - var duplicate_at: ?usize = null; - if (comptime group.behavior.isPeer() or group.behavior.isDev() or group.behavior.isOptional()) { - for (dependencies[0..total_dependencies_count], 0..) 
|dependency, j| { - if (dependency.name_hash == key.hash) { - if (comptime group.behavior.isOptional()) { - duplicate_at = j; - break; + if (comptime Environment.isDebug) assert(keys.len == version_strings.len); + + for (keys, version_strings) |key, ver| { + string_builder.count(key.slice(string_buf)); + string_builder.count(ver.slice(string_buf)); + } + } + + bin_extern_strings_count = package_version.bin.count(string_buf, manifest.extern_strings_bin_entries, @TypeOf(&string_builder), &string_builder); + } + + string_builder.count(manifest.str(&package_version_ptr.tarball_url)); + + try string_builder.allocate(); + defer string_builder.clamp(); + var extern_strings_list = &lockfile.buffers.extern_strings; + var dependencies_list = &lockfile.buffers.dependencies; + var resolutions_list = &lockfile.buffers.resolutions; + try dependencies_list.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); + try resolutions_list.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); + try extern_strings_list.ensureUnusedCapacity(lockfile.allocator, bin_extern_strings_count); + extern_strings_list.items.len += bin_extern_strings_count; + const extern_strings_slice = extern_strings_list.items[extern_strings_list.items.len - bin_extern_strings_count ..]; + + // -- Cloning + { + const package_name: ExternalString = string_builder.appendWithHash(ExternalString, manifest.name(), manifest.pkg.name.hash); + package.name_hash = package_name.hash; + package.name = package_name.value; + package.resolution = Resolution{ + .value = .{ + .npm = .{ + .version = version.append( + manifest.string_buf, + @TypeOf(&string_builder), + &string_builder, + ), + .url = string_builder.append(String, manifest.str(&package_version_ptr.tarball_url)), + }, + }, + .tag = .npm, + }; + + const total_len = dependencies_list.items.len + total_dependencies_count; + if (comptime Environment.allow_assert) assert(dependencies_list.items.len == resolutions_list.items.len); + + var dependencies = dependencies_list.items.ptr[dependencies_list.items.len..total_len]; + @memset(dependencies, .{}); + + total_dependencies_count = 0; + inline for (dependency_groups) |group| { + const map: ExternalStringMap = @field(package_version, group.field); + const keys = map.name.get(manifest.external_strings); + const version_strings = map.value.get(manifest.external_strings_for_versions); + + if (comptime Environment.isDebug) assert(keys.len == version_strings.len); + const is_peer = comptime strings.eqlComptime(group.field, "peer_dependencies"); + + list: for (keys, version_strings, 0..) |key, version_string_, i| { + // Duplicate peer & dev dependencies are promoted to whichever appeared first + // In practice, npm validates this so it shouldn't happen + var duplicate_at: ?usize = null; + if (comptime group.behavior.isPeer() or group.behavior.isDev() or group.behavior.isOptional()) { + for (dependencies[0..total_dependencies_count], 0..) 
|dependency, j| { + if (dependency.name_hash == key.hash) { + if (comptime group.behavior.isOptional()) { + duplicate_at = j; + break; + } + + continue :list; + } + } + } + + const name: ExternalString = string_builder.appendWithHash(ExternalString, key.slice(string_buf), key.hash); + const dep_version = string_builder.appendWithHash(String, version_string_.slice(string_buf), version_string_.hash); + const sliced = dep_version.sliced(lockfile.buffers.string_bytes.items); + + var behavior = group.behavior; + if (comptime is_peer) { + behavior.optional = i < package_version.non_optional_peer_dependencies_start; + } + if (package_version_ptr.allDependenciesBundled()) { + behavior.bundled = true; + } else for (package_version.bundled_dependencies.get(manifest.bundled_deps_buf)) |bundled_dep_name_hash| { + if (bundled_dep_name_hash == name.hash) { + behavior.bundled = true; + break; + } + } + + const dependency = Dependency{ + .name = name.value, + .name_hash = name.hash, + .behavior = behavior, + .version = Dependency.parse( + allocator, + name.value, + name.hash, + sliced.slice, + &sliced, + log, + pm, + ) orelse Dependency.Version{}, + }; + + // If a dependency appears in both "dependencies" and "optionalDependencies", it is considered optional! + if (comptime group.behavior.isOptional()) { + if (duplicate_at) |j| { + // need to shift dependencies after the duplicate to maintain sort order + for (j + 1..total_dependencies_count) |k| { + dependencies[k - 1] = dependencies[k]; } + // https://docs.npmjs.com/cli/v8/configuring-npm/package-json#optionaldependencies + // > Entries in optionalDependencies will override entries of the same name in dependencies, so it's usually best to only put in one place. + dependencies[total_dependencies_count - 1] = dependency; continue :list; } } - } - const name: ExternalString = string_builder.appendWithHash(ExternalString, key.slice(string_buf), key.hash); - const dep_version = string_builder.appendWithHash(String, version_string_.slice(string_buf), version_string_.hash); - const sliced = dep_version.sliced(lockfile.buffers.string_bytes.items); - - var behavior = group.behavior; - if (comptime is_peer) { - behavior.optional = i < package_version.non_optional_peer_dependencies_start; + dependencies[total_dependencies_count] = dependency; + total_dependencies_count += 1; } - if (package_version_ptr.allDependenciesBundled()) { - behavior.bundled = true; - } else for (package_version.bundled_dependencies.get(manifest.bundled_deps_buf)) |bundled_dep_name_hash| { - if (bundled_dep_name_hash == name.hash) { - behavior.bundled = true; + } + + package.bin = package_version.bin.clone(string_buf, manifest.extern_strings_bin_entries, extern_strings_list.items, extern_strings_slice, @TypeOf(&string_builder), &string_builder); + + package.meta.arch = package_version.cpu; + package.meta.os = package_version.os; + package.meta.integrity = package_version.integrity; + package.meta.setHasInstallScript(package_version.has_install_script); + + package.dependencies.off = @as(u32, @truncate(dependencies_list.items.len)); + package.dependencies.len = total_dependencies_count; + package.resolutions.off = package.dependencies.off; + package.resolutions.len = package.dependencies.len; + + const new_length = package.dependencies.len + dependencies_list.items.len; + + @memset(resolutions_list.items.ptr[package.dependencies.off .. 
package.dependencies.off + package.dependencies.len], invalid_package_id);
+
+                dependencies_list.items = dependencies_list.items.ptr[0..new_length];
+                resolutions_list.items = resolutions_list.items.ptr[0..new_length];
+
+                if (comptime Environment.isDebug) {
+                    if (package.resolution.value.npm.url.isEmpty()) {
+                        Output.panic("tarball_url is empty for package {s}@{}", .{ manifest.name(), version });
+                    }
+                }
+
+                return package;
+            }
+        }
+
+        pub const Diff = struct {
+            pub const Op = enum {
+                add,
+                remove,
+                update,
+                unlink,
+                link,
+            };
+
+            pub const Summary = struct {
+                add: u32 = 0,
+                remove: u32 = 0,
+                update: u32 = 0,
+                overrides_changed: bool = false,
+                catalogs_changed: bool = false,
+
+                // Whether this dependency should be added to the lockfile's trusted dependencies.
+                // It is false when the new trusted dependency comes from the default list.
+                added_trusted_dependencies: std.ArrayHashMapUnmanaged(TruncatedPackageNameHash, bool, ArrayIdentityContext, false) = .{},
+                removed_trusted_dependencies: TrustedDependenciesSet = .{},
+
+                patched_dependencies_changed: bool = false,
+
+                pub inline fn sum(this: *Summary, that: Summary) void {
+                    this.add += that.add;
+                    this.remove += that.remove;
+                    this.update += that.update;
+                }
+
+                pub inline fn hasDiffs(this: Summary) bool {
+                    return this.add > 0 or this.remove > 0 or this.update > 0 or this.overrides_changed or this.catalogs_changed or
+                        this.added_trusted_dependencies.count() > 0 or
+                        this.removed_trusted_dependencies.count() > 0 or
+                        this.patched_dependencies_changed;
+                }
+            };
+
+            pub fn generate(
+                pm: *PackageManager,
+                allocator: Allocator,
+                log: *logger.Log,
+                from_lockfile: *Lockfile,
+                to_lockfile: *Lockfile,
+                from: *PackageType,
+                to: *PackageType,
+                update_requests: ?[]PackageManager.UpdateRequest,
+                id_mapping: ?[]PackageID,
+            ) !Summary {
+                var summary = Summary{};
+                const is_root = id_mapping != null;
+                var to_deps = to.dependencies.get(to_lockfile.buffers.dependencies.items);
+                const from_deps = from.dependencies.get(from_lockfile.buffers.dependencies.items);
+                const from_resolutions = from.resolutions.get(from_lockfile.buffers.resolutions.items);
+                var to_i: usize = 0;
+
+                if (from_lockfile.overrides.map.count() != to_lockfile.overrides.map.count()) {
+                    summary.overrides_changed = true;
+
+                    if (PackageManager.verbose_install) {
+                        Output.prettyErrorln("Overrides changed since last install", .{});
+                    }
+                } else {
+                    from_lockfile.overrides.sort(from_lockfile);
+                    to_lockfile.overrides.sort(to_lockfile);
+                    for (
+                        from_lockfile.overrides.map.keys(),
+                        from_lockfile.overrides.map.values(),
+                        to_lockfile.overrides.map.keys(),
+                        to_lockfile.overrides.map.values(),
+                    ) |from_k, *from_override, to_k, *to_override| {
+                        if ((from_k != to_k) or (!from_override.eql(to_override, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items))) {
+                            summary.overrides_changed = true;
+                            if (PackageManager.verbose_install) {
+                                Output.prettyErrorln("Overrides changed since last install", .{});
+                            }
                             break;
                         }
                     }
-
-                    const dependency = Dependency{
-                        .name = name.value,
-                        .name_hash = name.hash,
-                        .behavior = behavior,
-                        .version = Dependency.parse(
-                            allocator,
-                            name.value,
-                            name.hash,
-                            sliced.slice,
-                            &sliced,
-                            log,
-                            pm,
-                        ) orelse Dependency.Version{},
-                    };
-
-                    // If a dependency appears in both "dependencies" and "optionalDependencies", it is considered optional! 
- if (comptime group.behavior.isOptional()) { - if (duplicate_at) |j| { - // need to shift dependencies after the duplicate to maintain sort order - for (j + 1..total_dependencies_count) |k| { - dependencies[k - 1] = dependencies[k]; - } - - // https://docs.npmjs.com/cli/v8/configuring-npm/package-json#optionaldependencies - // > Entries in optionalDependencies will override entries of the same name in dependencies, so it's usually best to only put in one place. - dependencies[total_dependencies_count - 1] = dependency; - continue :list; - } - } - - dependencies[total_dependencies_count] = dependency; - total_dependencies_count += 1; - } - } - - package.bin = package_version.bin.clone(string_buf, manifest.extern_strings_bin_entries, extern_strings_list.items, extern_strings_slice, @TypeOf(&string_builder), &string_builder); - - package.meta.arch = package_version.cpu; - package.meta.os = package_version.os; - package.meta.integrity = package_version.integrity; - package.meta.setHasInstallScript(package_version.has_install_script); - - package.dependencies.off = @as(u32, @truncate(dependencies_list.items.len)); - package.dependencies.len = total_dependencies_count; - package.resolutions.off = package.dependencies.off; - package.resolutions.len = package.dependencies.len; - - const new_length = package.dependencies.len + dependencies_list.items.len; - - @memset(resolutions_list.items.ptr[package.dependencies.off .. package.dependencies.off + package.dependencies.len], invalid_package_id); - - dependencies_list.items = dependencies_list.items.ptr[0..new_length]; - resolutions_list.items = resolutions_list.items.ptr[0..new_length]; - - if (comptime Environment.isDebug) { - if (package.resolution.value.npm.url.isEmpty()) { - Output.panic("tarball_url is empty for package {s}@{}", .{ manifest.name(), version }); - } - } - - return package; - } - } - - pub const Diff = struct { - pub const Op = enum { - add, - remove, - update, - unlink, - link, - }; - - pub const Summary = struct { - add: u32 = 0, - remove: u32 = 0, - update: u32 = 0, - overrides_changed: bool = false, - catalogs_changed: bool = false, - - // bool for if this dependency should be added to lockfile trusted dependencies. - // it is false when the new trusted dependency is coming from the default list. 
- added_trusted_dependencies: std.ArrayHashMapUnmanaged(TruncatedPackageNameHash, bool, ArrayIdentityContext, false) = .{}, - removed_trusted_dependencies: TrustedDependenciesSet = .{}, - - patched_dependencies_changed: bool = false, - - pub inline fn sum(this: *Summary, that: Summary) void { - this.add += that.add; - this.remove += that.remove; - this.update += that.update; - } - - pub inline fn hasDiffs(this: Summary) bool { - return this.add > 0 or this.remove > 0 or this.update > 0 or this.overrides_changed or this.catalogs_changed or - this.added_trusted_dependencies.count() > 0 or - this.removed_trusted_dependencies.count() > 0 or - this.patched_dependencies_changed; - } - }; - - pub fn generate( - pm: *PackageManager, - allocator: Allocator, - log: *logger.Log, - from_lockfile: *Lockfile, - to_lockfile: *Lockfile, - from: *Package, - to: *Package, - update_requests: ?[]PackageManager.UpdateRequest, - id_mapping: ?[]PackageID, - ) !Summary { - var summary = Summary{}; - const is_root = id_mapping != null; - var to_deps = to.dependencies.get(to_lockfile.buffers.dependencies.items); - const from_deps = from.dependencies.get(from_lockfile.buffers.dependencies.items); - const from_resolutions = from.resolutions.get(from_lockfile.buffers.resolutions.items); - var to_i: usize = 0; - - if (from_lockfile.overrides.map.count() != to_lockfile.overrides.map.count()) { - summary.overrides_changed = true; - - if (PackageManager.verbose_install) { - Output.prettyErrorln("Overrides changed since last install", .{}); - } - } else { - from_lockfile.overrides.sort(from_lockfile); - to_lockfile.overrides.sort(to_lockfile); - for ( - from_lockfile.overrides.map.keys(), - from_lockfile.overrides.map.values(), - to_lockfile.overrides.map.keys(), - to_lockfile.overrides.map.values(), - ) |from_k, *from_override, to_k, *to_override| { - if ((from_k != to_k) or (!from_override.eql(to_override, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items))) { - summary.overrides_changed = true; - if (PackageManager.verbose_install) { - Output.prettyErrorln("Overrides changed since last install", .{}); - } - break; - } - } - } - - if (is_root) catalogs: { - - // don't sort if lengths are different - if (from_lockfile.catalogs.default.count() != to_lockfile.catalogs.default.count()) { - summary.catalogs_changed = true; - break :catalogs; } - if (from_lockfile.catalogs.groups.count() != to_lockfile.catalogs.groups.count()) { - summary.catalogs_changed = true; - break :catalogs; - } + if (is_root) catalogs: { - from_lockfile.catalogs.sort(from_lockfile); - to_lockfile.catalogs.sort(to_lockfile); - - for ( - from_lockfile.catalogs.default.keys(), - from_lockfile.catalogs.default.values(), - to_lockfile.catalogs.default.keys(), - to_lockfile.catalogs.default.values(), - ) |from_dep_name, *from_dep, to_dep_name, *to_dep| { - if (!from_dep_name.eql(to_dep_name, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items)) { + // don't sort if lengths are different + if (from_lockfile.catalogs.default.count() != to_lockfile.catalogs.default.count()) { summary.catalogs_changed = true; break :catalogs; } - if (!from_dep.eql(to_dep, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items)) { - summary.catalogs_changed = true; - break :catalogs; - } - } - - for ( - from_lockfile.catalogs.groups.keys(), - from_lockfile.catalogs.groups.values(), - to_lockfile.catalogs.groups.keys(), - to_lockfile.catalogs.groups.values(), - ) |from_catalog_name, 
from_catalog_deps, to_catalog_name, to_catalog_deps| { - if (!from_catalog_name.eql(to_catalog_name, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items)) { + if (from_lockfile.catalogs.groups.count() != to_lockfile.catalogs.groups.count()) { summary.catalogs_changed = true; break :catalogs; } - if (from_catalog_deps.count() != to_catalog_deps.count()) { - summary.catalogs_changed = true; - break :catalogs; - } + from_lockfile.catalogs.sort(from_lockfile); + to_lockfile.catalogs.sort(to_lockfile); for ( - from_catalog_deps.keys(), - from_catalog_deps.values(), - to_catalog_deps.keys(), - to_catalog_deps.values(), + from_lockfile.catalogs.default.keys(), + from_lockfile.catalogs.default.values(), + to_lockfile.catalogs.default.keys(), + to_lockfile.catalogs.default.values(), ) |from_dep_name, *from_dep, to_dep_name, *to_dep| { if (!from_dep_name.eql(to_dep_name, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items)) { summary.catalogs_changed = true; @@ -657,1485 +629,1568 @@ pub const Package = extern struct { break :catalogs; } } + + for ( + from_lockfile.catalogs.groups.keys(), + from_lockfile.catalogs.groups.values(), + to_lockfile.catalogs.groups.keys(), + to_lockfile.catalogs.groups.values(), + ) |from_catalog_name, from_catalog_deps, to_catalog_name, to_catalog_deps| { + if (!from_catalog_name.eql(to_catalog_name, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items)) { + summary.catalogs_changed = true; + break :catalogs; + } + + if (from_catalog_deps.count() != to_catalog_deps.count()) { + summary.catalogs_changed = true; + break :catalogs; + } + + for ( + from_catalog_deps.keys(), + from_catalog_deps.values(), + to_catalog_deps.keys(), + to_catalog_deps.values(), + ) |from_dep_name, *from_dep, to_dep_name, *to_dep| { + if (!from_dep_name.eql(to_dep_name, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items)) { + summary.catalogs_changed = true; + break :catalogs; + } + + if (!from_dep.eql(to_dep, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items)) { + summary.catalogs_changed = true; + break :catalogs; + } + } + } } - } - trusted_dependencies: { - // trusted dependency diff - // - // situations: - // 1 - Both old lockfile and new lockfile use default trusted dependencies, no diffs - // 2 - Both exist, only diffs are from additions and removals - // - // 3 - Old lockfile has trusted dependencies, new lockfile does not. Added are dependencies - // from default list that didn't exist previously. We need to be careful not to add these - // to the new lockfile. Removed are dependencies from old list that - // don't exist in the default list. - // - // 4 - Old lockfile used the default list, new lockfile has trusted dependencies. Added - // are dependencies are all from the new lockfile. Removed is empty because the default - // list isn't appended to the lockfile. + trusted_dependencies: { + // trusted dependency diff + // + // situations: + // 1 - Both old lockfile and new lockfile use default trusted dependencies, no diffs + // 2 - Both exist, only diffs are from additions and removals + // + // 3 - Old lockfile has trusted dependencies, new lockfile does not. Added are dependencies + // from default list that didn't exist previously. We need to be careful not to add these + // to the new lockfile. Removed are dependencies from old list that + // don't exist in the default list. 
+            //
+            // 4 - Old lockfile used the default list, new lockfile has trusted dependencies. Added
+            // dependencies are all from the new lockfile. Removed is empty because the default
+            // list isn't appended to the lockfile.

-            // 1
-            if (from_lockfile.trusted_dependencies == null and to_lockfile.trusted_dependencies == null) break :trusted_dependencies;
+                // 1
+                if (from_lockfile.trusted_dependencies == null and to_lockfile.trusted_dependencies == null) break :trusted_dependencies;

-            // 2
-            if (from_lockfile.trusted_dependencies != null and to_lockfile.trusted_dependencies != null) {
-                const from_trusted_dependencies = from_lockfile.trusted_dependencies.?;
-                const to_trusted_dependencies = to_lockfile.trusted_dependencies.?;
+                // 2
+                if (from_lockfile.trusted_dependencies != null and to_lockfile.trusted_dependencies != null) {
+                    const from_trusted_dependencies = from_lockfile.trusted_dependencies.?;
+                    const to_trusted_dependencies = to_lockfile.trusted_dependencies.?;

-                {
-                    // added
-                    var to_trusted_iter = to_trusted_dependencies.iterator();
-                    while (to_trusted_iter.next()) |entry| {
-                        const to_trusted = entry.key_ptr.*;
-                        if (!from_trusted_dependencies.contains(to_trusted)) {
+                    {
+                        // added
+                        var to_trusted_iter = to_trusted_dependencies.iterator();
+                        while (to_trusted_iter.next()) |entry| {
+                            const to_trusted = entry.key_ptr.*;
+                            if (!from_trusted_dependencies.contains(to_trusted)) {
+                                try summary.added_trusted_dependencies.put(allocator, to_trusted, true);
+                            }
+                        }
+                    }
+
+                    {
+                        // removed
+                        var from_trusted_iter = from_trusted_dependencies.iterator();
+                        while (from_trusted_iter.next()) |entry| {
+                            const from_trusted = entry.key_ptr.*;
+                            if (!to_trusted_dependencies.contains(from_trusted)) {
+                                try summary.removed_trusted_dependencies.put(allocator, from_trusted, {});
+                            }
+                        }
+                    }
+
+                    break :trusted_dependencies;
+                }
+
+                // 3
+                if (from_lockfile.trusted_dependencies != null and to_lockfile.trusted_dependencies == null) {
+                    const from_trusted_dependencies = from_lockfile.trusted_dependencies.?;
+
+                    {
+                        // added
+                        for (default_trusted_dependencies.entries) |entry| {
+                            if (!from_trusted_dependencies.contains(@truncate(entry.hash))) {
+                                // although this is a new trusted dependency, it is from the default
+                                // list so it shouldn't be added to the lockfile
+                                try summary.added_trusted_dependencies.put(allocator, @truncate(entry.hash), false);
+                            }
+                        }
+                    }
+
+                    {
+                        // removed
+                        var from_trusted_iter = from_trusted_dependencies.iterator();
+                        while (from_trusted_iter.next()) |entry| {
+                            const from_trusted = entry.key_ptr.*;
+                            if (!default_trusted_dependencies.hasWithHash(@intCast(from_trusted))) {
+                                try summary.removed_trusted_dependencies.put(allocator, from_trusted, {});
+                            }
+                        }
+                    }
+
+                    break :trusted_dependencies;
+                }
+
+                // 4
+                if (from_lockfile.trusted_dependencies == null and to_lockfile.trusted_dependencies != null) {
+                    const to_trusted_dependencies = to_lockfile.trusted_dependencies.?;
+
+                    {
+                        // add all to trusted dependencies, even if they exist in default because they weren't in the
+                        // lockfile originally
+                        var to_trusted_iter = to_trusted_dependencies.iterator();
+                        while (to_trusted_iter.next()) |entry| {
+                            const to_trusted = entry.key_ptr.*;
                             try summary.added_trusted_dependencies.put(allocator, to_trusted, true);
                         }
                     }
-                }

-                {
-                    // removed
-                    var from_trusted_iter = from_trusted_dependencies.iterator();
-                    while (from_trusted_iter.next()) |entry| {
-                        const from_trusted = entry.key_ptr.*;
-                        if (!to_trusted_dependencies.contains(from_trusted)) {
-                            try 
summary.removed_trusted_dependencies.put(allocator, from_trusted, {}); - } + { + // removed + // none } - } - break :trusted_dependencies; + break :trusted_dependencies; + } } - // 3 - if (from_lockfile.trusted_dependencies != null and to_lockfile.trusted_dependencies == null) { - const from_trusted_dependencies = from_lockfile.trusted_dependencies.?; + summary.patched_dependencies_changed = patched_dependencies_changed: { + if (from_lockfile.patched_dependencies.entries.len != to_lockfile.patched_dependencies.entries.len) break :patched_dependencies_changed true; + var iter = to_lockfile.patched_dependencies.iterator(); + while (iter.next()) |entry| { + if (from_lockfile.patched_dependencies.get(entry.key_ptr.*)) |val| { + if (!std.mem.eql( + u8, + val.path.slice(from_lockfile.buffers.string_bytes.items), + entry.value_ptr.path.slice(to_lockfile.buffers.string_bytes.items), + )) break :patched_dependencies_changed true; + } else break :patched_dependencies_changed true; + } + iter = from_lockfile.patched_dependencies.iterator(); + while (iter.next()) |entry| { + if (!to_lockfile.patched_dependencies.contains(entry.key_ptr.*)) break :patched_dependencies_changed true; + } + break :patched_dependencies_changed false; + }; - { - // added - for (default_trusted_dependencies.entries) |entry| { - if (!from_trusted_dependencies.contains(@truncate(entry.hash))) { - // although this is a new trusted dependency, it is from the default - // list so it shouldn't be added to the lockfile - try summary.added_trusted_dependencies.put(allocator, @truncate(entry.hash), false); + for (from_deps, 0..) |*from_dep, i| { + found: { + const prev_i = to_i; + + // common case, dependency is present in both versions: + // - in the same position + // - shifted by a constant offset + while (to_i < to_deps.len) : (to_i += 1) { + if (from_dep.name_hash == to_deps[to_i].name_hash) { + const from_behavior = from_dep.behavior; + const to_behavior = to_deps[to_i].behavior; + + if (from_behavior != to_behavior) { + continue; + } + + break :found; } } - } - { - // removed - var from_trusted_iter = from_trusted_dependencies.iterator(); - while (from_trusted_iter.next()) |entry| { - const from_trusted = entry.key_ptr.*; - if (!default_trusted_dependencies.hasWithHash(@intCast(from_trusted))) { - try summary.removed_trusted_dependencies.put(allocator, from_trusted, {}); + // less common, o(n^2) case + to_i = 0; + while (to_i < prev_i) : (to_i += 1) { + if (from_dep.name_hash == to_deps[to_i].name_hash) { + const from_behavior = from_dep.behavior; + const to_behavior = to_deps[to_i].behavior; + + if (from_behavior != to_behavior) { + continue; + } + + break :found; } } - } - break :trusted_dependencies; - } - - // 4 - if (from_lockfile.trusted_dependencies == null and to_lockfile.trusted_dependencies != null) { - const to_trusted_dependencies = to_lockfile.trusted_dependencies.?; - - { - // add all to trusted dependencies, even if they exist in default because they weren't in the - // lockfile originally - var to_trusted_iter = to_trusted_dependencies.iterator(); - while (to_trusted_iter.next()) |entry| { - const to_trusted = entry.key_ptr.*; - try summary.added_trusted_dependencies.put(allocator, to_trusted, true); - } - } - - { - // removed - // none - } - - break :trusted_dependencies; - } - } - - summary.patched_dependencies_changed = patched_dependencies_changed: { - if (from_lockfile.patched_dependencies.entries.len != to_lockfile.patched_dependencies.entries.len) break :patched_dependencies_changed true; - var iter = 
to_lockfile.patched_dependencies.iterator(); - while (iter.next()) |entry| { - if (from_lockfile.patched_dependencies.get(entry.key_ptr.*)) |val| { - if (!std.mem.eql( - u8, - val.path.slice(from_lockfile.buffers.string_bytes.items), - entry.value_ptr.path.slice(to_lockfile.buffers.string_bytes.items), - )) break :patched_dependencies_changed true; - } else break :patched_dependencies_changed true; - } - iter = from_lockfile.patched_dependencies.iterator(); - while (iter.next()) |entry| { - if (!to_lockfile.patched_dependencies.contains(entry.key_ptr.*)) break :patched_dependencies_changed true; - } - break :patched_dependencies_changed false; - }; - - for (from_deps, 0..) |*from_dep, i| { - found: { - const prev_i = to_i; - - // common case, dependency is present in both versions: - // - in the same position - // - shifted by a constant offset - while (to_i < to_deps.len) : (to_i += 1) { - if (from_dep.name_hash == to_deps[to_i].name_hash) { - const from_behavior = from_dep.behavior; - const to_behavior = to_deps[to_i].behavior; - - if (from_behavior != to_behavior) { - continue; - } - - break :found; - } - } - - // less common, o(n^2) case - to_i = 0; - while (to_i < prev_i) : (to_i += 1) { - if (from_dep.name_hash == to_deps[to_i].name_hash) { - const from_behavior = from_dep.behavior; - const to_behavior = to_deps[to_i].behavior; - - if (from_behavior != to_behavior) { - continue; - } - - break :found; - } - } - - // We found a removed dependency! - // We don't need to remove it - // It will be cleaned up later - summary.remove += 1; - continue; - } - defer to_i += 1; - - if (to_deps[to_i].eql(from_dep, to_lockfile.buffers.string_bytes.items, from_lockfile.buffers.string_bytes.items)) { - if (update_requests) |updates| { - if (updates.len == 0 or brk: { - for (updates) |request| { - if (from_dep.name_hash == request.name_hash) break :brk true; - } - break :brk false; - }) { - // Listed as to be updated - summary.update += 1; - continue; - } - } - - if (id_mapping) |mapping| { - const update_mapping = update_mapping: { - if (!is_root or !from_dep.behavior.isWorkspace()) { - break :update_mapping true; - } - - const workspace_path = to_lockfile.workspace_paths.getPtr(from_dep.name_hash) orelse { - break :update_mapping false; - }; - - var package_json_path: bun.AbsPath(.{ .sep = .auto }) = .initTopLevelDir(); - defer package_json_path.deinit(); - - package_json_path.append(workspace_path.slice(to_lockfile.buffers.string_bytes.items)); - package_json_path.append("package.json"); - - const source = &(bun.sys.File.toSource(package_json_path.sliceZ(), allocator, .{}).unwrap() catch { - break :update_mapping false; - }); - - var workspace_pkg: Package = .{}; - - const json = pm.workspace_package_json_cache.getWithSource(bun.default_allocator, log, source, .{}).unwrap() catch { - break :update_mapping false; - }; - - var resolver: void = {}; - try workspace_pkg.parseWithJSON( - to_lockfile, - pm, - allocator, - log, - source, - json.root, - void, - &resolver, - Features.workspace, - ); - - to_deps = to.dependencies.get(to_lockfile.buffers.dependencies.items); - - var from_pkg = from_lockfile.packages.get(from_resolutions[i]); - const diff = try generate( - pm, - allocator, - log, - from_lockfile, - to_lockfile, - &from_pkg, - &workspace_pkg, - update_requests, - null, - ); - - if (pm.options.log_level.isVerbose() and (diff.add + diff.remove + diff.update) > 0) { - Output.prettyErrorln("Workspace package \"{s}\" has added {d} dependencies, removed {d} dependencies, and updated {d} 
dependencies", .{ - workspace_path.slice(to_lockfile.buffers.string_bytes.items), - diff.add, - diff.remove, - diff.update, - }); - } - - break :update_mapping !diff.hasDiffs(); - }; - - if (update_mapping) { - mapping[to_i] = @truncate(i); - continue; - } - } else { + // We found a removed dependency! + // We don't need to remove it + // It will be cleaned up later + summary.remove += 1; continue; } + defer to_i += 1; + + if (to_deps[to_i].eql(from_dep, to_lockfile.buffers.string_bytes.items, from_lockfile.buffers.string_bytes.items)) { + if (update_requests) |updates| { + if (updates.len == 0 or brk: { + for (updates) |request| { + if (from_dep.name_hash == request.name_hash) break :brk true; + } + break :brk false; + }) { + // Listed as to be updated + summary.update += 1; + continue; + } + } + + if (id_mapping) |mapping| { + const update_mapping = update_mapping: { + if (!is_root or !from_dep.behavior.isWorkspace()) { + break :update_mapping true; + } + + const workspace_path = to_lockfile.workspace_paths.getPtr(from_dep.name_hash) orelse { + break :update_mapping false; + }; + + var package_json_path: bun.AbsPath(.{ .sep = .auto }) = .initTopLevelDir(); + defer package_json_path.deinit(); + + package_json_path.append(workspace_path.slice(to_lockfile.buffers.string_bytes.items)); + package_json_path.append("package.json"); + + const source = &(bun.sys.File.toSource(package_json_path.sliceZ(), allocator, .{}).unwrap() catch { + break :update_mapping false; + }); + + var workspace_pkg: PackageType = .{}; + + const json = pm.workspace_package_json_cache.getWithSource(bun.default_allocator, log, source, .{}).unwrap() catch { + break :update_mapping false; + }; + + var resolver: void = {}; + try workspace_pkg.parseWithJSON( + to_lockfile, + pm, + allocator, + log, + source, + json.root, + void, + &resolver, + Features.workspace, + ); + + to_deps = to.dependencies.get(to_lockfile.buffers.dependencies.items); + + var from_pkg = from_lockfile.packages.get(from_resolutions[i]); + const diff = try generate( + pm, + allocator, + log, + from_lockfile, + to_lockfile, + &from_pkg, + &workspace_pkg, + update_requests, + null, + ); + + if (pm.options.log_level.isVerbose() and (diff.add + diff.remove + diff.update) > 0) { + Output.prettyErrorln("Workspace package \"{s}\" has added {d} dependencies, removed {d} dependencies, and updated {d} dependencies", .{ + workspace_path.slice(to_lockfile.buffers.string_bytes.items), + diff.add, + diff.remove, + diff.update, + }); + } + + break :update_mapping !diff.hasDiffs(); + }; + + if (update_mapping) { + mapping[to_i] = @truncate(i); + continue; + } + } else { + continue; + } + } + + // We found a changed dependency! + summary.update += 1; } - // We found a changed dependency! - summary.update += 1; - } + // Use saturating arithmetic here because a migrated + // package-lock.json could be out of sync with the package.json, so the + // number of from_deps could be greater than to_deps. + summary.add = @truncate((to_deps.len) -| (from_deps.len -| summary.remove)); - // Use saturating arithmetic here because a migrated - // package-lock.json could be out of sync with the package.json, so the - // number of from_deps could be greater than to_deps. 
- summary.add = @truncate((to_deps.len) -| (from_deps.len -| summary.remove)); - - if (from.resolution.tag != .root) { - inline for (Lockfile.Scripts.names) |hook| { - if (!@field(to.scripts, hook).eql( - @field(from.scripts, hook), - to_lockfile.buffers.string_bytes.items, - from_lockfile.buffers.string_bytes.items, - )) { - // We found a changed life-cycle script - summary.update += 1; + if (from.resolution.tag != .root) { + inline for (Lockfile.Scripts.names) |hook| { + if (!@field(to.scripts, hook).eql( + @field(from.scripts, hook), + to_lockfile.buffers.string_bytes.items, + from_lockfile.buffers.string_bytes.items, + )) { + // We found a changed life-cycle script + summary.update += 1; + } } } - } - return summary; + return summary; + } + }; + + pub fn hash(name: string, version: Semver.Version) u64 { + var hasher = bun.Wyhash.init(0); + hasher.update(name); + hasher.update(std.mem.asBytes(&version)); + return hasher.final(); } - }; - pub fn hash(name: string, version: Semver.Version) u64 { - var hasher = bun.Wyhash.init(0); - hasher.update(name); - hasher.update(std.mem.asBytes(&version)); - return hasher.final(); - } + pub fn parse( + package: *@This(), + lockfile: *Lockfile, + pm: *PackageManager, + allocator: Allocator, + log: *logger.Log, + source: *const logger.Source, + comptime ResolverContext: type, + resolver: *ResolverContext, + comptime features: Features, + ) !void { + initializeStore(); + const json = JSON.parsePackageJSONUTF8(source, log, allocator) catch |err| { + log.print(Output.errorWriter()) catch {}; + Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), source.path.prettyDir() }); + Global.crash(); + }; - pub fn parse( - package: *Package, - lockfile: *Lockfile, - pm: *PackageManager, - allocator: Allocator, - log: *logger.Log, - source: *const logger.Source, - comptime ResolverContext: type, - resolver: *ResolverContext, - comptime features: Features, - ) !void { - initializeStore(); - const json = JSON.parsePackageJSONUTF8(source, log, allocator) catch |err| { - log.print(Output.errorWriter()) catch {}; - Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), source.path.prettyDir() }); - Global.crash(); - }; + try package.parseWithJSON( + lockfile, + pm, + allocator, + log, + source, + json, + ResolverContext, + resolver, + features, + ); + } - try package.parseWithJSON( - lockfile, - pm, - allocator, - log, - source, - json, - ResolverContext, - resolver, - features, - ); - } - - fn parseDependency( - lockfile: *Lockfile, - pm: *PackageManager, - allocator: Allocator, - log: *logger.Log, - source: *const logger.Source, - comptime group: DependencyGroup, - string_builder: *StringBuilder, - comptime features: Features, - package_dependencies: []Dependency, - dependencies_count: u32, - comptime tag: ?Dependency.Version.Tag, - workspace_ver: ?Semver.Version, - external_alias: ExternalString, - version: string, - key_loc: logger.Loc, - value_loc: logger.Loc, - ) !?Dependency { - const external_version = brk: { - if (comptime Environment.isWindows) { - switch (tag orelse Dependency.Version.Tag.infer(version)) { - .workspace, .folder, .symlink, .tarball => { - if (String.canInline(version)) { - var copy = string_builder.append(String, version); - bun.path.dangerouslyConvertPathToPosixInPlace(u8, ©.bytes); - break :brk copy; - } else { - const str_ = string_builder.append(String, version); - const ptr = str_.ptr(); - bun.path.dangerouslyConvertPathToPosixInPlace(u8, 
lockfile.buffers.string_bytes.items[ptr.off..][0..ptr.len]); - break :brk str_; - } - }, - else => {}, + fn parseDependency( + lockfile: *Lockfile, + pm: *PackageManager, + allocator: Allocator, + log: *logger.Log, + source: *const logger.Source, + comptime group: DependencyGroup, + string_builder: *StringBuilder, + comptime features: Features, + package_dependencies: []Dependency, + dependencies_count: u32, + comptime tag: ?Dependency.Version.Tag, + workspace_ver: ?Semver.Version, + external_alias: ExternalString, + version: string, + key_loc: logger.Loc, + value_loc: logger.Loc, + ) !?Dependency { + const external_version = brk: { + if (comptime Environment.isWindows) { + switch (tag orelse Dependency.Version.Tag.infer(version)) { + .workspace, .folder, .symlink, .tarball => { + if (String.canInline(version)) { + var copy = string_builder.append(String, version); + bun.path.dangerouslyConvertPathToPosixInPlace(u8, ©.bytes); + break :brk copy; + } else { + const str_ = string_builder.append(String, version); + const ptr = str_.ptr(); + bun.path.dangerouslyConvertPathToPosixInPlace(u8, lockfile.buffers.string_bytes.items[ptr.off..][0..ptr.len]); + break :brk str_; + } + }, + else => {}, + } } - } - break :brk string_builder.append(String, version); - }; + break :brk string_builder.append(String, version); + }; - const buf = lockfile.buffers.string_bytes.items; - const sliced = external_version.sliced(buf); + const buf = lockfile.buffers.string_bytes.items; + const sliced = external_version.sliced(buf); - var dependency_version = Dependency.parseWithOptionalTag( - allocator, - external_alias.value, - external_alias.hash, - sliced.slice, - tag, - &sliced, - log, - pm, - ) orelse Dependency.Version{}; - var workspace_range: ?Semver.Query.Group = null; - const name_hash = switch (dependency_version.tag) { - .npm => String.Builder.stringHash(dependency_version.value.npm.name.slice(buf)), - .workspace => if (strings.hasPrefixComptime(sliced.slice, "workspace:")) brk: { - const input = sliced.slice["workspace:".len..]; - const trimmed = strings.trim(input, &strings.whitespace_chars); - if (trimmed.len != 1 or (trimmed[0] != '*' and trimmed[0] != '^' and trimmed[0] != '~')) { - const at = strings.lastIndexOfChar(input, '@') orelse 0; - if (at > 0) { - workspace_range = Semver.Query.parse(allocator, input[at + 1 ..], sliced) catch |err| { + var dependency_version = Dependency.parseWithOptionalTag( + allocator, + external_alias.value, + external_alias.hash, + sliced.slice, + tag, + &sliced, + log, + pm, + ) orelse Dependency.Version{}; + var workspace_range: ?Semver.Query.Group = null; + const name_hash = switch (dependency_version.tag) { + .npm => String.Builder.stringHash(dependency_version.value.npm.name.slice(buf)), + .workspace => if (strings.hasPrefixComptime(sliced.slice, "workspace:")) brk: { + const input = sliced.slice["workspace:".len..]; + const trimmed = strings.trim(input, &strings.whitespace_chars); + if (trimmed.len != 1 or (trimmed[0] != '*' and trimmed[0] != '^' and trimmed[0] != '~')) { + const at = strings.lastIndexOfChar(input, '@') orelse 0; + if (at > 0) { + workspace_range = Semver.Query.parse(allocator, input[at + 1 ..], sliced) catch |err| { + switch (err) { + error.OutOfMemory => bun.outOfMemory(), + } + }; + break :brk String.Builder.stringHash(input[0..at]); + } + workspace_range = Semver.Query.parse(allocator, input, sliced) catch |err| { switch (err) { error.OutOfMemory => bun.outOfMemory(), } }; - break :brk String.Builder.stringHash(input[0..at]); } - workspace_range 
= Semver.Query.parse(allocator, input, sliced) catch |err| { - switch (err) { - error.OutOfMemory => bun.outOfMemory(), - } - }; - } - break :brk external_alias.hash; - } else external_alias.hash, - else => external_alias.hash, - }; + break :brk external_alias.hash; + } else external_alias.hash, + else => external_alias.hash, + }; - var workspace_path: ?String = null; - var workspace_version = workspace_ver; - if (comptime tag == null) { - workspace_path = lockfile.workspace_paths.get(name_hash); - workspace_version = lockfile.workspace_versions.get(name_hash); - } + var workspace_path: ?String = null; + var workspace_version = workspace_ver; + if (comptime tag == null) { + workspace_path = lockfile.workspace_paths.get(name_hash); + workspace_version = lockfile.workspace_versions.get(name_hash); + } - if (comptime tag != null) { - bun.assert(dependency_version.tag != .npm and dependency_version.tag != .dist_tag); - } + if (comptime tag != null) { + bun.assert(dependency_version.tag != .npm and dependency_version.tag != .dist_tag); + } - switch (dependency_version.tag) { - .folder => { - const relative = Path.relative( - FileSystem.instance.top_level_dir, - Path.joinAbsString( + switch (dependency_version.tag) { + .folder => { + const relative = Path.relative( FileSystem.instance.top_level_dir, - &[_]string{ - source.path.name.dir, - dependency_version.value.folder.slice(buf), - }, - .auto, - ), - ); - // if relative is empty, we are linking the package to itself - dependency_version.value.folder = string_builder.append(String, if (relative.len == 0) "." else relative); - }, - .npm => { - const npm = dependency_version.value.npm; - if (workspace_version != null) { - if (pm.options.link_workspace_packages and npm.version.satisfies(workspace_version.?, buf, buf)) { - const path = workspace_path.?.sliced(buf); - if (Dependency.parseWithTag( - allocator, - external_alias.value, - external_alias.hash, - path.slice, - .workspace, - &path, - log, - pm, - )) |dep| { - dependency_version.tag = dep.tag; - dependency_version.value = dep.value; - } - } else { - // It doesn't satisfy, but a workspace shares the same name. Override the workspace with the other dependency - for (package_dependencies[0..dependencies_count]) |*dep| { - if (dep.name_hash == name_hash and dep.behavior.isWorkspace()) { - dep.* = .{ - .behavior = group.behavior, - .name = external_alias.value, - .name_hash = external_alias.hash, - .version = dependency_version, - }; - return null; + Path.joinAbsString( + FileSystem.instance.top_level_dir, + &[_]string{ + source.path.name.dir, + dependency_version.value.folder.slice(buf), + }, + .auto, + ), + ); + // if relative is empty, we are linking the package to itself + dependency_version.value.folder = string_builder.append(String, if (relative.len == 0) "." else relative); + }, + .npm => { + const npm = dependency_version.value.npm; + if (workspace_version != null) { + if (pm.options.link_workspace_packages and npm.version.satisfies(workspace_version.?, buf, buf)) { + const path = workspace_path.?.sliced(buf); + if (Dependency.parseWithTag( + allocator, + external_alias.value, + external_alias.hash, + path.slice, + .workspace, + &path, + log, + pm, + )) |dep| { + dependency_version.tag = dep.tag; + dependency_version.value = dep.value; + } + } else { + // It doesn't satisfy, but a workspace shares the same name. 
Override the workspace with the other dependency + for (package_dependencies[0..dependencies_count]) |*dep| { + if (dep.name_hash == name_hash and dep.behavior.isWorkspace()) { + dep.* = .{ + .behavior = group.behavior, + .name = external_alias.value, + .name_hash = external_alias.hash, + .version = dependency_version, + }; + return null; + } } } } - } - }, - .workspace => workspace: { - if (workspace_path) |path| { - if (workspace_range) |range| { - if (workspace_version) |ver| { - if (range.satisfies(ver, buf, buf)) { + }, + .workspace => workspace: { + if (workspace_path) |path| { + if (workspace_range) |range| { + if (workspace_version) |ver| { + if (range.satisfies(ver, buf, buf)) { + dependency_version.value.workspace = path; + break :workspace; + } + } + + // important to trim before len == 0 check. `workspace:foo@ ` should install successfully + const version_literal = strings.trim(range.input, &strings.whitespace_chars); + if (version_literal.len == 0 or range.@"is *"() or Semver.Version.isTaggedVersionOnly(version_literal)) { dependency_version.value.workspace = path; break :workspace; } - } - // important to trim before len == 0 check. `workspace:foo@ ` should install successfully - const version_literal = strings.trim(range.input, &strings.whitespace_chars); - if (version_literal.len == 0 or range.@"is *"() or Semver.Version.isTaggedVersionOnly(version_literal)) { - dependency_version.value.workspace = path; - break :workspace; - } - - // workspace is not required to have a version, but if it does - // and this version doesn't match it, fail to install - try log.addErrorFmt( - source, - logger.Loc.Empty, - allocator, - "No matching version for workspace dependency \"{s}\". Version: \"{s}\"", - .{ - external_alias.slice(buf), - dependency_version.literal.slice(buf), - }, - ); - return error.InstallFailed; - } - - dependency_version.value.workspace = path; - } else { - const workspace = dependency_version.value.workspace.slice(buf); - const path = string_builder.append(String, if (strings.eqlComptime(workspace, "*")) "*" else brk: { - var buf2: bun.PathBuffer = undefined; - const rel = Path.relativePlatform( - FileSystem.instance.top_level_dir, - Path.joinAbsStringBuf( - FileSystem.instance.top_level_dir, - &buf2, - &[_]string{ - source.path.name.dir, - workspace, + // workspace is not required to have a version, but if it does + // and this version doesn't match it, fail to install + try log.addErrorFmt( + source, + logger.Loc.Empty, + allocator, + "No matching version for workspace dependency \"{s}\". 
Version: \"{s}\"", + .{ + external_alias.slice(buf), + dependency_version.literal.slice(buf), }, + ); + return error.InstallFailed; + } + + dependency_version.value.workspace = path; + } else { + const workspace = dependency_version.value.workspace.slice(buf); + const path = string_builder.append(String, if (strings.eqlComptime(workspace, "*")) "*" else brk: { + var buf2: bun.PathBuffer = undefined; + const rel = Path.relativePlatform( + FileSystem.instance.top_level_dir, + Path.joinAbsStringBuf( + FileSystem.instance.top_level_dir, + &buf2, + &[_]string{ + source.path.name.dir, + workspace, + }, + .auto, + ), .auto, - ), - .auto, - false, + false, + ); + if (comptime Environment.isWindows) { + bun.path.dangerouslyConvertPathToPosixInPlace(u8, Path.relative_to_common_path_buf[0..rel.len]); + } + break :brk rel; + }); + if (comptime Environment.allow_assert) { + assert(path.len() > 0); + assert(!std.fs.path.isAbsolute(path.slice(buf))); + } + dependency_version.value.workspace = path; + + const workspace_entry = try lockfile.workspace_paths.getOrPut(allocator, name_hash); + const found_matching_workspace = workspace_entry.found_existing; + + if (workspace_version) |ver| { + try lockfile.workspace_versions.put(allocator, name_hash, ver); + for (package_dependencies[0..dependencies_count]) |*package_dep| { + if (switch (package_dep.version.tag) { + // `dependencies` & `workspaces` defined within the same `package.json` + .npm => String.Builder.stringHash(package_dep.realname().slice(buf)) == name_hash and + package_dep.version.value.npm.version.satisfies(ver, buf, buf), + // `workspace:*` + .workspace => found_matching_workspace and + String.Builder.stringHash(package_dep.realname().slice(buf)) == name_hash, + else => false, + }) { + package_dep.version = dependency_version; + workspace_entry.value_ptr.* = path; + return null; + } + } + } else if (workspace_entry.found_existing) { + for (package_dependencies[0..dependencies_count]) |*package_dep| { + if (package_dep.version.tag == .workspace and + String.Builder.stringHash(package_dep.realname().slice(buf)) == name_hash) + { + package_dep.version = dependency_version; + return null; + } + } + return error.InstallFailed; + } + + workspace_entry.value_ptr.* = path; + } + }, + else => {}, + } + + const this_dep = Dependency{ + .behavior = group.behavior, + .name = external_alias.value, + .name_hash = external_alias.hash, + .version = dependency_version, + }; + + // `peerDependencies` may be specified on existing dependencies. 
Packages in `workspaces` are deduplicated when + // the array is processed + if (comptime features.check_for_duplicate_dependencies and !group.behavior.isPeer() and !group.behavior.isWorkspace()) { + const entry = lockfile.scratch.duplicate_checker_map.getOrPutAssumeCapacity(external_alias.hash); + if (entry.found_existing) { + // duplicate dependencies are allowed in optionalDependencies + if (comptime group.behavior.isOptional()) { + for (package_dependencies[0..dependencies_count]) |*package_dep| { + if (package_dep.name_hash == this_dep.name_hash) { + package_dep.* = this_dep; + break; + } + } + return null; + } else { + var notes = try allocator.alloc(logger.Data, 1); + + notes[0] = .{ + .text = try std.fmt.allocPrint(lockfile.allocator, "\"{s}\" originally specified here", .{external_alias.slice(buf)}), + .location = logger.Location.initOrNull(source, source.rangeOfString(entry.value_ptr.*)), + }; + + try log.addRangeWarningFmtWithNotes( + source, + source.rangeOfString(key_loc), + lockfile.allocator, + notes, + "Duplicate dependency: \"{s}\" specified in package.json", + .{external_alias.slice(buf)}, ); - if (comptime Environment.isWindows) { - bun.path.dangerouslyConvertPathToPosixInPlace(u8, Path.relative_to_common_path_buf[0..rel.len]); - } - break :brk rel; - }); - if (comptime Environment.allow_assert) { - assert(path.len() > 0); - assert(!std.fs.path.isAbsolute(path.slice(buf))); } - dependency_version.value.workspace = path; - - const workspace_entry = try lockfile.workspace_paths.getOrPut(allocator, name_hash); - const found_matching_workspace = workspace_entry.found_existing; - - if (workspace_version) |ver| { - try lockfile.workspace_versions.put(allocator, name_hash, ver); - for (package_dependencies[0..dependencies_count]) |*package_dep| { - if (switch (package_dep.version.tag) { - // `dependencies` & `workspaces` defined within the same `package.json` - .npm => String.Builder.stringHash(package_dep.realname().slice(buf)) == name_hash and - package_dep.version.value.npm.version.satisfies(ver, buf, buf), - // `workspace:*` - .workspace => found_matching_workspace and - String.Builder.stringHash(package_dep.realname().slice(buf)) == name_hash, - else => false, - }) { - package_dep.version = dependency_version; - workspace_entry.value_ptr.* = path; - return null; - } - } - } else if (workspace_entry.found_existing) { - for (package_dependencies[0..dependencies_count]) |*package_dep| { - if (package_dep.version.tag == .workspace and - String.Builder.stringHash(package_dep.realname().slice(buf)) == name_hash) - { - package_dep.version = dependency_version; - return null; - } - } - return error.InstallFailed; - } - - workspace_entry.value_ptr.* = path; } - }, - else => {}, + + entry.value_ptr.* = value_loc; + } + + return this_dep; } - const this_dep = Dependency{ - .behavior = group.behavior, - .name = external_alias.value, - .name_hash = external_alias.hash, - .version = dependency_version, - }; + pub fn parseWithJSON( + package: *@This(), + lockfile: *Lockfile, + pm: *PackageManager, + allocator: Allocator, + log: *logger.Log, + source: *const logger.Source, + json: Expr, + comptime ResolverContext: type, + resolver: *ResolverContext, + comptime features: Features, + ) !void { + var string_builder = lockfile.stringBuilder(); + var total_dependencies_count: u32 = 0; - // `peerDependencies` may be specified on existing dependencies. 
Packages in `workspaces` are deduplicated when - // the array is processed - if (comptime features.check_for_duplicate_dependencies and !group.behavior.isPeer() and !group.behavior.isWorkspace()) { - const entry = lockfile.scratch.duplicate_checker_map.getOrPutAssumeCapacity(external_alias.hash); - if (entry.found_existing) { - // duplicate dependencies are allowed in optionalDependencies - if (comptime group.behavior.isOptional()) { - for (package_dependencies[0..dependencies_count]) |*package_dep| { - if (package_dep.name_hash == this_dep.name_hash) { - package_dep.* = this_dep; - break; + package.meta.origin = if (features.is_main) .local else .npm; + package.name = String{}; + package.name_hash = 0; + + // -- Count the sizes + name: { + if (json.asProperty("name")) |name_q| { + if (name_q.expr.asString(allocator)) |name| { + if (name.len != 0) { + string_builder.count(name); + break :name; } } - return null; - } else { - var notes = try allocator.alloc(logger.Data, 1); + } - notes[0] = .{ - .text = try std.fmt.allocPrint(lockfile.allocator, "\"{s}\" originally specified here", .{external_alias.slice(buf)}), - .location = logger.Location.initOrNull(source, source.rangeOfString(entry.value_ptr.*)), + // name is not validated by npm, so fallback to creating a new from the version literal + if (ResolverContext == PackageManager.GitResolver) { + const resolution: *const Resolution = resolver.resolution; + const repo = switch (resolution.tag) { + .git => resolution.value.git, + .github => resolution.value.github, + + else => break :name, }; - try log.addRangeWarningFmtWithNotes( - source, - source.rangeOfString(key_loc), + resolver.new_name = Repository.createDependencyNameFromVersionLiteral( lockfile.allocator, - notes, - "Duplicate dependency: \"{s}\" specified in package.json", - .{external_alias.slice(buf)}, + &repo, + lockfile, + resolver.dep_id, ); + + string_builder.count(resolver.new_name); } } - entry.value_ptr.* = value_loc; - } - - return this_dep; - } - - pub fn parseWithJSON( - package: *Package, - lockfile: *Lockfile, - pm: *PackageManager, - allocator: Allocator, - log: *logger.Log, - source: *const logger.Source, - json: Expr, - comptime ResolverContext: type, - resolver: *ResolverContext, - comptime features: Features, - ) !void { - var string_builder = lockfile.stringBuilder(); - var total_dependencies_count: u32 = 0; - - package.meta.origin = if (features.is_main) .local else .npm; - package.name = String{}; - package.name_hash = 0; - - // -- Count the sizes - name: { - if (json.asProperty("name")) |name_q| { - if (name_q.expr.asString(allocator)) |name| { - if (name.len != 0) { - string_builder.count(name); - break :name; + if (json.asProperty("patchedDependencies")) |patched_deps| { + const obj = patched_deps.expr.data.e_object; + for (obj.properties.slice()) |prop| { + const key = prop.key.?; + const value = prop.value.?; + if (key.isString() and value.isString()) { + string_builder.count(value.asString(allocator).?); } } } - // name is not validated by npm, so fallback to creating a new from the version literal - if (ResolverContext == PackageManager.GitResolver) { - const resolution: *const Resolution = resolver.resolution; - const repo = switch (resolution.tag) { - .git => resolution.value.git, - .github => resolution.value.github, - - else => break :name, - }; - - resolver.new_name = Repository.createDependencyNameFromVersionLiteral( - lockfile.allocator, - &repo, - lockfile, - resolver.dep_id, - ); - - string_builder.count(resolver.new_name); - } - } - - if 
(json.asProperty("patchedDependencies")) |patched_deps| { - const obj = patched_deps.expr.data.e_object; - for (obj.properties.slice()) |prop| { - const key = prop.key.?; - const value = prop.value.?; - if (key.isString() and value.isString()) { - string_builder.count(value.asString(allocator).?); + if (comptime !features.is_main) { + if (json.asProperty("version")) |version_q| { + if (version_q.expr.asString(allocator)) |version_str| { + string_builder.count(version_str); + } } } - } - - if (comptime !features.is_main) { - if (json.asProperty("version")) |version_q| { - if (version_q.expr.asString(allocator)) |version_str| { - string_builder.count(version_str); + bin: { + if (json.asProperty("bin")) |bin| { + switch (bin.expr.data) { + .e_object => |obj| { + for (obj.properties.slice()) |bin_prop| { + string_builder.count(bin_prop.key.?.asString(allocator) orelse break :bin); + string_builder.count(bin_prop.value.?.asString(allocator) orelse break :bin); + } + break :bin; + }, + .e_string => { + if (bin.expr.asString(allocator)) |str_| { + string_builder.count(str_); + break :bin; + } + }, + else => {}, + } } - } - } - bin: { - if (json.asProperty("bin")) |bin| { - switch (bin.expr.data) { - .e_object => |obj| { - for (obj.properties.slice()) |bin_prop| { - string_builder.count(bin_prop.key.?.asString(allocator) orelse break :bin); - string_builder.count(bin_prop.value.?.asString(allocator) orelse break :bin); - } - break :bin; - }, - .e_string => { - if (bin.expr.asString(allocator)) |str_| { + + if (json.asProperty("directories")) |dirs| { + if (dirs.expr.asProperty("bin")) |bin_prop| { + if (bin_prop.expr.asString(allocator)) |str_| { string_builder.count(str_); break :bin; } - }, - else => {}, - } - } - - if (json.asProperty("directories")) |dirs| { - if (dirs.expr.asProperty("bin")) |bin_prop| { - if (bin_prop.expr.asString(allocator)) |str_| { - string_builder.count(str_); - break :bin; } } } - } - Package.Scripts.parseCount(allocator, &string_builder, json); + Scripts.parseCount(allocator, &string_builder, json); - if (comptime ResolverContext != void) { - resolver.count(*Lockfile.StringBuilder, &string_builder, json); - } - - const dependency_groups = comptime brk: { - var out_groups: [ - @as(usize, @intFromBool(features.workspaces)) + - @as(usize, @intFromBool(features.dependencies)) + - @as(usize, @intFromBool(features.dev_dependencies)) + - @as(usize, @intFromBool(features.optional_dependencies)) + - @as(usize, @intFromBool(features.peer_dependencies)) - ]DependencyGroup = undefined; - var out_group_i: usize = 0; - - if (features.workspaces) { - out_groups[out_group_i] = DependencyGroup.workspaces; - out_group_i += 1; + if (comptime ResolverContext != void) { + resolver.count(*Lockfile.StringBuilder, &string_builder, json); } - if (features.dependencies) { - out_groups[out_group_i] = DependencyGroup.dependencies; - out_group_i += 1; - } + const dependency_groups = comptime brk: { + var out_groups: [ + @as(usize, @intFromBool(features.workspaces)) + + @as(usize, @intFromBool(features.dependencies)) + + @as(usize, @intFromBool(features.dev_dependencies)) + + @as(usize, @intFromBool(features.optional_dependencies)) + + @as(usize, @intFromBool(features.peer_dependencies)) + ]DependencyGroup = undefined; + var out_group_i: usize = 0; - if (features.dev_dependencies) { - out_groups[out_group_i] = DependencyGroup.dev; - out_group_i += 1; - } - if (features.optional_dependencies) { - out_groups[out_group_i] = DependencyGroup.optional; - out_group_i += 1; - } - - if 
(features.peer_dependencies) { - out_groups[out_group_i] = DependencyGroup.peer; - out_group_i += 1; - } - - break :brk out_groups; - }; - - var workspace_names = WorkspaceMap.init(allocator); - defer workspace_names.deinit(); - - var optional_peer_dependencies = std.ArrayHashMap(PackageNameHash, void, ArrayIdentityContext.U64, false).init(allocator); - defer optional_peer_dependencies.deinit(); - - if (json.asProperty("peerDependenciesMeta")) |peer_dependencies_meta| { - if (peer_dependencies_meta.expr.data == .e_object) { - const props = peer_dependencies_meta.expr.data.e_object.properties.slice(); - try optional_peer_dependencies.ensureUnusedCapacity(props.len); - for (props) |prop| { - if (prop.value.?.asProperty("optional")) |optional| { - if (optional.expr.data != .e_boolean or !optional.expr.data.e_boolean.value) { - continue; - } - - optional_peer_dependencies.putAssumeCapacity( - String.Builder.stringHash(prop.key.?.asString(allocator) orelse unreachable), - {}, - ); - } + if (features.workspaces) { + out_groups[out_group_i] = DependencyGroup.workspaces; + out_group_i += 1; } - } - } - inline for (dependency_groups) |group| { - if (json.asProperty(group.prop)) |dependencies_q| brk: { - switch (dependencies_q.expr.data) { - .e_array => |arr| { - if (!group.behavior.isWorkspace()) { - log.addErrorFmt(source, dependencies_q.loc, allocator, - \\{0s} expects a map of specifiers, e.g. - \\ "{0s}": {{ - \\ "bun": "latest" - \\ }} - , .{group.prop}) catch {}; - return error.InvalidPackageJSON; - } - total_dependencies_count += try workspace_names.processNamesArray( - allocator, - &pm.workspace_package_json_cache, - log, - arr, - source, - dependencies_q.loc, - &string_builder, - ); - }, - .e_object => |obj| { - if (group.behavior.isWorkspace()) { + if (features.dependencies) { + out_groups[out_group_i] = DependencyGroup.dependencies; + out_group_i += 1; + } - // yarn workspaces expects a "workspaces" property shaped like this: - // - // "workspaces": { - // "packages": [ - // "path/to/package" - // ] - // } - // - if (obj.get("packages")) |packages_query| { - if (packages_query.data != .e_array) { - log.addErrorFmt(source, packages_query.loc, allocator, - // TODO: what if we could comptime call the syntax highlighter - \\"workspaces.packages" expects an array of strings, e.g. 
- \\ "workspaces": {{ - \\ "packages": [ - \\ "path/to/package" - \\ ] - \\ }} - , .{}) catch {}; - return error.InvalidPackageJSON; - } - total_dependencies_count += try workspace_names.processNamesArray( - allocator, - &pm.workspace_package_json_cache, - log, - packages_query.data.e_array, - source, - packages_query.loc, - &string_builder, - ); + if (features.dev_dependencies) { + out_groups[out_group_i] = DependencyGroup.dev; + out_group_i += 1; + } + if (features.optional_dependencies) { + out_groups[out_group_i] = DependencyGroup.optional; + out_group_i += 1; + } + + if (features.peer_dependencies) { + out_groups[out_group_i] = DependencyGroup.peer; + out_group_i += 1; + } + + break :brk out_groups; + }; + + var workspace_names = WorkspaceMap.init(allocator); + defer workspace_names.deinit(); + + var optional_peer_dependencies = std.ArrayHashMap(PackageNameHash, void, ArrayIdentityContext.U64, false).init(allocator); + defer optional_peer_dependencies.deinit(); + + if (json.asProperty("peerDependenciesMeta")) |peer_dependencies_meta| { + if (peer_dependencies_meta.expr.data == .e_object) { + const props = peer_dependencies_meta.expr.data.e_object.properties.slice(); + try optional_peer_dependencies.ensureUnusedCapacity(props.len); + for (props) |prop| { + if (prop.value.?.asProperty("optional")) |optional| { + if (optional.expr.data != .e_boolean or !optional.expr.data.e_boolean.value) { + continue; } - break :brk; + optional_peer_dependencies.putAssumeCapacity( + String.Builder.stringHash(prop.key.?.asString(allocator) orelse unreachable), + {}, + ); } - for (obj.properties.slice()) |item| { - const key = item.key.?.asString(allocator).?; - const value = item.value.?.asString(allocator) orelse { - log.addErrorFmt(source, item.value.?.loc, allocator, - // TODO: what if we could comptime call the syntax highlighter + } + } + } + + inline for (dependency_groups) |group| { + if (json.asProperty(group.prop)) |dependencies_q| brk: { + switch (dependencies_q.expr.data) { + .e_array => |arr| { + if (!group.behavior.isWorkspace()) { + log.addErrorFmt(source, dependencies_q.loc, allocator, \\{0s} expects a map of specifiers, e.g. \\ "{0s}": {{ \\ "bun": "latest" \\ }} , .{group.prop}) catch {}; return error.InvalidPackageJSON; - }; - - string_builder.count(key); - string_builder.count(value); - - // If it's a folder or workspace, pessimistically assume we will need a maximum path - switch (Dependency.Version.Tag.infer(value)) { - .folder, .workspace => string_builder.cap += bun.MAX_PATH_BYTES, - else => {}, } - } - total_dependencies_count += @as(u32, @truncate(obj.properties.len)); - }, - else => { - if (group.behavior.isWorkspace()) { - log.addErrorFmt(source, dependencies_q.loc, allocator, - // TODO: what if we could comptime call the syntax highlighter - \\"workspaces" expects an array of strings, e.g. - \\ "workspaces": [ - \\ "path/to/package" - \\ ] - , .{}) catch {}; - } else { - log.addErrorFmt(source, dependencies_q.loc, allocator, - \\{0s} expects a map of specifiers, e.g. 
- \\ "{0s}": {{ - \\ "bun": "latest" - \\ }} - , .{group.prop}) catch {}; - } - return error.InvalidPackageJSON; - }, - } - } - } + total_dependencies_count += try workspace_names.processNamesArray( + allocator, + &pm.workspace_package_json_cache, + log, + arr, + source, + dependencies_q.loc, + &string_builder, + ); + }, + .e_object => |obj| { + if (group.behavior.isWorkspace()) { - if (comptime features.trusted_dependencies) { - if (json.asProperty("trustedDependencies")) |q| { - switch (q.expr.data) { - .e_array => |arr| { - if (lockfile.trusted_dependencies == null) lockfile.trusted_dependencies = .{}; - try lockfile.trusted_dependencies.?.ensureUnusedCapacity(allocator, arr.items.len); - for (arr.slice()) |item| { - const name = item.asString(allocator) orelse { - log.addErrorFmt(source, q.loc, allocator, - \\trustedDependencies expects an array of strings, e.g. - \\ "trustedDependencies": [ - \\ "package_name" + // yarn workspaces expects a "workspaces" property shaped like this: + // + // "workspaces": { + // "packages": [ + // "path/to/package" + // ] + // } + // + if (obj.get("packages")) |packages_query| { + if (packages_query.data != .e_array) { + log.addErrorFmt(source, packages_query.loc, allocator, + // TODO: what if we could comptime call the syntax highlighter + \\"workspaces.packages" expects an array of strings, e.g. + \\ "workspaces": {{ + \\ "packages": [ + \\ "path/to/package" + \\ ] + \\ }} + , .{}) catch {}; + return error.InvalidPackageJSON; + } + total_dependencies_count += try workspace_names.processNamesArray( + allocator, + &pm.workspace_package_json_cache, + log, + packages_query.data.e_array, + source, + packages_query.loc, + &string_builder, + ); + } + + break :brk; + } + for (obj.properties.slice()) |item| { + const key = item.key.?.asString(allocator).?; + const value = item.value.?.asString(allocator) orelse { + log.addErrorFmt(source, item.value.?.loc, allocator, + // TODO: what if we could comptime call the syntax highlighter + \\{0s} expects a map of specifiers, e.g. + \\ "{0s}": {{ + \\ "bun": "latest" + \\ }} + , .{group.prop}) catch {}; + return error.InvalidPackageJSON; + }; + + string_builder.count(key); + string_builder.count(value); + + // If it's a folder or workspace, pessimistically assume we will need a maximum path + switch (Dependency.Version.Tag.infer(value)) { + .folder, .workspace => string_builder.cap += bun.MAX_PATH_BYTES, + else => {}, + } + } + total_dependencies_count += @as(u32, @truncate(obj.properties.len)); + }, + else => { + if (group.behavior.isWorkspace()) { + log.addErrorFmt(source, dependencies_q.loc, allocator, + // TODO: what if we could comptime call the syntax highlighter + \\"workspaces" expects an array of strings, e.g. + \\ "workspaces": [ + \\ "path/to/package" \\ ] , .{}) catch {}; - return error.InvalidPackageJSON; - }; - lockfile.trusted_dependencies.?.putAssumeCapacity(@as(TruncatedPackageNameHash, @truncate(String.Builder.stringHash(name))), {}); - } - }, - else => { - log.addErrorFmt(source, q.loc, allocator, - \\trustedDependencies expects an array of strings, e.g. 
- \\ "trustedDependencies": [ - \\ "package_name" - \\ ] - , .{}) catch {}; - return error.InvalidPackageJSON; - }, - } - } - } - - if (comptime features.is_main) { - lockfile.overrides.parseCount(lockfile, json, &string_builder); - - if (json.get("workspaces")) |workspaces_expr| { - lockfile.catalogs.parseCount(lockfile, workspaces_expr, &string_builder); - } - - // Count catalog strings in top-level package.json as well, since parseAppend - // might process them later if no catalogs were found in workspaces - lockfile.catalogs.parseCount(lockfile, json, &string_builder); - } - - try string_builder.allocate(); - try lockfile.buffers.dependencies.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); - try lockfile.buffers.resolutions.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); - - const off = lockfile.buffers.dependencies.items.len; - const total_len = off + total_dependencies_count; - if (comptime Environment.allow_assert) assert(lockfile.buffers.dependencies.items.len == lockfile.buffers.resolutions.items.len); - - const package_dependencies = lockfile.buffers.dependencies.items.ptr[off..total_len]; - - name: { - if (ResolverContext == PackageManager.GitResolver) { - if (resolver.new_name.len != 0) { - defer lockfile.allocator.free(resolver.new_name); - const external_string = string_builder.append(ExternalString, resolver.new_name); - package.name = external_string.value; - package.name_hash = external_string.hash; - break :name; + } else { + log.addErrorFmt(source, dependencies_q.loc, allocator, + \\{0s} expects a map of specifiers, e.g. + \\ "{0s}": {{ + \\ "bun": "latest" + \\ }} + , .{group.prop}) catch {}; + } + return error.InvalidPackageJSON; + }, + } } } - if (json.asProperty("name")) |name_q| { - if (name_q.expr.asString(allocator)) |name| { - if (name.len != 0) { - const external_string = string_builder.append(ExternalString, name); + if (comptime features.trusted_dependencies) { + if (json.asProperty("trustedDependencies")) |q| { + switch (q.expr.data) { + .e_array => |arr| { + if (lockfile.trusted_dependencies == null) lockfile.trusted_dependencies = .{}; + try lockfile.trusted_dependencies.?.ensureUnusedCapacity(allocator, arr.items.len); + for (arr.slice()) |item| { + const name = item.asString(allocator) orelse { + log.addErrorFmt(source, q.loc, allocator, + \\trustedDependencies expects an array of strings, e.g. + \\ "trustedDependencies": [ + \\ "package_name" + \\ ] + , .{}) catch {}; + return error.InvalidPackageJSON; + }; + lockfile.trusted_dependencies.?.putAssumeCapacity(@as(TruncatedPackageNameHash, @truncate(String.Builder.stringHash(name))), {}); + } + }, + else => { + log.addErrorFmt(source, q.loc, allocator, + \\trustedDependencies expects an array of strings, e.g. 
+ \\ "trustedDependencies": [ + \\ "package_name" + \\ ] + , .{}) catch {}; + return error.InvalidPackageJSON; + }, + } + } + } + if (comptime features.is_main) { + lockfile.overrides.parseCount(lockfile, json, &string_builder); + + if (json.get("workspaces")) |workspaces_expr| { + lockfile.catalogs.parseCount(lockfile, workspaces_expr, &string_builder); + } + + // Count catalog strings in top-level package.json as well, since parseAppend + // might process them later if no catalogs were found in workspaces + lockfile.catalogs.parseCount(lockfile, json, &string_builder); + } + + try string_builder.allocate(); + try lockfile.buffers.dependencies.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); + try lockfile.buffers.resolutions.ensureUnusedCapacity(lockfile.allocator, total_dependencies_count); + + const off = lockfile.buffers.dependencies.items.len; + const total_len = off + total_dependencies_count; + if (comptime Environment.allow_assert) assert(lockfile.buffers.dependencies.items.len == lockfile.buffers.resolutions.items.len); + + const package_dependencies = lockfile.buffers.dependencies.items.ptr[off..total_len]; + + name: { + if (ResolverContext == PackageManager.GitResolver) { + if (resolver.new_name.len != 0) { + defer lockfile.allocator.free(resolver.new_name); + const external_string = string_builder.append(ExternalString, resolver.new_name); package.name = external_string.value; package.name_hash = external_string.hash; break :name; } } - } - } - if (comptime !features.is_main) { - if (comptime ResolverContext != void) { - package.resolution = try resolver.resolve( - *Lockfile.StringBuilder, - &string_builder, - json, - ); - } - } else { - package.resolution = .{ - .tag = .root, - .value = .{ .root = {} }, - }; - } + if (json.asProperty("name")) |name_q| { + if (name_q.expr.asString(allocator)) |name| { + if (name.len != 0) { + const external_string = string_builder.append(ExternalString, name); - if (json.asProperty("patchedDependencies")) |patched_deps| { - const obj = patched_deps.expr.data.e_object; - lockfile.patched_dependencies.ensureTotalCapacity(allocator, obj.properties.len) catch unreachable; - for (obj.properties.slice()) |prop| { - const key = prop.key.?; - const value = prop.value.?; - if (key.isString() and value.isString()) { - var sfb = std.heap.stackFallback(1024, allocator); - const keyhash = try key.asStringHash(sfb.get(), String.Builder.stringHash) orelse unreachable; - const patch_path = string_builder.append(String, value.asString(allocator).?); - lockfile.patched_dependencies.put(allocator, keyhash, .{ .path = patch_path }) catch unreachable; - } - } - } - - bin: { - if (json.asProperty("bin")) |bin| { - switch (bin.expr.data) { - .e_object => |obj| { - switch (obj.properties.len) { - 0 => {}, - 1 => { - const bin_name = obj.properties.ptr[0].key.?.asString(allocator) orelse break :bin; - const value = obj.properties.ptr[0].value.?.asString(allocator) orelse break :bin; - - package.bin = .{ - .tag = .named_file, - .value = .{ - .named_file = .{ - string_builder.append(String, bin_name), - string_builder.append(String, value), - }, - }, - }; - }, - else => { - const current_len = lockfile.buffers.extern_strings.items.len; - const count = @as(usize, obj.properties.len * 2); - try lockfile.buffers.extern_strings.ensureTotalCapacityPrecise( - lockfile.allocator, - current_len + count, - ); - var extern_strings = lockfile.buffers.extern_strings.items.ptr[current_len .. 
current_len + count]; - lockfile.buffers.extern_strings.items.len += count; - - var i: usize = 0; - for (obj.properties.slice()) |bin_prop| { - extern_strings[i] = string_builder.append(ExternalString, bin_prop.key.?.asString(allocator) orelse break :bin); - i += 1; - extern_strings[i] = string_builder.append(ExternalString, bin_prop.value.?.asString(allocator) orelse break :bin); - i += 1; - } - if (comptime Environment.allow_assert) assert(i == extern_strings.len); - package.bin = .{ - .tag = .map, - .value = .{ .map = ExternalStringList.init(lockfile.buffers.extern_strings.items, extern_strings) }, - }; - }, - } - - break :bin; - }, - .e_string => |stri| { - if (stri.data.len > 0) { - package.bin = .{ - .tag = .file, - .value = .{ - .file = string_builder.append(String, stri.data), - }, - }; - break :bin; - } - }, - else => {}, - } - } - - if (json.asProperty("directories")) |dirs| { - // https://docs.npmjs.com/cli/v8/configuring-npm/package-json#directoriesbin - // Because of the way the bin directive works, - // specifying both a bin path and setting - // directories.bin is an error. If you want to - // specify individual files, use bin, and for all - // the files in an existing bin directory, use - // directories.bin. - if (dirs.expr.asProperty("bin")) |bin_prop| { - if (bin_prop.expr.asString(allocator)) |str_| { - if (str_.len > 0) { - package.bin = .{ - .tag = .dir, - .value = .{ - .dir = string_builder.append(String, str_), - }, - }; - break :bin; + package.name = external_string.value; + package.name_hash = external_string.hash; + break :name; } } } } - } - package.scripts.parseAlloc(allocator, &string_builder, json); - package.scripts.filled = true; - - // It is allowed for duplicate dependencies to exist in optionalDependencies and regular dependencies - if (comptime features.check_for_duplicate_dependencies) { - lockfile.scratch.duplicate_checker_map.clearRetainingCapacity(); - try lockfile.scratch.duplicate_checker_map.ensureTotalCapacity(total_dependencies_count); - } - - var bundled_deps = bun.StringSet.init(allocator); - defer bundled_deps.deinit(); - var bundle_all_deps = false; - if (comptime ResolverContext != void and ResolverContext.checkBundledDependencies()) { - if (json.get("bundleDependencies") orelse json.get("bundledDependencies")) |bundled_deps_expr| { - switch (bundled_deps_expr.data) { - .e_boolean => |boolean| { - bundle_all_deps = boolean.value; - }, - .e_array => |arr| { - for (arr.slice()) |item| { - try bundled_deps.insert(item.asString(allocator) orelse continue); - } - }, - else => {}, - } - } - } - - total_dependencies_count = 0; - - inline for (dependency_groups) |group| { - if (group.behavior.isWorkspace()) { - var seen_workspace_names = TrustedDependenciesSet{}; - defer seen_workspace_names.deinit(allocator); - for (workspace_names.values(), workspace_names.keys()) |entry, path| { - - // workspace names from their package jsons. duplicates not allowed - const gop = try seen_workspace_names.getOrPut(allocator, @truncate(String.Builder.stringHash(entry.name))); - if (gop.found_existing) { - // this path does alot of extra work to format the error message - // but this is ok because the install is going to fail anyways, so this - // has zero effect on the happy path. 
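
// Aside: the duplicate-workspace check above keys `seen_workspace_names` by a
// truncated name hash and treats `found_existing` from getOrPut as a
// collision. A minimal standalone sketch of that pattern, with hypothetical
// package names that are not part of this patch:
const std = @import("std");

test "getOrPut flags a repeated workspace name" {
    var seen = std.AutoHashMap(u64, void).init(std.testing.allocator);
    defer seen.deinit();

    var duplicates: usize = 0;
    for ([_][]const u8{ "pkg-a", "pkg-b", "pkg-a" }) |name| {
        // Plays the same role as String.Builder.stringHash in the code above.
        const gop = try seen.getOrPut(std.hash.Wyhash.hash(0, name));
        if (gop.found_existing) duplicates += 1; // the second "pkg-a" lands here
    }
    try std.testing.expectEqual(@as(usize, 1), duplicates);
}
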
- var cwd_buf: bun.PathBuffer = undefined; - const cwd = try bun.getcwd(&cwd_buf); - - const num_notes = count: { - var i: usize = 0; - for (workspace_names.values()) |value| { - if (strings.eqlLong(value.name, entry.name, true)) - i += 1; - } - break :count i; - }; - const notes = notes: { - var notes = try allocator.alloc(logger.Data, num_notes); - var i: usize = 0; - for (workspace_names.values(), workspace_names.keys()) |value, note_path| { - if (note_path.ptr == path.ptr) continue; - if (strings.eqlLong(value.name, entry.name, true)) { - const note_abs_path = bun.handleOom(allocator.dupeZ(u8, Path.joinAbsStringZ(cwd, &.{ note_path, "package.json" }, .auto))); - - const note_src = bun.sys.File.toSource(note_abs_path, allocator, .{}).unwrap() catch logger.Source.initEmptyFile(note_abs_path); - - notes[i] = .{ - .text = "Package name is also declared here", - .location = logger.Location.initOrNull(¬e_src, note_src.rangeOfString(value.name_loc)), - }; - i += 1; - } - } - break :notes notes[0..i]; - }; - - const abs_path = Path.joinAbsStringZ(cwd, &.{ path, "package.json" }, .auto); - - const src = bun.sys.File.toSource(abs_path, allocator, .{}).unwrap() catch logger.Source.initEmptyFile(abs_path); - - log.addRangeErrorFmtWithNotes( - &src, - src.rangeOfString(entry.name_loc), - allocator, - notes, - "Workspace name \"{s}\" already exists", - .{ - entry.name, - }, - ) catch {}; - return error.InstallFailed; - } - - const external_name = string_builder.append(ExternalString, entry.name); - - const workspace_version = brk: { - if (entry.version) |version_string| { - const external_version = string_builder.append(ExternalString, version_string); - allocator.free(version_string); - const sliced = external_version.value.sliced(lockfile.buffers.string_bytes.items); - const result = Semver.Version.parse(sliced); - if (result.valid and result.wildcard == .none) { - break :brk result.version.min(); - } - } - - break :brk null; - }; - - if (try parseDependency( - lockfile, - pm, - allocator, - log, - source, - group, + if (comptime !features.is_main) { + if (comptime ResolverContext != void) { + package.resolution = try resolver.resolve( + *Lockfile.StringBuilder, &string_builder, - features, - package_dependencies, - total_dependencies_count, - .workspace, - workspace_version, - external_name, - path, - logger.Loc.Empty, - logger.Loc.Empty, - )) |_dep| { - var dep = _dep; - if (group.behavior.isPeer() and optional_peer_dependencies.contains(external_name.hash)) { - dep.behavior = dep.behavior.add(.optional); - } - - package_dependencies[total_dependencies_count] = dep; - total_dependencies_count += 1; - - try lockfile.workspace_paths.put(allocator, external_name.hash, dep.version.value.workspace); - if (workspace_version) |version| { - try lockfile.workspace_versions.put(allocator, external_name.hash, version); - } - } + json, + ); } } else { - if (json.asProperty(group.prop)) |dependencies_q| { - switch (dependencies_q.expr.data) { - .e_object => |obj| { - for (obj.properties.slice()) |item| { - const key = item.key.?; - const value = item.value.?; - const external_name = string_builder.append(ExternalString, key.asString(allocator).?); - const version = value.asString(allocator) orelse ""; - - if (try parseDependency( - lockfile, - pm, - allocator, - log, - source, - group, - &string_builder, - features, - package_dependencies, - total_dependencies_count, - null, - null, - external_name, - version, - key.loc, - value.loc, - )) |_dep| { - var dep = _dep; - if (group.behavior.isPeer() and 
optional_peer_dependencies.contains(external_name.hash)) { - dep.behavior.optional = true; - } - - if (bundle_all_deps or bundled_deps.contains(dep.name.slice(lockfile.buffers.string_bytes.items))) { - dep.behavior.bundled = true; - } - - package_dependencies[total_dependencies_count] = dep; - total_dependencies_count += 1; - } - } - }, - else => unreachable, - } - } - } - } - - std.sort.pdq( - Dependency, - package_dependencies[0..total_dependencies_count], - lockfile.buffers.string_bytes.items, - Dependency.isLessThan, - ); - - package.dependencies.off = @as(u32, @truncate(off)); - package.dependencies.len = @as(u32, @truncate(total_dependencies_count)); - - package.resolutions = @as(@TypeOf(package.resolutions), @bitCast(package.dependencies)); - - @memset(lockfile.buffers.resolutions.items.ptr[off..total_len], invalid_package_id); - - const new_len = off + total_dependencies_count; - lockfile.buffers.dependencies.items = lockfile.buffers.dependencies.items.ptr[0..new_len]; - lockfile.buffers.resolutions.items = lockfile.buffers.resolutions.items.ptr[0..new_len]; - - // This function depends on package.dependencies being set, so it is done at the very end. - if (comptime features.is_main) { - try lockfile.overrides.parseAppend(pm, lockfile, package, log, source, json, &string_builder); - - var found_any_catalog_or_catalog_object = false; - var has_workspaces = false; - if (json.get("workspaces")) |workspaces_expr| { - found_any_catalog_or_catalog_object = try lockfile.catalogs.parseAppend(pm, lockfile, log, source, workspaces_expr, &string_builder); - has_workspaces = true; - } - - // `"workspaces"` being an object instead of an array is sometimes - // unexpected to people. therefore if you also are using workspaces, - // allow "catalog" and "catalogs" in top-level "package.json" - // so it's easier to guess. - if (!found_any_catalog_or_catalog_object and has_workspaces) { - _ = try lockfile.catalogs.parseAppend(pm, lockfile, log, source, json, &string_builder); - } - } - - string_builder.clamp(); - } - - pub const List = bun.MultiArrayList(Package); - - pub const Serializer = struct { - pub const sizes = blk: { - const fields = std.meta.fields(Package); - const Data = struct { - size: usize, - size_index: usize, - alignment: usize, - Type: type, - }; - var data: [fields.len]Data = undefined; - for (fields, &data, 0..) 
|field_info, *elem, i| { - elem.* = .{ - .size = @sizeOf(field_info.type), - .size_index = i, - .Type = field_info.type, - .alignment = if (@sizeOf(field_info.type) == 0) 1 else field_info.alignment, + package.resolution = .{ + .tag = .root, + .value = .{ .root = {} }, }; } - const SortContext = struct { - data: []Data, - pub fn swap(comptime ctx: @This(), comptime lhs: usize, comptime rhs: usize) void { - const tmp = ctx.data[lhs]; - ctx.data[lhs] = ctx.data[rhs]; - ctx.data[rhs] = tmp; - } - pub fn lessThan(comptime ctx: @This(), comptime lhs: usize, comptime rhs: usize) bool { - return ctx.data[lhs].alignment > ctx.data[rhs].alignment; - } - }; - std.sort.insertionContext(0, fields.len, SortContext{ - .data = &data, - }); - var sizes_bytes: [fields.len]usize = undefined; - var field_indexes: [fields.len]usize = undefined; - var Types: [fields.len]type = undefined; - for (data, &sizes_bytes, &field_indexes, &Types) |elem, *size, *index, *Type| { - size.* = elem.size; - index.* = elem.size_index; - Type.* = elem.Type; - } - break :blk .{ - .bytes = sizes_bytes, - .fields = field_indexes, - .Types = Types, - }; - }; - const FieldsEnum = @typeInfo(Package.List.Field).@"enum"; - - pub fn byteSize(list: Package.List) usize { - const sizes_vector: std.meta.Vector(sizes.bytes.len, usize) = sizes.bytes; - const capacity_vector: @Vector(sizes.bytes.len, usize) = @splat(list.len); - return @reduce(.Add, capacity_vector * sizes_vector); - } - - const AlignmentType = sizes.Types[sizes.fields[0]]; - - pub fn save(list: Package.List, comptime StreamType: type, stream: StreamType, comptime Writer: type, writer: Writer) !void { - try writer.writeInt(u64, list.len, .little); - try writer.writeInt(u64, @alignOf(@TypeOf(list.bytes)), .little); - try writer.writeInt(u64, sizes.Types.len, .little); - const begin_at = try stream.getPos(); - try writer.writeInt(u64, 0, .little); - const end_at = try stream.getPos(); - try writer.writeInt(u64, 0, .little); - - _ = try Aligner.write(@TypeOf(list.bytes), Writer, writer, try stream.getPos()); - - const really_begin_at = try stream.getPos(); - var sliced = list.slice(); - - inline for (FieldsEnum.fields) |field| { - const value = sliced.items(@field(Package.List.Field, field.name)); - if (comptime Environment.allow_assert) { - debug("save(\"{s}\") = {d} bytes", .{ field.name, std.mem.sliceAsBytes(value).len }); - if (comptime strings.eqlComptime(field.name, "meta")) { - for (value) |meta| { - assert(meta.has_install_script != .old); - } + if (json.asProperty("patchedDependencies")) |patched_deps| { + const obj = patched_deps.expr.data.e_object; + lockfile.patched_dependencies.ensureTotalCapacity(allocator, obj.properties.len) catch unreachable; + for (obj.properties.slice()) |prop| { + const key = prop.key.?; + const value = prop.value.?; + if (key.isString() and value.isString()) { + var sfb = std.heap.stackFallback(1024, allocator); + const keyhash = try key.asStringHash(sfb.get(), String.Builder.stringHash) orelse unreachable; + const patch_path = string_builder.append(String, value.asString(allocator).?); + lockfile.patched_dependencies.put(allocator, keyhash, .{ .path = patch_path }) catch unreachable; } } - comptime assertNoUninitializedPadding(@TypeOf(value)); - try writer.writeAll(std.mem.sliceAsBytes(value)); } - const really_end_at = try stream.getPos(); + bin: { + if (json.asProperty("bin")) |bin| { + switch (bin.expr.data) { + .e_object => |obj| { + switch (obj.properties.len) { + 0 => {}, + 1 => { + const bin_name = 
obj.properties.ptr[0].key.?.asString(allocator) orelse break :bin; + const value = obj.properties.ptr[0].value.?.asString(allocator) orelse break :bin; - _ = stream.pwrite(std.mem.asBytes(&really_begin_at), begin_at); - _ = stream.pwrite(std.mem.asBytes(&really_end_at), end_at); - } + package.bin = .{ + .tag = .named_file, + .value = .{ + .named_file = .{ + string_builder.append(String, bin_name), + string_builder.append(String, value), + }, + }, + }; + }, + else => { + const current_len = lockfile.buffers.extern_strings.items.len; + const count = @as(usize, obj.properties.len * 2); + try lockfile.buffers.extern_strings.ensureTotalCapacityPrecise( + lockfile.allocator, + current_len + count, + ); + var extern_strings = lockfile.buffers.extern_strings.items.ptr[current_len .. current_len + count]; + lockfile.buffers.extern_strings.items.len += count; - const PackagesLoadResult = struct { - list: Package.List, - needs_update: bool = false, - }; + var i: usize = 0; + for (obj.properties.slice()) |bin_prop| { + extern_strings[i] = string_builder.append(ExternalString, bin_prop.key.?.asString(allocator) orelse break :bin); + i += 1; + extern_strings[i] = string_builder.append(ExternalString, bin_prop.value.?.asString(allocator) orelse break :bin); + i += 1; + } + if (comptime Environment.allow_assert) assert(i == extern_strings.len); + package.bin = .{ + .tag = .map, + .value = .{ .map = ExternalStringList.init(lockfile.buffers.extern_strings.items, extern_strings) }, + }; + }, + } - pub fn load( - stream: *Stream, - end: usize, - allocator: Allocator, - ) !PackagesLoadResult { - var reader = stream.reader(); + break :bin; + }, + .e_string => |stri| { + if (stri.data.len > 0) { + package.bin = .{ + .tag = .file, + .value = .{ + .file = string_builder.append(String, stri.data), + }, + }; + break :bin; + } + }, + else => {}, + } + } - const list_len = try reader.readInt(u64, .little); - if (list_len > std.math.maxInt(u32) - 1) - return error.@"Lockfile validation failed: list is impossibly long"; - - const input_alignment = try reader.readInt(u64, .little); - - var list = Package.List{}; - const Alingee = @TypeOf(list.bytes); - const expected_alignment = @alignOf(Alingee); - if (expected_alignment != input_alignment) { - return error.@"Lockfile validation failed: alignment mismatch"; - } - - const field_count = try reader.readInt(u64, .little); - switch (field_count) { - sizes.Types.len => {}, - // "scripts" field is absent before v0.6.8 - // we will back-fill from each package.json - sizes.Types.len - 1 => {}, - else => { - return error.@"Lockfile validation failed: unexpected number of package fields"; - }, - } - - const begin_at = try reader.readInt(u64, .little); - const end_at = try reader.readInt(u64, .little); - if (begin_at > end or end_at > end or begin_at > end_at) { - return error.@"Lockfile validation failed: invalid package list range"; - } - stream.pos = begin_at; - try list.ensureTotalCapacity(allocator, list_len); - list.len = list_len; - var sliced = list.slice(); - - var needs_update = false; - inline for (FieldsEnum.fields) |field| { - const value = sliced.items(@field(Package.List.Field, field.name)); - - comptime assertNoUninitializedPadding(@TypeOf(value)); - const bytes = std.mem.sliceAsBytes(value); - const end_pos = stream.pos + bytes.len; - if (end_pos <= end_at) { - @memcpy(bytes, stream.buffer[stream.pos..][0..bytes.len]); - stream.pos = end_pos; - if (comptime strings.eqlComptime(field.name, "meta")) { - // need to check if any values were created from an older 
version of bun
-                    // (currently just `has_install_script`). If any are found, the values need
-                    // to be updated before saving the lockfile.
-                    for (value) |*meta| {
-                        if (meta.needsUpdate()) {
-                            needs_update = true;
-                            break;
+            if (json.asProperty("directories")) |dirs| {
+                // https://docs.npmjs.com/cli/v8/configuring-npm/package-json#directoriesbin
+                // Because of the way the bin directive works,
+                // specifying both a bin path and setting
+                // directories.bin is an error. If you want to
+                // specify individual files, use bin, and for all
+                // the files in an existing bin directory, use
+                // directories.bin.
+                if (dirs.expr.asProperty("bin")) |bin_prop| {
+                    if (bin_prop.expr.asString(allocator)) |str_| {
+                        if (str_.len > 0) {
+                            package.bin = .{
+                                .tag = .dir,
+                                .value = .{
+                                    .dir = string_builder.append(String, str_),
+                                },
+                            };
+                            break :bin;
                         }
                     }
                 }
-            } else if (comptime strings.eqlComptime(field.name, "scripts")) {
-                @memset(bytes, 0);
-            } else {
-                return error.@"Lockfile validation failed: invalid package list range";
             }
         }
-        return .{
-            .list = list,
-            .needs_update = needs_update,
-        };
+            package.scripts.parseAlloc(allocator, &string_builder, json);
+            package.scripts.filled = true;
+
+            // It is allowed for duplicate dependencies to exist in optionalDependencies and regular dependencies
+            if (comptime features.check_for_duplicate_dependencies) {
+                lockfile.scratch.duplicate_checker_map.clearRetainingCapacity();
+                try lockfile.scratch.duplicate_checker_map.ensureTotalCapacity(total_dependencies_count);
+            }
+
+            var bundled_deps = bun.StringSet.init(allocator);
+            defer bundled_deps.deinit();
+            var bundle_all_deps = false;
+            if (comptime ResolverContext != void and ResolverContext.checkBundledDependencies()) {
+                if (json.get("bundleDependencies") orelse json.get("bundledDependencies")) |bundled_deps_expr| {
+                    switch (bundled_deps_expr.data) {
+                        .e_boolean => |boolean| {
+                            bundle_all_deps = boolean.value;
+                        },
+                        .e_array => |arr| {
+                            for (arr.slice()) |item| {
+                                try bundled_deps.insert(item.asString(allocator) orelse continue);
+                            }
+                        },
+                        else => {},
+                    }
+                }
+            }
+
+            total_dependencies_count = 0;
+
+            inline for (dependency_groups) |group| {
+                if (group.behavior.isWorkspace()) {
+                    var seen_workspace_names = TrustedDependenciesSet{};
+                    defer seen_workspace_names.deinit(allocator);
+                    for (workspace_names.values(), workspace_names.keys()) |entry, path| {
+
+                        // Workspace names come from each package.json; duplicates are not allowed.
+                        const gop = try seen_workspace_names.getOrPut(allocator, @truncate(String.Builder.stringHash(entry.name)));
+                        if (gop.found_existing) {
+                            // This path does a lot of extra work to format the error message,
+                            // but that is fine because the install is going to fail anyway, so
+                            // it has zero effect on the happy path.
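For context: the branch above fires because each workspace name is hashed and inserted into a set, and getOrPut reports found_existing on the second insertion of the same hash. A minimal standalone sketch of that pattern, using std.AutoHashMap and plain Wyhash in place of the TrustedDependenciesSet and truncated String.Builder.stringHash used here:

    const std = @import("std");

    test "duplicate workspace names are detected via a hash set" {
        var seen = std.AutoHashMap(u64, void).init(std.testing.allocator);
        defer seen.deinit();

        const names = [_][]const u8{ "pkg-a", "pkg-b", "pkg-a" };
        var duplicate: ?[]const u8 = null;

        for (names) |name| {
            // found_existing flips to true on the second "pkg-a"
            const gop = try seen.getOrPut(std.hash.Wyhash.hash(0, name));
            if (gop.found_existing) {
                duplicate = name;
                break;
            }
        }
        try std.testing.expectEqualStrings("pkg-a", duplicate.?);
    }

The error-formatting work below then runs only on this already-failing path.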
+                            var cwd_buf: bun.PathBuffer = undefined;
+                            const cwd = try bun.getcwd(&cwd_buf);
+
+                            const num_notes = count: {
+                                var i: usize = 0;
+                                for (workspace_names.values()) |value| {
+                                    if (strings.eqlLong(value.name, entry.name, true))
+                                        i += 1;
+                                }
+                                break :count i;
+                            };
+                            const notes = notes: {
+                                var notes = try allocator.alloc(logger.Data, num_notes);
+                                var i: usize = 0;
+                                for (workspace_names.values(), workspace_names.keys()) |value, note_path| {
+                                    if (note_path.ptr == path.ptr) continue;
+                                    if (strings.eqlLong(value.name, entry.name, true)) {
+                                        const note_abs_path = bun.handleOom(allocator.dupeZ(u8, Path.joinAbsStringZ(cwd, &.{ note_path, "package.json" }, .auto)));
+
+                                        const note_src = bun.sys.File.toSource(note_abs_path, allocator, .{}).unwrap() catch logger.Source.initEmptyFile(note_abs_path);
+
+                                        notes[i] = .{
+                                            .text = "Package name is also declared here",
+                                            .location = logger.Location.initOrNull(&note_src, note_src.rangeOfString(value.name_loc)),
+                                        };
+                                        i += 1;
+                                    }
+                                }
+                                break :notes notes[0..i];
+                            };
+
+                            const abs_path = Path.joinAbsStringZ(cwd, &.{ path, "package.json" }, .auto);
+
+                            const src = bun.sys.File.toSource(abs_path, allocator, .{}).unwrap() catch logger.Source.initEmptyFile(abs_path);
+
+                            log.addRangeErrorFmtWithNotes(
+                                &src,
+                                src.rangeOfString(entry.name_loc),
+                                allocator,
+                                notes,
+                                "Workspace name \"{s}\" already exists",
+                                .{
+                                    entry.name,
+                                },
+                            ) catch {};
+                            return error.InstallFailed;
+                        }
+
+                        const external_name = string_builder.append(ExternalString, entry.name);
+
+                        const workspace_version = brk: {
+                            if (entry.version) |version_string| {
+                                const external_version = string_builder.append(ExternalString, version_string);
+                                allocator.free(version_string);
+                                const sliced = external_version.value.sliced(lockfile.buffers.string_bytes.items);
+                                const result = Semver.Version.parse(sliced);
+                                if (result.valid and result.wildcard == .none) {
+                                    break :brk result.version.min();
+                                }
+                            }
+
+                            break :brk null;
+                        };
+
+                        if (try parseDependency(
+                            lockfile,
+                            pm,
+                            allocator,
+                            log,
+                            source,
+                            group,
+                            &string_builder,
+                            features,
+                            package_dependencies,
+                            total_dependencies_count,
+                            .workspace,
+                            workspace_version,
+                            external_name,
+                            path,
+                            logger.Loc.Empty,
+                            logger.Loc.Empty,
+                        )) |_dep| {
+                            var dep = _dep;
+                            if (group.behavior.isPeer() and optional_peer_dependencies.contains(external_name.hash)) {
+                                dep.behavior = dep.behavior.add(.optional);
+                            }
+
+                            package_dependencies[total_dependencies_count] = dep;
+                            total_dependencies_count += 1;
+
+                            try lockfile.workspace_paths.put(allocator, external_name.hash, dep.version.value.workspace);
+                            if (workspace_version) |version| {
+                                try lockfile.workspace_versions.put(allocator, external_name.hash, version);
+                            }
+                        }
+                    }
+                } else {
+                    if (json.asProperty(group.prop)) |dependencies_q| {
+                        switch (dependencies_q.expr.data) {
+                            .e_object => |obj| {
+                                for (obj.properties.slice()) |item| {
+                                    const key = item.key.?;
+                                    const value = item.value.?;
+                                    const external_name = string_builder.append(ExternalString, key.asString(allocator).?);
+                                    const version = value.asString(allocator) orelse "";
+
+                                    if (try parseDependency(
+                                        lockfile,
+                                        pm,
+                                        allocator,
+                                        log,
+                                        source,
+                                        group,
+                                        &string_builder,
+                                        features,
+                                        package_dependencies,
+                                        total_dependencies_count,
+                                        null,
+                                        null,
+                                        external_name,
+                                        version,
+                                        key.loc,
+                                        value.loc,
+                                    )) |_dep| {
+                                        var dep = _dep;
+                                        if (group.behavior.isPeer() and optional_peer_dependencies.contains(external_name.hash)) {
+                                            dep.behavior.optional = true;
+                                        }
+
+                                        if (bundle_all_deps or bundled_deps.contains(dep.name.slice(lockfile.buffers.string_bytes.items))) {
+                                            dep.behavior.bundled = true;
+                                        }
+
+                                        package_dependencies[total_dependencies_count] = dep;
+                                        total_dependencies_count += 1;
+                                    }
+                                }
+                            },
+                            else => unreachable,
+                        }
+                    }
+                }
+            }
+
+            std.sort.pdq(
+                Dependency,
+                package_dependencies[0..total_dependencies_count],
+                lockfile.buffers.string_bytes.items,
+                Dependency.isLessThan,
+            );
+
+            package.dependencies.off = @as(u32, @truncate(off));
+            package.dependencies.len = @as(u32, @truncate(total_dependencies_count));
+
+            package.resolutions = @as(@TypeOf(package.resolutions), @bitCast(package.dependencies));
+
+            @memset(lockfile.buffers.resolutions.items.ptr[off..total_len], invalid_package_id);
+
+            const new_len = off + total_dependencies_count;
+            lockfile.buffers.dependencies.items = lockfile.buffers.dependencies.items.ptr[0..new_len];
+            lockfile.buffers.resolutions.items = lockfile.buffers.resolutions.items.ptr[0..new_len];
+
+            // This function depends on package.dependencies being set, so it is done at the very end.
+            if (comptime features.is_main) {
+                try lockfile.overrides.parseAppend(pm, lockfile, package, log, source, json, &string_builder);
+
+                var found_any_catalog_or_catalog_object = false;
+                var has_workspaces = false;
+                if (json.get("workspaces")) |workspaces_expr| {
+                    found_any_catalog_or_catalog_object = try lockfile.catalogs.parseAppend(pm, lockfile, log, source, workspaces_expr, &string_builder);
+                    has_workspaces = true;
+                }
+
+                // `"workspaces"` being an object instead of an array sometimes surprises
+                // people. Therefore, if workspaces are also in use, allow "catalog" and
+                // "catalogs" in the top-level "package.json" so the intent is easier to guess.
+                if (!found_any_catalog_or_catalog_object and has_workspaces) {
+                    _ = try lockfile.catalogs.parseAppend(pm, lockfile, log, source, json, &string_builder);
+                }
+            }
+
+            string_builder.clamp();
         }
+
+        pub const List = bun.MultiArrayList(PackageType);
+
+        pub const Serializer = struct {
+            pub const sizes = blk: {
+                const fields = std.meta.fields(PackageType);
+                const Data = struct {
+                    size: usize,
+                    size_index: usize,
+                    alignment: usize,
+                    Type: type,
+                };
+                var data: [fields.len]Data = undefined;
+                for (fields, &data, 0..) 
|field_info, *elem, i| { + elem.* = .{ + .size = @sizeOf(field_info.type), + .size_index = i, + .Type = field_info.type, + .alignment = if (@sizeOf(field_info.type) == 0) 1 else field_info.alignment, + }; + } + const SortContext = struct { + data: []Data, + pub fn swap(comptime ctx: @This(), comptime lhs: usize, comptime rhs: usize) void { + const tmp = ctx.data[lhs]; + ctx.data[lhs] = ctx.data[rhs]; + ctx.data[rhs] = tmp; + } + pub fn lessThan(comptime ctx: @This(), comptime lhs: usize, comptime rhs: usize) bool { + return ctx.data[lhs].alignment > ctx.data[rhs].alignment; + } + }; + std.sort.insertionContext(0, fields.len, SortContext{ + .data = &data, + }); + var sizes_bytes: [fields.len]usize = undefined; + var field_indexes: [fields.len]usize = undefined; + var Types: [fields.len]type = undefined; + for (data, &sizes_bytes, &field_indexes, &Types) |elem, *size, *index, *Type| { + size.* = elem.size; + index.* = elem.size_index; + Type.* = elem.Type; + } + break :blk .{ + .bytes = sizes_bytes, + .fields = field_indexes, + .Types = Types, + }; + }; + + const FieldsEnum = @typeInfo(List.Field).@"enum"; + + pub fn byteSize(list: List) usize { + const sizes_vector: std.meta.Vector(sizes.bytes.len, usize) = sizes.bytes; + const capacity_vector: @Vector(sizes.bytes.len, usize) = @splat(list.len); + return @reduce(.Add, capacity_vector * sizes_vector); + } + + const AlignmentType = sizes.Types[sizes.fields[0]]; + + pub fn save(list: List, comptime StreamType: type, stream: StreamType, comptime Writer: type, writer: Writer) !void { + try writer.writeInt(u64, list.len, .little); + try writer.writeInt(u64, @alignOf(@TypeOf(list.bytes)), .little); + try writer.writeInt(u64, sizes.Types.len, .little); + const begin_at = try stream.getPos(); + try writer.writeInt(u64, 0, .little); + const end_at = try stream.getPos(); + try writer.writeInt(u64, 0, .little); + + _ = try Aligner.write(@TypeOf(list.bytes), Writer, writer, try stream.getPos()); + + const really_begin_at = try stream.getPos(); + var sliced = list.slice(); + + inline for (FieldsEnum.fields) |field| { + const value = sliced.items(@field(List.Field, field.name)); + if (comptime Environment.allow_assert) { + debug("save(\"{s}\") = {d} bytes", .{ field.name, std.mem.sliceAsBytes(value).len }); + if (comptime strings.eqlComptime(field.name, "meta")) { + for (value) |meta| { + assert(meta.has_install_script != .old); + } + } + } + comptime assertNoUninitializedPadding(@TypeOf(value)); + try writer.writeAll(std.mem.sliceAsBytes(value)); + } + + const really_end_at = try stream.getPos(); + + _ = stream.pwrite(std.mem.asBytes(&really_begin_at), begin_at); + _ = stream.pwrite(std.mem.asBytes(&really_end_at), end_at); + } + + const PackagesLoadResult = struct { + list: List, + needs_update: bool = false, + }; + + pub fn load( + stream: *Stream, + end: usize, + allocator: Allocator, + migrate_from_v2: bool, + ) !PackagesLoadResult { + var reader = stream.reader(); + + const list_len = try reader.readInt(u64, .little); + if (list_len > std.math.maxInt(u32) - 1) + return error.@"Lockfile validation failed: list is impossibly long"; + + const input_alignment = try reader.readInt(u64, .little); + + var list = List{}; + + const Alingee = @TypeOf(list.bytes); + const expected_alignment = @alignOf(Alingee); + if (expected_alignment != input_alignment) { + return error.@"Lockfile validation failed: alignment mismatch"; + } + + const field_count = try reader.readInt(u64, .little); + switch (field_count) { + sizes.Types.len => {}, + // "scripts" field is 
absent before v0.6.8
+                // we will back-fill from each package.json
+                sizes.Types.len - 1 => {},
+                else => {
+                    return error.@"Lockfile validation failed: unexpected number of package fields";
+                },
+            }
+
+            const begin_at = try reader.readInt(u64, .little);
+            const end_at = try reader.readInt(u64, .little);
+            if (begin_at > end or end_at > end or begin_at > end_at) {
+                return error.@"Lockfile validation failed: invalid package list range";
+            }
+            stream.pos = begin_at;
+            try list.ensureTotalCapacity(allocator, list_len);
+
+            var needs_update = false;
+            if (migrate_from_v2) {
+                const OldPackageV2 = Package(u32);
+                var list_for_migrating_from_v2 = OldPackageV2.List{};
+                defer list_for_migrating_from_v2.deinit(allocator);
+
+                try list_for_migrating_from_v2.ensureTotalCapacity(allocator, list_len);
+                list_for_migrating_from_v2.len = list_len;
+
+                try loadFields(stream, end_at, OldPackageV2.List, &list_for_migrating_from_v2, &needs_update);
+
+                for (0..list_for_migrating_from_v2.len) |_pkg_id| {
+                    const pkg_id: PackageID = @intCast(_pkg_id);
+                    const old = list_for_migrating_from_v2.get(pkg_id);
+                    const new: PackageType = .{
+                        .name = old.name,
+                        .name_hash = old.name_hash,
+                        .meta = old.meta,
+                        .bin = old.bin,
+                        .dependencies = old.dependencies,
+                        .resolutions = old.resolutions,
+                        .scripts = old.scripts,
+                        .resolution = switch (old.resolution.tag) {
+                            .uninitialized => .init(.{ .uninitialized = old.resolution.value.uninitialized }),
+                            .root => .init(.{ .root = old.resolution.value.root }),
+                            .npm => .init(.{ .npm = old.resolution.value.npm.migrate() }),
+                            .folder => .init(.{ .folder = old.resolution.value.folder }),
+                            .local_tarball => .init(.{ .local_tarball = old.resolution.value.local_tarball }),
+                            .github => .init(.{ .github = old.resolution.value.github }),
+                            .git => .init(.{ .git = old.resolution.value.git }),
+                            .symlink => .init(.{ .symlink = old.resolution.value.symlink }),
+                            .workspace => .init(.{ .workspace = old.resolution.value.workspace }),
+                            .remote_tarball => .init(.{ .remote_tarball = old.resolution.value.remote_tarball }),
+                            .single_file_module => .init(.{ .single_file_module = old.resolution.value.single_file_module }),
+                            else => .init(.{ .uninitialized = {} }),
+                        },
+                    };
+
+                    list.appendAssumeCapacity(new);
+                }
+            } else {
+                list.len = list_len;
+                try loadFields(stream, end_at, List, &list, &needs_update);
+            }
+
+            return .{
+                .list = list,
+                .needs_update = needs_update,
+            };
+        }
+
+        fn loadFields(stream: *Stream, end_at: u64, comptime ListType: type, list: *ListType, needs_update: *bool) !void {
+            var sliced = list.slice();
+
+            inline for (FieldsEnum.fields) |field| {
+                const value = sliced.items(@field(ListType.Field, field.name));
+
+                comptime assertNoUninitializedPadding(@TypeOf(value));
+                const bytes = std.mem.sliceAsBytes(value);
+                const end_pos = stream.pos + bytes.len;
+                if (end_pos <= end_at) {
+                    @memcpy(bytes, stream.buffer[stream.pos..][0..bytes.len]);
+                    stream.pos = end_pos;
+                    if (comptime strings.eqlComptime(field.name, "meta")) {
+                        // need to check if any values were created from an older version of bun
+                        // (currently just `has_install_script`). If any are found, the values need
+                        // to be updated before saving the lockfile.
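Aside: each pass of this inline for copies one field's backing array wholesale because Package.List is a bun.MultiArrayList, i.e. a structure-of-arrays in which every struct field lives in its own contiguous slab. That layout is what makes the lockfile's field-at-a-time (de)serialization possible. A small self-contained sketch of the idea with std.MultiArrayList and an illustrative Row type (not from this patch):

    const std = @import("std");

    const Row = struct { a: u32, b: u8 };

    test "MultiArrayList stores one contiguous slice per field" {
        var rows = std.MultiArrayList(Row){};
        defer rows.deinit(std.testing.allocator);

        try rows.append(std.testing.allocator, .{ .a = 1, .b = 2 });
        try rows.append(std.testing.allocator, .{ .a = 3, .b = 4 });

        const sliced = rows.slice();
        const as = sliced.items(.a); // []u32: all `a` values, contiguous
        const bs = sliced.items(.b); // []u8: all `b` values, contiguous

        try std.testing.expectEqual(@as(u32, 3), as[1]);
        try std.testing.expectEqual(@as(u8, 2), bs[0]);

        // a whole field can be written or read as raw bytes in one shot
        try std.testing.expectEqual(@as(usize, 8), std.mem.sliceAsBytes(as).len);
    }

The loop below then applies the meta back-fill check described in the comment above.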
+ for (value) |*meta| { + if (meta.needsUpdate()) { + needs_update.* = true; + break; + } + } + } + } else if (comptime strings.eqlComptime(field.name, "scripts")) { + @memset(bytes, 0); + } else { + return error.@"Lockfile validation failed: invalid package list range"; + } + } + } + }; }; -}; +} const string = []const u8; const std = @import("std"); +const ResolutionType = @import("../resolution.zig").ResolutionType; const Allocator = std.mem.Allocator; const bun = @import("bun"); @@ -2167,7 +2222,6 @@ const PackageID = bun.install.PackageID; const PackageManager = install.PackageManager; const PackageNameHash = install.PackageNameHash; const Repository = install.Repository; -const Resolution = bun.install.Resolution; const TruncatedPackageNameHash = install.TruncatedPackageNameHash; const initializeStore = install.initializeStore; const invalid_package_id = install.invalid_package_id; diff --git a/src/install/lockfile/bun.lockb.zig b/src/install/lockfile/bun.lockb.zig index 64e020eb4f..d70f79764b 100644 --- a/src/install/lockfile/bun.lockb.zig +++ b/src/install/lockfile/bun.lockb.zig @@ -253,6 +253,7 @@ pub fn save(this: *Lockfile, verbose_log: bool, bytes: *std.ArrayList(u8), total pub const SerializerLoadResult = struct { packages_need_update: bool = false, + migrated_from_lockb_v2: bool = false, }; pub fn load( @@ -271,9 +272,20 @@ pub fn load( return error.InvalidLockfile; } + var migrate_from_v2 = false; const format = try reader.readInt(u32, .little); - if (format != @intFromEnum(Lockfile.FormatVersion.current)) { - return error.@"Outdated lockfile version"; + if (format > @intFromEnum(Lockfile.FormatVersion.current)) { + return error.@"Unexpected lockfile version"; + } + + if (format < @intFromEnum(Lockfile.FormatVersion.current)) { + + // we only allow migrating from v2 to v3 or above + if (format != @intFromEnum(Lockfile.FormatVersion.v2)) { + return error.@"Outdated lockfile version"; + } + + migrate_from_v2 = true; } lockfile.format = Lockfile.FormatVersion.current; @@ -290,10 +302,13 @@ pub fn load( stream, total_buffer_size, allocator, + migrate_from_v2, ); lockfile.packages = packages_load_result.list; + res.packages_need_update = packages_load_result.needs_update; + res.migrated_from_lockb_v2 = migrate_from_v2; lockfile.buffers = try Lockfile.Buffers.load( stream, diff --git a/src/install/npm.zig b/src/install/npm.zig index e390e0abf5..c3eed59b2b 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -932,7 +932,8 @@ pub const PackageManifest = struct { // - v0.0.3: added serialization of registry url. it's used to invalidate when it changes // - v0.0.4: fixed bug with cpu & os tag not being added correctly // - v0.0.5: added bundled dependencies - pub const version = "bun-npm-manifest-cache-v0.0.5\n"; + // - v0.0.6: changed semver major/minor/patch to each use u64 instead of u32 + pub const version = "bun-npm-manifest-cache-v0.0.6\n"; const header_bytes: string = "#!/usr/bin/env bun\n" ++ version; pub const sizes = blk: { diff --git a/src/install/resolution.zig b/src/install/resolution.zig index 9ceec439a4..70b7542933 100644 --- a/src/install/resolution.zig +++ b/src/install/resolution.zig @@ -1,438 +1,445 @@ -pub const Resolution = extern struct { - tag: Tag = .uninitialized, - _padding: [7]u8 = .{0} ** 7, - value: Value = .{ .uninitialized = {} }, +pub const Resolution = ResolutionType(u64); +pub const OldV2Resolution = ResolutionType(u32); - /// Use like Resolution.init(.{ .npm = VersionedURL{ ... 
} }) - pub inline fn init(value: bun.meta.Tagged(Value, Tag)) Resolution { - return Resolution{ - .tag = std.meta.activeTag(value), - .value = Value.init(value), - }; - } +pub fn ResolutionType(comptime SemverIntType: type) type { + return extern struct { + tag: Tag = .uninitialized, + _padding: [7]u8 = .{0} ** 7, + value: Value = .{ .uninitialized = {} }, - pub fn isGit(this: *const Resolution) bool { - return this.tag.isGit(); - } + const This = @This(); - pub fn canEnqueueInstallTask(this: *const Resolution) bool { - return this.tag.canEnqueueInstallTask(); - } - - const FromTextLockfileError = OOM || error{ - UnexpectedResolution, - InvalidSemver, - }; - - pub fn fromTextLockfile(res_str: string, string_buf: *String.Buf) FromTextLockfileError!Resolution { - if (strings.hasPrefixComptime(res_str, "root:")) { - return Resolution.init(.{ .root = {} }); - } - - if (strings.withoutPrefixIfPossibleComptime(res_str, "link:")) |link| { - return Resolution.init(.{ .symlink = try string_buf.append(link) }); - } - - if (strings.withoutPrefixIfPossibleComptime(res_str, "workspace:")) |workspace| { - return Resolution.init(.{ .workspace = try string_buf.append(workspace) }); - } - - if (strings.withoutPrefixIfPossibleComptime(res_str, "file:")) |folder| { - return Resolution.init(.{ .folder = try string_buf.append(folder) }); - } - - return switch (Dependency.Version.Tag.infer(res_str)) { - .git => Resolution.init(.{ .git = try Repository.parseAppendGit(res_str, string_buf) }), - .github => Resolution.init(.{ .github = try Repository.parseAppendGithub(res_str, string_buf) }), - .tarball => { - if (Dependency.isRemoteTarball(res_str)) { - return Resolution.init(.{ .remote_tarball = try string_buf.append(res_str) }); - } - - return Resolution.init(.{ .local_tarball = try string_buf.append(res_str) }); - }, - .npm => { - const version_literal = try string_buf.append(res_str); - const parsed = Semver.Version.parse(version_literal.sliced(string_buf.bytes.items)); - - if (!parsed.valid) { - return error.UnexpectedResolution; - } - - if (parsed.version.major == null or parsed.version.minor == null or parsed.version.patch == null) { - return error.UnexpectedResolution; - } - - return .{ - .tag = .npm, - .value = .{ - .npm = .{ - .version = parsed.version.min(), - - // will fill this later - .url = .{}, - }, - }, - }; - }, - - // covered above - .workspace => error.UnexpectedResolution, - .symlink => error.UnexpectedResolution, - .folder => error.UnexpectedResolution, - - // even though it's a dependency type, it's not - // possible for 'catalog:' to be written to the - // lockfile for any resolution because the install - // will fail it it's not successfully replaced by - // a version - .catalog => error.UnexpectedResolution, - - // should not happen - .dist_tag => error.UnexpectedResolution, - .uninitialized => error.UnexpectedResolution, - }; - } - - pub fn order( - lhs: *const Resolution, - rhs: *const Resolution, - lhs_buf: []const u8, - rhs_buf: []const u8, - ) std.math.Order { - if (lhs.tag != rhs.tag) { - return std.math.order(@intFromEnum(lhs.tag), @intFromEnum(rhs.tag)); - } - - return switch (lhs.tag) { - .npm => lhs.value.npm.order(rhs.value.npm, lhs_buf, rhs_buf), - .local_tarball => lhs.value.local_tarball.order(&rhs.value.local_tarball, lhs_buf, rhs_buf), - .folder => lhs.value.folder.order(&rhs.value.folder, lhs_buf, rhs_buf), - .remote_tarball => lhs.value.remote_tarball.order(&rhs.value.remote_tarball, lhs_buf, rhs_buf), - .workspace => lhs.value.workspace.order(&rhs.value.workspace, 
lhs_buf, rhs_buf), - .symlink => lhs.value.symlink.order(&rhs.value.symlink, lhs_buf, rhs_buf), - .single_file_module => lhs.value.single_file_module.order(&rhs.value.single_file_module, lhs_buf, rhs_buf), - .git => lhs.value.git.order(&rhs.value.git, lhs_buf, rhs_buf), - .github => lhs.value.github.order(&rhs.value.github, lhs_buf, rhs_buf), - else => .eq, - }; - } - - pub fn count(this: *const Resolution, buf: []const u8, comptime Builder: type, builder: Builder) void { - switch (this.tag) { - .npm => this.value.npm.count(buf, Builder, builder), - .local_tarball => builder.count(this.value.local_tarball.slice(buf)), - .folder => builder.count(this.value.folder.slice(buf)), - .remote_tarball => builder.count(this.value.remote_tarball.slice(buf)), - .workspace => builder.count(this.value.workspace.slice(buf)), - .symlink => builder.count(this.value.symlink.slice(buf)), - .single_file_module => builder.count(this.value.single_file_module.slice(buf)), - .git => this.value.git.count(buf, Builder, builder), - .github => this.value.github.count(buf, Builder, builder), - else => {}, - } - } - - pub fn clone(this: *const Resolution, buf: []const u8, comptime Builder: type, builder: Builder) Resolution { - return .{ - .tag = this.tag, - .value = switch (this.tag) { - .npm => Value.init(.{ .npm = this.value.npm.clone(buf, Builder, builder) }), - .local_tarball => Value.init(.{ - .local_tarball = builder.append(String, this.value.local_tarball.slice(buf)), - }), - .folder => Value.init(.{ - .folder = builder.append(String, this.value.folder.slice(buf)), - }), - .remote_tarball => Value.init(.{ - .remote_tarball = builder.append(String, this.value.remote_tarball.slice(buf)), - }), - .workspace => Value.init(.{ - .workspace = builder.append(String, this.value.workspace.slice(buf)), - }), - .symlink => Value.init(.{ - .symlink = builder.append(String, this.value.symlink.slice(buf)), - }), - .single_file_module => Value.init(.{ - .single_file_module = builder.append(String, this.value.single_file_module.slice(buf)), - }), - .git => Value.init(.{ - .git = this.value.git.clone(buf, Builder, builder), - }), - .github => Value.init(.{ - .github = this.value.github.clone(buf, Builder, builder), - }), - .root => Value.init(.{ .root = {} }), - else => { - std.debug.panic("Internal error: unexpected resolution tag: {}", .{this.tag}); - }, - }, - }; - } - - pub fn fmt(this: *const Resolution, string_bytes: []const u8, path_sep: bun.fmt.PathFormatOptions.Sep) Formatter { - return Formatter{ - .resolution = this, - .buf = string_bytes, - .path_sep = path_sep, - }; - } - - const StorePathFormatter = struct { - res: *const Resolution, - string_buf: string, - // opts: String.StorePathFormatter.Options, - - pub fn format(this: StorePathFormatter, comptime _: string, _: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { - const string_buf = this.string_buf; - const res = this.res.value; - switch (this.res.tag) { - .root => try writer.writeAll("root"), - .npm => try writer.print("{}", .{res.npm.version.fmt(string_buf)}), - .local_tarball => try writer.print("{}", .{res.local_tarball.fmtStorePath(string_buf)}), - .remote_tarball => try writer.print("{}", .{res.remote_tarball.fmtStorePath(string_buf)}), - .folder => try writer.print("{}", .{res.folder.fmtStorePath(string_buf)}), - .git => try writer.print("{}", .{res.git.fmtStorePath("git+", string_buf)}), - .github => try writer.print("{}", .{res.github.fmtStorePath("github+", string_buf)}), - .workspace => try writer.print("{}", 
.{res.workspace.fmtStorePath(string_buf)}), - .symlink => try writer.print("{}", .{res.symlink.fmtStorePath(string_buf)}), - .single_file_module => try writer.print("{}", .{res.single_file_module.fmtStorePath(string_buf)}), - else => {}, - } - } - }; - - pub fn fmtStorePath(this: *const Resolution, string_buf: string) StorePathFormatter { - return .{ - .res = this, - .string_buf = string_buf, - }; - } - - pub fn fmtURL(this: *const Resolution, string_bytes: []const u8) URLFormatter { - return URLFormatter{ .resolution = this, .buf = string_bytes }; - } - - pub fn fmtForDebug(this: *const Resolution, string_bytes: []const u8) DebugFormatter { - return DebugFormatter{ .resolution = this, .buf = string_bytes }; - } - - pub fn eql( - lhs: *const Resolution, - rhs: *const Resolution, - lhs_string_buf: []const u8, - rhs_string_buf: []const u8, - ) bool { - if (lhs.tag != rhs.tag) return false; - - return switch (lhs.tag) { - .root => true, - .npm => lhs.value.npm.eql(rhs.value.npm), - .local_tarball => lhs.value.local_tarball.eql( - rhs.value.local_tarball, - lhs_string_buf, - rhs_string_buf, - ), - .folder => lhs.value.folder.eql( - rhs.value.folder, - lhs_string_buf, - rhs_string_buf, - ), - .remote_tarball => lhs.value.remote_tarball.eql( - rhs.value.remote_tarball, - lhs_string_buf, - rhs_string_buf, - ), - .workspace => lhs.value.workspace.eql( - rhs.value.workspace, - lhs_string_buf, - rhs_string_buf, - ), - .symlink => lhs.value.symlink.eql( - rhs.value.symlink, - lhs_string_buf, - rhs_string_buf, - ), - .single_file_module => lhs.value.single_file_module.eql( - rhs.value.single_file_module, - lhs_string_buf, - rhs_string_buf, - ), - .git => lhs.value.git.eql( - &rhs.value.git, - lhs_string_buf, - rhs_string_buf, - ), - .github => lhs.value.github.eql( - &rhs.value.github, - lhs_string_buf, - rhs_string_buf, - ), - else => unreachable, - }; - } - - pub const URLFormatter = struct { - resolution: *const Resolution, - - buf: []const u8, - - pub fn format(formatter: URLFormatter, comptime layout: []const u8, opts: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { - const buf = formatter.buf; - const value = formatter.resolution.value; - switch (formatter.resolution.tag) { - .npm => try writer.writeAll(value.npm.url.slice(formatter.buf)), - .local_tarball => try bun.fmt.fmtPath(u8, value.local_tarball.slice(buf), .{ .path_sep = .posix }).format("", {}, writer), - .folder => try writer.writeAll(value.folder.slice(formatter.buf)), - .remote_tarball => try writer.writeAll(value.remote_tarball.slice(formatter.buf)), - .git => try value.git.formatAs("git+", formatter.buf, layout, opts, writer), - .github => try value.github.formatAs("github:", formatter.buf, layout, opts, writer), - .workspace => try std.fmt.format(writer, "workspace:{s}", .{value.workspace.slice(formatter.buf)}), - .symlink => try std.fmt.format(writer, "link:{s}", .{value.symlink.slice(formatter.buf)}), - .single_file_module => try std.fmt.format(writer, "module:{s}", .{value.single_file_module.slice(formatter.buf)}), - else => {}, - } - } - }; - - pub const Formatter = struct { - resolution: *const Resolution, - buf: []const u8, - path_sep: bun.fmt.PathFormatOptions.Sep, - - pub fn format(formatter: Formatter, comptime layout: []const u8, opts: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { - const buf = formatter.buf; - const value = formatter.resolution.value; - switch (formatter.resolution.tag) { - .npm => try value.npm.version.fmt(buf).format(layout, opts, writer), - .local_tarball 
=> try bun.fmt.fmtPath(u8, value.local_tarball.slice(buf), .{ .path_sep = formatter.path_sep }).format("", {}, writer), - .folder => try bun.fmt.fmtPath(u8, value.folder.slice(buf), .{ .path_sep = formatter.path_sep }).format("", {}, writer), - .remote_tarball => try writer.writeAll(value.remote_tarball.slice(buf)), - .git => try value.git.formatAs("git+", buf, layout, opts, writer), - .github => try value.github.formatAs("github:", buf, layout, opts, writer), - .workspace => try std.fmt.format(writer, "workspace:{s}", .{bun.fmt.fmtPath(u8, value.workspace.slice(buf), .{ - .path_sep = formatter.path_sep, - })}), - .symlink => try std.fmt.format(writer, "link:{s}", .{bun.fmt.fmtPath(u8, value.symlink.slice(buf), .{ - .path_sep = formatter.path_sep, - })}), - .single_file_module => try std.fmt.format(writer, "module:{s}", .{value.single_file_module.slice(buf)}), - else => {}, - } - } - }; - - pub const DebugFormatter = struct { - resolution: *const Resolution, - buf: []const u8, - - pub fn format(formatter: DebugFormatter, comptime layout: []const u8, opts: std.fmt.FormatOptions, writer: anytype) !void { - try writer.writeAll("Resolution{ ."); - try writer.writeAll(bun.tagName(Tag, formatter.resolution.tag) orelse "invalid"); - try writer.writeAll(" = "); - switch (formatter.resolution.tag) { - .npm => try formatter.resolution.value.npm.version.fmt(formatter.buf).format(layout, opts, writer), - .local_tarball => try writer.writeAll(formatter.resolution.value.local_tarball.slice(formatter.buf)), - .folder => try writer.writeAll(formatter.resolution.value.folder.slice(formatter.buf)), - .remote_tarball => try writer.writeAll(formatter.resolution.value.remote_tarball.slice(formatter.buf)), - .git => try formatter.resolution.value.git.formatAs("git+", formatter.buf, layout, opts, writer), - .github => try formatter.resolution.value.github.formatAs("github:", formatter.buf, layout, opts, writer), - .workspace => try std.fmt.format(writer, "workspace:{s}", .{formatter.resolution.value.workspace.slice(formatter.buf)}), - .symlink => try std.fmt.format(writer, "link:{s}", .{formatter.resolution.value.symlink.slice(formatter.buf)}), - .single_file_module => try std.fmt.format(writer, "module:{s}", .{formatter.resolution.value.single_file_module.slice(formatter.buf)}), - else => try writer.writeAll("{}"), - } - try writer.writeAll(" }"); - } - }; - - pub const Value = extern union { - uninitialized: void, - root: void, - - npm: VersionedURL, - - folder: String, - - /// File path to a tarball relative to the package root - local_tarball: String, - - github: Repository, - - git: Repository, - - /// global link - symlink: String, - - workspace: String, - - /// URL to a tarball. - remote_tarball: String, - - single_file_module: String, - - /// To avoid undefined memory between union values, we must zero initialize the union first. - pub fn init(field: bun.meta.Tagged(Value, Tag)) Value { - return switch (field) { - inline else => |v, t| @unionInit(Value, @tagName(t), v), + /// Use like Resolution.init(.{ .npm = VersionedURL{ ... } }) + pub inline fn init(value: bun.meta.Tagged(Value, Tag)) This { + return .{ + .tag = std.meta.activeTag(value), + .value = Value.init(value), }; } - }; - pub const Tag = enum(u8) { - uninitialized = 0, - root = 1, - npm = 2, - folder = 4, - - local_tarball = 8, - - github = 16, - - git = 32, - - symlink = 64, - - workspace = 72, - - remote_tarball = 80, - - // This is a placeholder for now. - // But the intent is to eventually support URL imports at the package manager level. 
-        //
-        // There are many ways to do it, but perhaps one way to be maximally compatible is just removing the protocol part of the URL.
-        //
-        // For example, bun would transform this input:
-        //
-        //    import _ from "https://github.com/lodash/lodash/lodash.min.js";
-        //
-        // Into:
-        //
-        //    import _ from "github.com/lodash/lodash/lodash.min.js";
-        //
-        // github.com would become a package, with it's own package.json
-        // This is similar to how Go does it, except it wouldn't clone the whole repo.
-        // There are more efficient ways to do this, e.g. generate a .bun file just for all URL imports.
-        // There are questions of determinism, but perhaps that's what Integrity would do.
-        single_file_module = 100,
-
-        _,
-
-        pub fn isGit(this: Tag) bool {
-            return this == .git or this == .github;
+        pub fn isGit(this: *const This) bool {
+            return this.tag.isGit();
         }

-        pub fn canEnqueueInstallTask(this: Tag) bool {
-            return this == .npm or this == .local_tarball or this == .remote_tarball or this == .git or this == .github;
+        pub fn canEnqueueInstallTask(this: *const This) bool {
+            return this.tag.canEnqueueInstallTask();
         }
+
+        const FromTextLockfileError = OOM || error{
+            UnexpectedResolution,
+            InvalidSemver,
+        };
+
+        pub fn fromTextLockfile(res_str: string, string_buf: *String.Buf) FromTextLockfileError!This {
+            if (strings.hasPrefixComptime(res_str, "root:")) {
+                return This.init(.{ .root = {} });
+            }
+
+            if (strings.withoutPrefixIfPossibleComptime(res_str, "link:")) |link| {
+                return This.init(.{ .symlink = try string_buf.append(link) });
+            }
+
+            if (strings.withoutPrefixIfPossibleComptime(res_str, "workspace:")) |workspace| {
+                return This.init(.{ .workspace = try string_buf.append(workspace) });
+            }
+
+            if (strings.withoutPrefixIfPossibleComptime(res_str, "file:")) |folder| {
+                return This.init(.{ .folder = try string_buf.append(folder) });
+            }
+
+            return switch (Dependency.Version.Tag.infer(res_str)) {
+                .git => This.init(.{ .git = try Repository.parseAppendGit(res_str, string_buf) }),
+                .github => This.init(.{ .github = try Repository.parseAppendGithub(res_str, string_buf) }),
+                .tarball => {
+                    if (Dependency.isRemoteTarball(res_str)) {
+                        return This.init(.{ .remote_tarball = try string_buf.append(res_str) });
+                    }
+
+                    return This.init(.{ .local_tarball = try string_buf.append(res_str) });
+                },
+                .npm => {
+                    const version_literal = try string_buf.append(res_str);
+                    const parsed = Semver.Version.parse(version_literal.sliced(string_buf.bytes.items));
+
+                    if (!parsed.valid) {
+                        return error.UnexpectedResolution;
+                    }
+
+                    if (parsed.version.major == null or parsed.version.minor == null or parsed.version.patch == null) {
+                        return error.UnexpectedResolution;
+                    }
+
+                    return .{
+                        .tag = .npm,
+                        .value = .{
+                            .npm = .{
+                                .version = parsed.version.min(),
+
+                                // will fill this later
+                                .url = .{},
+                            },
+                        },
+                    };
+                },
+
+                // covered above
+                .workspace => error.UnexpectedResolution,
+                .symlink => error.UnexpectedResolution,
+                .folder => error.UnexpectedResolution,
+
+                // even though it's a dependency type, it's not
+                // possible for 'catalog:' to be written to the
+                // lockfile for any resolution because the install
+                // will fail if it's not successfully replaced by
+                // a version
+                .catalog => error.UnexpectedResolution,
+
+                // should not happen
+                .dist_tag => error.UnexpectedResolution,
+                .uninitialized => error.UnexpectedResolution,
+            };
+        }
+
+        pub fn order(
+            lhs: *const This,
+            rhs: *const This,
+            lhs_buf: []const u8,
+            rhs_buf: []const u8,
+        ) std.math.Order {
+            if (lhs.tag != rhs.tag) {
+                return 
std.math.order(@intFromEnum(lhs.tag), @intFromEnum(rhs.tag)); + } + + return switch (lhs.tag) { + .npm => lhs.value.npm.order(rhs.value.npm, lhs_buf, rhs_buf), + .local_tarball => lhs.value.local_tarball.order(&rhs.value.local_tarball, lhs_buf, rhs_buf), + .folder => lhs.value.folder.order(&rhs.value.folder, lhs_buf, rhs_buf), + .remote_tarball => lhs.value.remote_tarball.order(&rhs.value.remote_tarball, lhs_buf, rhs_buf), + .workspace => lhs.value.workspace.order(&rhs.value.workspace, lhs_buf, rhs_buf), + .symlink => lhs.value.symlink.order(&rhs.value.symlink, lhs_buf, rhs_buf), + .single_file_module => lhs.value.single_file_module.order(&rhs.value.single_file_module, lhs_buf, rhs_buf), + .git => lhs.value.git.order(&rhs.value.git, lhs_buf, rhs_buf), + .github => lhs.value.github.order(&rhs.value.github, lhs_buf, rhs_buf), + else => .eq, + }; + } + + pub fn count(this: *const This, buf: []const u8, comptime Builder: type, builder: Builder) void { + switch (this.tag) { + .npm => this.value.npm.count(buf, Builder, builder), + .local_tarball => builder.count(this.value.local_tarball.slice(buf)), + .folder => builder.count(this.value.folder.slice(buf)), + .remote_tarball => builder.count(this.value.remote_tarball.slice(buf)), + .workspace => builder.count(this.value.workspace.slice(buf)), + .symlink => builder.count(this.value.symlink.slice(buf)), + .single_file_module => builder.count(this.value.single_file_module.slice(buf)), + .git => this.value.git.count(buf, Builder, builder), + .github => this.value.github.count(buf, Builder, builder), + else => {}, + } + } + + pub fn clone(this: *const This, buf: []const u8, comptime Builder: type, builder: Builder) This { + return .{ + .tag = this.tag, + .value = switch (this.tag) { + .npm => Value.init(.{ .npm = this.value.npm.clone(buf, Builder, builder) }), + .local_tarball => Value.init(.{ + .local_tarball = builder.append(String, this.value.local_tarball.slice(buf)), + }), + .folder => Value.init(.{ + .folder = builder.append(String, this.value.folder.slice(buf)), + }), + .remote_tarball => Value.init(.{ + .remote_tarball = builder.append(String, this.value.remote_tarball.slice(buf)), + }), + .workspace => Value.init(.{ + .workspace = builder.append(String, this.value.workspace.slice(buf)), + }), + .symlink => Value.init(.{ + .symlink = builder.append(String, this.value.symlink.slice(buf)), + }), + .single_file_module => Value.init(.{ + .single_file_module = builder.append(String, this.value.single_file_module.slice(buf)), + }), + .git => Value.init(.{ + .git = this.value.git.clone(buf, Builder, builder), + }), + .github => Value.init(.{ + .github = this.value.github.clone(buf, Builder, builder), + }), + .root => Value.init(.{ .root = {} }), + else => { + std.debug.panic("Internal error: unexpected resolution tag: {}", .{this.tag}); + }, + }, + }; + } + + pub fn fmt(this: *const This, string_bytes: []const u8, path_sep: bun.fmt.PathFormatOptions.Sep) Formatter { + return Formatter{ + .resolution = this, + .buf = string_bytes, + .path_sep = path_sep, + }; + } + + const StorePathFormatter = struct { + res: *const This, + string_buf: string, + // opts: String.StorePathFormatter.Options, + + pub fn format(this: StorePathFormatter, comptime _: string, _: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { + const string_buf = this.string_buf; + const res = this.res.value; + switch (this.res.tag) { + .root => try writer.writeAll("root"), + .npm => try writer.print("{}", .{res.npm.version.fmt(string_buf)}), + .local_tarball => try 
writer.print("{}", .{res.local_tarball.fmtStorePath(string_buf)}), + .remote_tarball => try writer.print("{}", .{res.remote_tarball.fmtStorePath(string_buf)}), + .folder => try writer.print("{}", .{res.folder.fmtStorePath(string_buf)}), + .git => try writer.print("{}", .{res.git.fmtStorePath("git+", string_buf)}), + .github => try writer.print("{}", .{res.github.fmtStorePath("github+", string_buf)}), + .workspace => try writer.print("{}", .{res.workspace.fmtStorePath(string_buf)}), + .symlink => try writer.print("{}", .{res.symlink.fmtStorePath(string_buf)}), + .single_file_module => try writer.print("{}", .{res.single_file_module.fmtStorePath(string_buf)}), + else => {}, + } + } + }; + + pub fn fmtStorePath(this: *const This, string_buf: string) StorePathFormatter { + return .{ + .res = this, + .string_buf = string_buf, + }; + } + + pub fn fmtURL(this: *const This, string_bytes: []const u8) URLFormatter { + return URLFormatter{ .resolution = this, .buf = string_bytes }; + } + + pub fn fmtForDebug(this: *const This, string_bytes: []const u8) DebugFormatter { + return DebugFormatter{ .resolution = this, .buf = string_bytes }; + } + + pub fn eql( + lhs: *const This, + rhs: *const This, + lhs_string_buf: []const u8, + rhs_string_buf: []const u8, + ) bool { + if (lhs.tag != rhs.tag) return false; + + return switch (lhs.tag) { + .root => true, + .npm => lhs.value.npm.eql(rhs.value.npm), + .local_tarball => lhs.value.local_tarball.eql( + rhs.value.local_tarball, + lhs_string_buf, + rhs_string_buf, + ), + .folder => lhs.value.folder.eql( + rhs.value.folder, + lhs_string_buf, + rhs_string_buf, + ), + .remote_tarball => lhs.value.remote_tarball.eql( + rhs.value.remote_tarball, + lhs_string_buf, + rhs_string_buf, + ), + .workspace => lhs.value.workspace.eql( + rhs.value.workspace, + lhs_string_buf, + rhs_string_buf, + ), + .symlink => lhs.value.symlink.eql( + rhs.value.symlink, + lhs_string_buf, + rhs_string_buf, + ), + .single_file_module => lhs.value.single_file_module.eql( + rhs.value.single_file_module, + lhs_string_buf, + rhs_string_buf, + ), + .git => lhs.value.git.eql( + &rhs.value.git, + lhs_string_buf, + rhs_string_buf, + ), + .github => lhs.value.github.eql( + &rhs.value.github, + lhs_string_buf, + rhs_string_buf, + ), + else => unreachable, + }; + } + + pub const URLFormatter = struct { + resolution: *const This, + + buf: []const u8, + + pub fn format(formatter: URLFormatter, comptime layout: []const u8, opts: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { + const buf = formatter.buf; + const value = formatter.resolution.value; + switch (formatter.resolution.tag) { + .npm => try writer.writeAll(value.npm.url.slice(formatter.buf)), + .local_tarball => try bun.fmt.fmtPath(u8, value.local_tarball.slice(buf), .{ .path_sep = .posix }).format("", {}, writer), + .folder => try writer.writeAll(value.folder.slice(formatter.buf)), + .remote_tarball => try writer.writeAll(value.remote_tarball.slice(formatter.buf)), + .git => try value.git.formatAs("git+", formatter.buf, layout, opts, writer), + .github => try value.github.formatAs("github:", formatter.buf, layout, opts, writer), + .workspace => try std.fmt.format(writer, "workspace:{s}", .{value.workspace.slice(formatter.buf)}), + .symlink => try std.fmt.format(writer, "link:{s}", .{value.symlink.slice(formatter.buf)}), + .single_file_module => try std.fmt.format(writer, "module:{s}", .{value.single_file_module.slice(formatter.buf)}), + else => {}, + } + } + }; + + pub const Formatter = struct { + resolution: *const This, + 
buf: []const u8, + path_sep: bun.fmt.PathFormatOptions.Sep, + + pub fn format(formatter: Formatter, comptime layout: []const u8, opts: std.fmt.FormatOptions, writer: anytype) @TypeOf(writer).Error!void { + const buf = formatter.buf; + const value = formatter.resolution.value; + switch (formatter.resolution.tag) { + .npm => try value.npm.version.fmt(buf).format(layout, opts, writer), + .local_tarball => try bun.fmt.fmtPath(u8, value.local_tarball.slice(buf), .{ .path_sep = formatter.path_sep }).format("", {}, writer), + .folder => try bun.fmt.fmtPath(u8, value.folder.slice(buf), .{ .path_sep = formatter.path_sep }).format("", {}, writer), + .remote_tarball => try writer.writeAll(value.remote_tarball.slice(buf)), + .git => try value.git.formatAs("git+", buf, layout, opts, writer), + .github => try value.github.formatAs("github:", buf, layout, opts, writer), + .workspace => try std.fmt.format(writer, "workspace:{s}", .{bun.fmt.fmtPath(u8, value.workspace.slice(buf), .{ + .path_sep = formatter.path_sep, + })}), + .symlink => try std.fmt.format(writer, "link:{s}", .{bun.fmt.fmtPath(u8, value.symlink.slice(buf), .{ + .path_sep = formatter.path_sep, + })}), + .single_file_module => try std.fmt.format(writer, "module:{s}", .{value.single_file_module.slice(buf)}), + else => {}, + } + } + }; + + pub const DebugFormatter = struct { + resolution: *const This, + buf: []const u8, + + pub fn format(formatter: DebugFormatter, comptime layout: []const u8, opts: std.fmt.FormatOptions, writer: anytype) !void { + try writer.writeAll("Resolution{ ."); + try writer.writeAll(bun.tagName(Tag, formatter.resolution.tag) orelse "invalid"); + try writer.writeAll(" = "); + switch (formatter.resolution.tag) { + .npm => try formatter.resolution.value.npm.version.fmt(formatter.buf).format(layout, opts, writer), + .local_tarball => try writer.writeAll(formatter.resolution.value.local_tarball.slice(formatter.buf)), + .folder => try writer.writeAll(formatter.resolution.value.folder.slice(formatter.buf)), + .remote_tarball => try writer.writeAll(formatter.resolution.value.remote_tarball.slice(formatter.buf)), + .git => try formatter.resolution.value.git.formatAs("git+", formatter.buf, layout, opts, writer), + .github => try formatter.resolution.value.github.formatAs("github:", formatter.buf, layout, opts, writer), + .workspace => try std.fmt.format(writer, "workspace:{s}", .{formatter.resolution.value.workspace.slice(formatter.buf)}), + .symlink => try std.fmt.format(writer, "link:{s}", .{formatter.resolution.value.symlink.slice(formatter.buf)}), + .single_file_module => try std.fmt.format(writer, "module:{s}", .{formatter.resolution.value.single_file_module.slice(formatter.buf)}), + else => try writer.writeAll("{}"), + } + try writer.writeAll(" }"); + } + }; + + pub const Value = extern union { + uninitialized: void, + root: void, + + npm: VersionedURLType(SemverIntType), + + folder: String, + + /// File path to a tarball relative to the package root + local_tarball: String, + + github: Repository, + + git: Repository, + + /// global link + symlink: String, + + workspace: String, + + /// URL to a tarball. + remote_tarball: String, + + single_file_module: String, + + /// To avoid undefined memory between union values, we must zero initialize the union first. 
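The doc comment above is the motivation for the init helper that follows: assigning a single member of an extern union leaves the bytes belonging to larger members undefined, and undefined bytes would make the serialized lockfile nondeterministic. A minimal sketch of the failure mode being avoided, with an illustrative union (not from this patch):

    const std = @import("std");

    const U = extern union {
        small: u8,
        big: u64,
    };

    test "zeroing an extern union keeps every byte defined" {
        var u = std.mem.zeroes(U); // all 8 bytes start as defined zeros
        u.small = 7; // writes byte 0; bytes 1..7 stay zero instead of undefined
        const bytes = std.mem.asBytes(&u);
        try std.testing.expectEqual(@as(u8, 7), bytes[0]);
        for (bytes[1..]) |b| try std.testing.expectEqual(@as(u8, 0), b);
    }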
+            pub fn init(field: bun.meta.Tagged(Value, Tag)) Value {
+                return switch (field) {
+                    inline else => |v, t| @unionInit(Value, @tagName(t), v),
+                };
+            }
+        };
+
+        pub const Tag = enum(u8) {
+            uninitialized = 0,
+            root = 1,
+            npm = 2,
+            folder = 4,
+
+            local_tarball = 8,
+
+            github = 16,
+
+            git = 32,
+
+            symlink = 64,
+
+            workspace = 72,
+
+            remote_tarball = 80,
+
+            // This is a placeholder for now.
+            // But the intent is to eventually support URL imports at the package manager level.
+            //
+            // There are many ways to do it, but perhaps one way to be maximally compatible is just removing the protocol part of the URL.
+            //
+            // For example, bun would transform this input:
+            //
+            //    import _ from "https://github.com/lodash/lodash/lodash.min.js";
+            //
+            // Into:
+            //
+            //    import _ from "github.com/lodash/lodash/lodash.min.js";
+            //
+            // github.com would become a package, with its own package.json
+            // This is similar to how Go does it, except it wouldn't clone the whole repo.
+            // There are more efficient ways to do this, e.g. generate a .bun file just for all URL imports.
+            // There are questions of determinism, but perhaps that's what Integrity would do.
+            single_file_module = 100,
+
+            _,
+
+            pub fn isGit(this: Tag) bool {
+                return this == .git or this == .github;
+            }
+
+            pub fn canEnqueueInstallTask(this: Tag) bool {
+                return this == .npm or this == .local_tarball or this == .remote_tarball or this == .git or this == .github;
+            }
+        };
     };
-};
+}

 const string = []const u8;
 const std = @import("std");

 const Repository = @import("./repository.zig").Repository;
-const VersionedURL = @import("./versioned_url.zig").VersionedURL;
+const VersionedURLType = @import("./versioned_url.zig").VersionedURLType;

 const bun = @import("bun");
 const OOM = bun.OOM;
diff --git a/src/install/versioned_url.zig b/src/install/versioned_url.zig
index 5be724d209..a5cfb6e627 100644
--- a/src/install/versioned_url.zig
+++ b/src/install/versioned_url.zig
@@ -1,27 +1,42 @@
-pub const VersionedURL = extern struct {
-    url: String,
-    version: Semver.Version,
+pub const VersionedURL = VersionedURLType(u64);
+pub const OldV2VersionedURL = VersionedURLType(u32);

-    pub fn eql(this: VersionedURL, other: VersionedURL) bool {
-        return this.version.eql(other.version);
-    }
+pub fn VersionedURLType(comptime SemverIntType: type) type {
+    return extern struct {
+        url: String,
+        version: Semver.VersionType(SemverIntType),

-    pub fn order(this: VersionedURL, other: VersionedURL, lhs_buf: []const u8, rhs_buf: []const u8) @import("std").math.Order {
-        return this.version.order(other.version, lhs_buf, rhs_buf);
-    }
+        pub fn eql(this: @This(), other: @This()) bool {
+            return this.version.eql(other.version);
+        }

-    pub fn count(this: VersionedURL, buf: []const u8, comptime Builder: type, builder: Builder) void {
-        this.version.count(buf, comptime Builder, builder);
-        builder.count(this.url.slice(buf));
-    }
+        pub fn order(this: @This(), other: @This(), lhs_buf: []const u8, rhs_buf: []const u8) @import("std").math.Order {
+            return this.version.order(other.version, lhs_buf, rhs_buf);
+        }

-    pub fn clone(this: VersionedURL, buf: []const u8, comptime Builder: type, builder: Builder) VersionedURL {
-        return VersionedURL{
-            .version = this.version.append(buf, Builder, builder),
-            .url = builder.append(String, this.url.slice(buf)),
-        };
-    }
-};
+        pub fn count(this: @This(), buf: []const u8, comptime Builder: type, builder: Builder) void {
+            this.version.count(buf, comptime Builder, builder);
+            builder.count(this.url.slice(buf));
+        }
+
+        pub fn clone(this: 
@This(), buf: []const u8, comptime Builder: type, builder: Builder) @This() { + return @This(){ + .version = this.version.append(buf, Builder, builder), + .url = builder.append(String, this.url.slice(buf)), + }; + } + + pub fn migrate(this: @This()) VersionedURLType(u64) { + if (comptime SemverIntType != u32) { + @compileError("unexpected SemverIntType"); + } + return .{ + .url = this.url, + .version = this.version.migrate(), + }; + } + }; +} const bun = @import("bun"); diff --git a/src/semver.zig b/src/semver.zig index 43b85e3407..c5b9699edc 100644 --- a/src/semver.zig +++ b/src/semver.zig @@ -2,6 +2,7 @@ pub const String = @import("./semver/SemverString.zig").String; pub const ExternalString = @import("./semver/ExternalString.zig").ExternalString; pub const Version = @import("./semver/Version.zig").Version; +pub const VersionType = @import("./semver/Version.zig").VersionType; pub const SlicedString = @import("./semver/SlicedString.zig"); pub const Range = @import("./semver/SemverRange.zig"); diff --git a/src/semver/SemverQuery.zig b/src/semver/SemverQuery.zig index fe98e42c6a..ab78fcf412 100644 --- a/src/semver/SemverQuery.zig +++ b/src/semver/SemverQuery.zig @@ -425,9 +425,9 @@ pub const Token = struct { .right = .{ .op = .lte, .version = .{ - .major = std.math.maxInt(u32), - .minor = std.math.maxInt(u32), - .patch = std.math.maxInt(u32), + .major = std.math.maxInt(u64), + .minor = std.math.maxInt(u64), + .patch = std.math.maxInt(u64), }, }, }, @@ -437,8 +437,8 @@ pub const Token = struct { .op = .lte, .version = .{ .major = version.major orelse 0, - .minor = std.math.maxInt(u32), - .patch = std.math.maxInt(u32), + .minor = std.math.maxInt(u64), + .patch = std.math.maxInt(u64), }, }, }, @@ -458,8 +458,8 @@ pub const Token = struct { .op = .gt, .version = .{ .major = version.major orelse 0, - .minor = std.math.maxInt(u32), - .patch = std.math.maxInt(u32), + .minor = std.math.maxInt(u64), + .patch = std.math.maxInt(u64), }, }, }, @@ -483,7 +483,7 @@ pub const Token = struct { .version = .{ .major = version.major orelse 0, .minor = version.minor orelse 0, - .patch = std.math.maxInt(u32), + .patch = std.math.maxInt(u64), }, }, }, @@ -504,7 +504,7 @@ pub const Token = struct { .version = .{ .major = version.major orelse 0, .minor = version.minor orelse 0, - .patch = std.math.maxInt(u32), + .patch = std.math.maxInt(u64), }, }, }, diff --git a/src/semver/Version.zig b/src/semver/Version.zig index 25489d33e5..ed18ba2910 100644 --- a/src/semver/Version.zig +++ b/src/semver/Version.zig @@ -1,306 +1,756 @@ -pub const Version = extern struct { - major: u32 = 0, - minor: u32 = 0, - patch: u32 = 0, - _tag_padding: [4]u8 = .{0} ** 4, // [see padding_checker.zig] - tag: Tag = .{}, +pub const Version = VersionType(u64); +pub const OldV2Version = VersionType(u32); - /// Assumes that there is only one buffer for all the strings - pub fn sortGt(ctx: []const u8, lhs: Version, rhs: Version) bool { - return orderFn(ctx, lhs, rhs) == .gt; - } - - pub fn orderFn(ctx: []const u8, lhs: Version, rhs: Version) std.math.Order { - return lhs.order(rhs, ctx, ctx); - } - - pub fn isZero(this: Version) bool { - return this.patch == 0 and this.minor == 0 and this.major == 0; - } - - pub fn parseUTF8(slice: []const u8) ParseResult { - return parse(.{ .buf = slice, .slice = slice }); - } - - pub fn cloneInto(this: Version, slice: []const u8, buf: *[]u8) Version { - return .{ - .major = this.major, - .minor = this.minor, - .patch = this.patch, - .tag = this.tag.cloneInto(slice, buf), - }; - } - - pub inline fn len(this: 
*const Version) u32 { - return this.tag.build.len + this.tag.pre.len; - } - - pub const Formatter = struct { - version: Version, - input: string, - - pub fn format(formatter: Formatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - const self = formatter.version; - try std.fmt.format(writer, "{?d}.{?d}.{?d}", .{ self.major, self.minor, self.patch }); - - if (self.tag.hasPre()) { - const pre = self.tag.pre.slice(formatter.input); - try writer.writeAll("-"); - try writer.writeAll(pre); - } - - if (self.tag.hasBuild()) { - const build = self.tag.build.slice(formatter.input); - try writer.writeAll("+"); - try writer.writeAll(build); - } - } - }; - - pub fn fmt(this: Version, input: string) Formatter { - return .{ .version = this, .input = input }; - } - - pub const DiffFormatter = struct { - version: Version, - buf: string, - other: Version, - other_buf: string, - - pub fn format(this: DiffFormatter, comptime fmt_: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { - if (!Output.enable_ansi_colors) { - // print normally if no colors - const formatter: Formatter = .{ .version = this.version, .input = this.buf }; - return Formatter.format(formatter, fmt_, options, writer); - } - - const diff = this.version.whichVersionIsDifferent(this.other, this.buf, this.other_buf) orelse .none; - - switch (diff) { - .major => try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ - this.version.major, this.version.minor, this.version.patch, - }), - .minor => { - if (this.version.major == 0) { - try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ - this.version.major, this.version.minor, this.version.patch, - }); - } else { - try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ - this.version.major, this.version.minor, this.version.patch, - }); - } - }, - .patch => { - if (this.version.major == 0 and this.version.minor == 0) { - try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ - this.version.major, this.version.minor, this.version.patch, - }); - } else { - try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ - this.version.major, this.version.minor, this.version.patch, - }); - } - }, - .none, .pre, .build => try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ - this.version.major, this.version.minor, this.version.patch, - }), - } - - // might be pre or build. loop through all characters, and insert on - // first diff. - - var set_color = false; - if (this.version.tag.hasPre()) { - if (this.other.tag.hasPre()) { - const pre = this.version.tag.pre.slice(this.buf); - const other_pre = this.other.tag.pre.slice(this.other_buf); - - var first = true; - for (pre, 0..) |c, i| { - if (!set_color and i < other_pre.len and c != other_pre[i]) { - set_color = true; - try writer.writeAll(Output.prettyFmt("", true)); - } - if (first) { - first = false; - try writer.writeByte('-'); - } - try writer.writeByte(c); - } - } else { - try writer.print(Output.prettyFmt("-{}", true), .{this.version.tag.pre.fmt(this.buf)}); - set_color = true; - } - } - - if (this.version.tag.hasBuild()) { - if (this.other.tag.hasBuild()) { - const build = this.version.tag.build.slice(this.buf); - const other_build = this.other.tag.build.slice(this.other_buf); - - var first = true; - for (build, 0..) 
|c, i| { - if (!set_color and i < other_build.len and c != other_build[i]) { - set_color = true; - try writer.writeAll(Output.prettyFmt("", true)); - } - if (first) { - first = false; - try writer.writeByte('+'); - } - try writer.writeByte(c); - } - } else { - if (!set_color) { - try writer.print(Output.prettyFmt("+{}", true), .{this.version.tag.build.fmt(this.buf)}); - } else { - try writer.print("+{}", .{this.version.tag.build.fmt(this.other_buf)}); - } - } - } - - try writer.writeAll(Output.prettyFmt("", true)); - } - }; - - pub fn diffFmt(this: Version, other: Version, this_buf: string, other_buf: string) DiffFormatter { - return .{ - .version = this, - .buf = this_buf, - .other = other, - .other_buf = other_buf, - }; - } - - pub const ChangedVersion = enum { - major, - minor, - patch, - pre, - build, - none, - }; - - pub fn whichVersionIsDifferent( - left: Version, - right: Version, - left_buf: string, - right_buf: string, - ) ?ChangedVersion { - if (left.major != right.major) return .major; - if (left.minor != right.minor) return .minor; - if (left.patch != right.patch) return .patch; - - if (left.tag.hasPre() != right.tag.hasPre()) return .pre; - if (!left.tag.hasPre() and !right.tag.hasPre()) return null; - if (left.tag.orderPre(right.tag, left_buf, right_buf) != .eq) return .pre; - - if (left.tag.hasBuild() != right.tag.hasBuild()) return .build; - if (!left.tag.hasBuild() and !right.tag.hasBuild()) return null; - return if (left.tag.build.order(&right.tag.build, left_buf, right_buf) != .eq) - .build - else - null; - } - - pub fn count(this: *const Version, buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) void { - if (this.tag.hasPre() and !this.tag.pre.isInline()) builder.count(this.tag.pre.slice(buf)); - if (this.tag.hasBuild() and !this.tag.build.isInline()) builder.count(this.tag.build.slice(buf)); - } - - pub fn append(this: *const Version, buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) Version { - var that = this.*; - - if (this.tag.hasPre() and !this.tag.pre.isInline()) that.tag.pre = builder.append(ExternalString, this.tag.pre.slice(buf)); - if (this.tag.hasBuild() and !this.tag.build.isInline()) that.tag.build = builder.append(ExternalString, this.tag.build.slice(buf)); - - return that; - } - - pub const Partial = struct { - major: ?u32 = null, - minor: ?u32 = null, - patch: ?u32 = null, +pub fn VersionType(comptime IntType: type) type { + return extern struct { + major: IntType = 0, + minor: IntType = 0, + patch: IntType = 0, + _tag_padding: [if (IntType == u32) 4 else 0]u8 = .{0} ** if (IntType == u32) 4 else 0, // [see padding_checker.zig] tag: Tag = .{}, - pub fn min(this: Partial) Version { + const This = @This(); + + pub fn migrate(this: This) VersionType(u64) { + if (comptime IntType != u32) { + @compileError("unexpected IntType"); + } + return .{ - .major = this.major orelse 0, - .minor = this.minor orelse 0, - .patch = this.patch orelse 0, - .tag = this.tag, + .major = this.major, + .minor = this.minor, + .patch = this.patch, + ._tag_padding = .{}, + .tag = .{ + .pre = this.tag.pre, + .build = this.tag.build, + }, }; } - pub fn max(this: Partial) Version { + /// Assumes that there is only one buffer for all the strings + pub fn sortGt(ctx: []const u8, lhs: This, rhs: This) bool { + return orderFn(ctx, lhs, rhs) == .gt; + } + + pub fn orderFn(ctx: []const u8, lhs: This, rhs: This) std.math.Order { + return lhs.order(rhs, ctx, ctx); + } + + pub fn isZero(this: This) bool { + return this.patch == 0 and this.minor == 0 
and this.major == 0; + } + + pub fn parseUTF8(slice: []const u8) ParseResult { + return parse(.{ .buf = slice, .slice = slice }); + } + + pub fn cloneInto(this: This, slice: []const u8, buf: *[]u8) This { return .{ - .major = this.major orelse std.math.maxInt(u32), - .minor = this.minor orelse std.math.maxInt(u32), - .patch = this.patch orelse std.math.maxInt(u32), - .tag = this.tag, + .major = this.major, + .minor = this.minor, + .patch = this.patch, + .tag = this.tag.cloneInto(slice, buf), }; } - }; - const Hashable = extern struct { - major: u32, - minor: u32, - patch: u32, - pre: u64, - build: u64, - }; + pub inline fn len(this: *const This) u32 { + return this.tag.build.len + this.tag.pre.len; + } - pub fn hash(this: Version) u64 { - const hashable = Hashable{ - .major = this.major, - .minor = this.minor, - .patch = this.patch, - .pre = this.tag.pre.hash, - .build = this.tag.build.hash, + pub const Formatter = struct { + version: This, + input: string, + + pub fn format(formatter: Formatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + const self = formatter.version; + try std.fmt.format(writer, "{?d}.{?d}.{?d}", .{ self.major, self.minor, self.patch }); + + if (self.tag.hasPre()) { + const pre = self.tag.pre.slice(formatter.input); + try writer.writeAll("-"); + try writer.writeAll(pre); + } + + if (self.tag.hasBuild()) { + const build = self.tag.build.slice(formatter.input); + try writer.writeAll("+"); + try writer.writeAll(build); + } + } }; - const bytes = std.mem.asBytes(&hashable); - return bun.Wyhash.hash(0, bytes); - } - pub fn eql(lhs: Version, rhs: Version) bool { - return lhs.major == rhs.major and lhs.minor == rhs.minor and lhs.patch == rhs.patch and rhs.tag.eql(lhs.tag); - } - - pub const HashContext = struct { - pub fn hash(_: @This(), lhs: Version) u32 { - return @as(u32, @truncate(lhs.hash())); + pub fn fmt(this: This, input: string) Formatter { + return .{ .version = this, .input = input }; } - pub fn eql(_: @This(), lhs: Version, rhs: Version) bool { - return lhs.eql(rhs); + pub const DiffFormatter = struct { + version: This, + buf: string, + other: This, + other_buf: string, + + pub fn format(this: DiffFormatter, comptime fmt_: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { + if (!Output.enable_ansi_colors) { + // print normally if no colors + const formatter: Formatter = .{ .version = this.version, .input = this.buf }; + return Formatter.format(formatter, fmt_, options, writer); + } + + const diff = this.version.whichVersionIsDifferent(this.other, this.buf, this.other_buf) orelse .none; + + switch (diff) { + .major => try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ + this.version.major, this.version.minor, this.version.patch, + }), + .minor => { + if (this.version.major == 0) { + try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ + this.version.major, this.version.minor, this.version.patch, + }); + } else { + try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ + this.version.major, this.version.minor, this.version.patch, + }); + } + }, + .patch => { + if (this.version.major == 0 and this.version.minor == 0) { + try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ + this.version.major, this.version.minor, this.version.patch, + }); + } else { + try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ + this.version.major, this.version.minor, this.version.patch, + }); + } + }, + .none, .pre, .build => try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ + this.version.major, 
this.version.minor, this.version.patch, + }), + } + + // might be pre or build. loop through all characters, and insert on + // first diff. + + var set_color = false; + if (this.version.tag.hasPre()) { + if (this.other.tag.hasPre()) { + const pre = this.version.tag.pre.slice(this.buf); + const other_pre = this.other.tag.pre.slice(this.other_buf); + + var first = true; + for (pre, 0..) |c, i| { + if (!set_color and i < other_pre.len and c != other_pre[i]) { + set_color = true; + try writer.writeAll(Output.prettyFmt("", true)); + } + if (first) { + first = false; + try writer.writeByte('-'); + } + try writer.writeByte(c); + } + } else { + try writer.print(Output.prettyFmt("-{}", true), .{this.version.tag.pre.fmt(this.buf)}); + set_color = true; + } + } + + if (this.version.tag.hasBuild()) { + if (this.other.tag.hasBuild()) { + const build = this.version.tag.build.slice(this.buf); + const other_build = this.other.tag.build.slice(this.other_buf); + + var first = true; + for (build, 0..) |c, i| { + if (!set_color and i < other_build.len and c != other_build[i]) { + set_color = true; + try writer.writeAll(Output.prettyFmt("", true)); + } + if (first) { + first = false; + try writer.writeByte('+'); + } + try writer.writeByte(c); + } + } else { + if (!set_color) { + try writer.print(Output.prettyFmt("+{}", true), .{this.version.tag.build.fmt(this.buf)}); + } else { + try writer.print("+{}", .{this.version.tag.build.fmt(this.other_buf)}); + } + } + } + + try writer.writeAll(Output.prettyFmt("", true)); + } + }; + + pub fn diffFmt(this: This, other: This, this_buf: string, other_buf: string) DiffFormatter { + return .{ + .version = this, + .buf = this_buf, + .other = other, + .other_buf = other_buf, + }; } - }; - pub const PinnedVersion = enum { - major, // ^ - minor, // ~ - patch, // = - }; + pub const ChangedVersion = enum { + major, + minor, + patch, + pre, + build, + none, + }; - /// Modified version of pnpm's `whichVersionIsPinned` - /// https://github.com/pnpm/pnpm/blob/bc0618cf192a9cafd0ab171a3673e23ed0869bbd/packages/which-version-is-pinned/src/index.ts#L9 - /// - /// Differences: - /// - It's not used for workspaces - /// - `npm:` is assumed already removed from aliased versions - /// - Invalid input is considered major pinned (important because these strings are coming - /// from package.json) - /// - /// The goal of this function is to avoid a complete parse of semver that's unused - pub fn whichVersionIsPinned(input: string) PinnedVersion { - const version = strings.trim(input, &strings.whitespace_chars); + pub fn whichVersionIsDifferent( + left: This, + right: This, + left_buf: string, + right_buf: string, + ) ?ChangedVersion { + if (left.major != right.major) return .major; + if (left.minor != right.minor) return .minor; + if (left.patch != right.patch) return .patch; - var i: usize = 0; + if (left.tag.hasPre() != right.tag.hasPre()) return .pre; + if (!left.tag.hasPre() and !right.tag.hasPre()) return null; + if (left.tag.orderPre(right.tag, left_buf, right_buf) != .eq) return .pre; - const pinned: PinnedVersion = pinned: { - for (0..version.len) |j| { - switch (version[j]) { + if (left.tag.hasBuild() != right.tag.hasBuild()) return .build; + if (!left.tag.hasBuild() and !right.tag.hasBuild()) return null; + return if (left.tag.build.order(&right.tag.build, left_buf, right_buf) != .eq) + .build + else + null; + } + + pub fn count(this: *const This, buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) void { + if (this.tag.hasPre() and !this.tag.pre.isInline()) 
builder.count(this.tag.pre.slice(buf)); + if (this.tag.hasBuild() and !this.tag.build.isInline()) builder.count(this.tag.build.slice(buf)); + } + + pub fn append(this: *const This, buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) This { + var that = this.*; + + if (this.tag.hasPre() and !this.tag.pre.isInline()) that.tag.pre = builder.append(ExternalString, this.tag.pre.slice(buf)); + if (this.tag.hasBuild() and !this.tag.build.isInline()) that.tag.build = builder.append(ExternalString, this.tag.build.slice(buf)); + + return that; + } + + pub const Partial = struct { + major: ?IntType = null, + minor: ?IntType = null, + patch: ?IntType = null, + tag: Tag = .{}, + + pub fn min(this: Partial) This { + return .{ + .major = this.major orelse 0, + .minor = this.minor orelse 0, + .patch = this.patch orelse 0, + .tag = this.tag, + }; + } + + pub fn max(this: Partial) This { + return .{ + .major = this.major orelse std.math.maxInt(IntType), + .minor = this.minor orelse std.math.maxInt(IntType), + .patch = this.patch orelse std.math.maxInt(IntType), + .tag = this.tag, + }; + } + }; + + pub fn eql(lhs: This, rhs: This) bool { + return lhs.major == rhs.major and lhs.minor == rhs.minor and lhs.patch == rhs.patch and rhs.tag.eql(lhs.tag); + } + + pub const PinnedVersion = enum { + major, // ^ + minor, // ~ + patch, // = + }; + + /// Modified version of pnpm's `whichVersionIsPinned` + /// https://github.com/pnpm/pnpm/blob/bc0618cf192a9cafd0ab171a3673e23ed0869bbd/packages/which-version-is-pinned/src/index.ts#L9 + /// + /// Differences: + /// - It's not used for workspaces + /// - `npm:` is assumed already removed from aliased versions + /// - Invalid input is considered major pinned (important because these strings are coming + /// from package.json) + /// + /// The goal of this function is to avoid a complete parse of semver that's unused + pub fn whichVersionIsPinned(input: string) PinnedVersion { + const version = strings.trim(input, &strings.whitespace_chars); + + var i: usize = 0; + + const pinned: PinnedVersion = pinned: { + for (0..version.len) |j| { + switch (version[j]) { + // newlines & whitespace + ' ', + '\t', + '\n', + '\r', + std.ascii.control_code.vt, + std.ascii.control_code.ff, + + // version separators + 'v', + '=', + => {}, + + else => |c| { + i = j; + + switch (c) { + '~', '^' => { + i += 1; + + for (i..version.len) |k| { + switch (version[k]) { + ' ', + '\t', + '\n', + '\r', + std.ascii.control_code.vt, + std.ascii.control_code.ff, + => { + // `v` and `=` not included. + // `~v==1` would update to `^1.1.0` if versions `1.0.0`, `1.0.1`, `1.1.0`, and `2.0.0` are available + // note that `~` changes to `^` + }, + + else => { + i = k; + break :pinned if (c == '~') .minor else .major; + }, + } + } + + // entire version after `~` is whitespace. invalid + return .major; + }, + + '0'...'9' => break :pinned .patch, + + // could be invalid, could also be valid range syntax (>=, ...) + // either way, pin major + else => return .major, + } + }, + } + } + + // entire semver is whitespace, `v`, and `=`. Invalid + return .major; + }; + + // `pinned` is `.major`, `.minor`, or `.patch`. 
Check for each version core number:
+            // - if major is missing, return `if (pinned == .patch) .major else pinned`
+            // - if minor is missing, return `if (pinned == .patch) .minor else pinned`
+            // - if patch is missing, return `pinned`
+            // - if there's whitespace or non-digit characters between core numbers, return `.major`
+            // - if the end is reached, return `pinned`
+
+            // major
+            if (i >= version.len or !std.ascii.isDigit(version[i])) return .major;
+            var d = version[i];
+            while (std.ascii.isDigit(d)) {
+                i += 1;
+                if (i >= version.len) return if (pinned == .patch) .major else pinned;
+                d = version[i];
+            }
+
+            if (d != '.') return .major;
+
+            // minor
+            i += 1;
+            if (i >= version.len or !std.ascii.isDigit(version[i])) return .major;
+            d = version[i];
+            while (std.ascii.isDigit(d)) {
+                i += 1;
+                if (i >= version.len) return if (pinned == .patch) .minor else pinned;
+                d = version[i];
+            }
+
+            if (d != '.') return .major;
+
+            // patch
+            i += 1;
+            if (i >= version.len or !std.ascii.isDigit(version[i])) return .major;
+            d = version[i];
+            while (std.ascii.isDigit(d)) {
+                i += 1;
+
+                // patch is done and at input end, valid
+                if (i >= version.len) return pinned;
+                d = version[i];
+            }
+
+            // Skip remaining valid pre/build tag characters and whitespace.
+            // Does not validate whitespace used inside pre/build tags.
+            if (!validPreOrBuildTagCharacter(d) or std.ascii.isWhitespace(d)) return .major;
+            i += 1;
+
+            // at this point the semver is valid, so we can return `pinned` if the input ends here
+            if (i >= version.len) return pinned;
+            d = version[i];
+            while (validPreOrBuildTagCharacter(d) and !std.ascii.isWhitespace(d)) {
+                i += 1;
+                if (i >= version.len) return pinned;
+                d = version[i];
+            }
+
+            // We've come across a character that is not valid for tags or is whitespace.
+ // Trailing whitespace was trimmed so we can assume there's another range + return .major; + } + + fn validPreOrBuildTagCharacter(c: u8) bool { + return switch (c) { + '-', '+', '.', 'A'...'Z', 'a'...'z', '0'...'9' => true, + else => false, + }; + } + + pub fn isTaggedVersionOnly(input: []const u8) bool { + const version = strings.trim(input, &strings.whitespace_chars); + + // first needs to be a-z + if (version.len == 0 or !std.ascii.isAlphabetic(version[0])) return false; + + for (1..version.len) |i| { + if (!std.ascii.isAlphanumeric(version[i])) return false; + } + + return true; + } + + pub fn orderWithoutTag( + lhs: This, + rhs: This, + ) std.math.Order { + if (lhs.major < rhs.major) return .lt; + if (lhs.major > rhs.major) return .gt; + if (lhs.minor < rhs.minor) return .lt; + if (lhs.minor > rhs.minor) return .gt; + if (lhs.patch < rhs.patch) return .lt; + if (lhs.patch > rhs.patch) return .gt; + + if (lhs.tag.hasPre()) { + if (!rhs.tag.hasPre()) return .lt; + } else { + if (rhs.tag.hasPre()) return .gt; + } + + return .eq; + } + + pub fn order( + lhs: This, + rhs: This, + lhs_buf: []const u8, + rhs_buf: []const u8, + ) std.math.Order { + const order_without_tag = orderWithoutTag(lhs, rhs); + if (order_without_tag != .eq) return order_without_tag; + + return lhs.tag.order(rhs.tag, lhs_buf, rhs_buf); + } + + pub fn orderWithoutBuild( + lhs: This, + rhs: This, + lhs_buf: []const u8, + rhs_buf: []const u8, + ) std.math.Order { + const order_without_tag = orderWithoutTag(lhs, rhs); + if (order_without_tag != .eq) return order_without_tag; + + return lhs.tag.orderWithoutBuild(rhs.tag, lhs_buf, rhs_buf); + } + + pub const Tag = extern struct { + pre: ExternalString = ExternalString{}, + build: ExternalString = ExternalString{}, + + pub fn orderPre(lhs: Tag, rhs: Tag, lhs_buf: []const u8, rhs_buf: []const u8) std.math.Order { + const lhs_str = lhs.pre.slice(lhs_buf); + const rhs_str = rhs.pre.slice(rhs_buf); + + // 1. split each by '.', iterating through each one looking for integers + // 2. compare as integers, or if not possible compare as string + // 3. whichever is greater is the greater one + // + // 1.0.0-canary.0.0.0.0.0.0 < 1.0.0-canary.0.0.0.0.0.1 + + var lhs_itr = strings.split(lhs_str, "."); + var rhs_itr = strings.split(rhs_str, "."); + + while (true) { + const lhs_part = lhs_itr.next(); + const rhs_part = rhs_itr.next(); + + if (lhs_part == null and rhs_part == null) return .eq; + + // if right is null, left is greater than. + if (rhs_part == null) return .gt; + + // if left is null, left is less than. 
+ if (lhs_part == null) return .lt; + + const lhs_uint: ?IntType = std.fmt.parseUnsigned(IntType, lhs_part.?, 10) catch null; + const rhs_uint: ?IntType = std.fmt.parseUnsigned(IntType, rhs_part.?, 10) catch null; + + // a part that doesn't parse as an integer is greater than a part that does + // https://github.com/npm/node-semver/blob/816c7b2cbfcb1986958a290f941eddfd0441139e/internal/identifiers.js#L12 + if (lhs_uint != null and rhs_uint == null) return .lt; + if (lhs_uint == null and rhs_uint != null) return .gt; + + if (lhs_uint == null and rhs_uint == null) { + switch (strings.order(lhs_part.?, rhs_part.?)) { + .eq => { + // continue to the next part + continue; + }, + else => |not_equal| return not_equal, + } + } + + switch (std.math.order(lhs_uint.?, rhs_uint.?)) { + .eq => continue, + else => |not_equal| return not_equal, + } + } + + unreachable; + } + + pub fn order( + lhs: Tag, + rhs: Tag, + lhs_buf: []const u8, + rhs_buf: []const u8, + ) std.math.Order { + if (!lhs.pre.isEmpty() and !rhs.pre.isEmpty()) { + return lhs.orderPre(rhs, lhs_buf, rhs_buf); + } + + const pre_order = lhs.pre.order(&rhs.pre, lhs_buf, rhs_buf); + if (pre_order != .eq) return pre_order; + + return lhs.build.order(&rhs.build, lhs_buf, rhs_buf); + } + + pub fn orderWithoutBuild( + lhs: Tag, + rhs: Tag, + lhs_buf: []const u8, + rhs_buf: []const u8, + ) std.math.Order { + if (!lhs.pre.isEmpty() and !rhs.pre.isEmpty()) { + return lhs.orderPre(rhs, lhs_buf, rhs_buf); + } + + return lhs.pre.order(&rhs.pre, lhs_buf, rhs_buf); + } + + pub fn cloneInto(this: Tag, slice: []const u8, buf: *[]u8) Tag { + var pre: String = this.pre.value; + var build: String = this.build.value; + + if (this.pre.isInline()) { + pre = this.pre.value; + } else { + const pre_slice = this.pre.slice(slice); + bun.copy(u8, buf.*, pre_slice); + pre = String.init(buf.*, buf.*[0..pre_slice.len]); + buf.* = buf.*[pre_slice.len..]; + } + + if (this.build.isInline()) { + build = this.build.value; + } else { + const build_slice = this.build.slice(slice); + bun.copy(u8, buf.*, build_slice); + build = String.init(buf.*, buf.*[0..build_slice.len]); + buf.* = buf.*[build_slice.len..]; + } + + return .{ + .pre = .{ + .value = pre, + .hash = this.pre.hash, + }, + .build = .{ + .value = build, + .hash = this.build.hash, + }, + }; + } + + pub inline fn hasPre(this: Tag) bool { + return !this.pre.isEmpty(); + } + + pub inline fn hasBuild(this: Tag) bool { + return !this.build.isEmpty(); + } + + pub fn eql(lhs: Tag, rhs: Tag) bool { + return lhs.pre.hash == rhs.pre.hash; + } + + pub const TagResult = struct { + tag: Tag = Tag{}, + len: u32 = 0, + }; + + var multi_tag_warn = false; + // TODO: support multiple tags + + pub fn parse(sliced_string: SlicedString) TagResult { + return parseWithPreCount(sliced_string, 0); + } + + pub fn parseWithPreCount(sliced_string: SlicedString, initial_pre_count: u32) TagResult { + var input = sliced_string.slice; + var build_count: u32 = 0; + var pre_count: u32 = initial_pre_count; + + for (input) |c| { + switch (c) { + ' ' => break, + '+' => { + build_count += 1; + }, + '-' => { + pre_count += 1; + }, + else => {}, + } + } + + if (build_count == 0 and pre_count == 0) { + return TagResult{ + .len = 0, + }; + } + + const State = enum { none, pre, build }; + var result = TagResult{}; + // Common case: no allocation is necessary. + var state = State.none; + var start: usize = 0; + + var i: usize = 0; + + while (i < input.len) : (i += 1) { + const c = input[i]; + switch (c) { + '+' => { + // qualifier ::= ( '-' pre )? ( '+' build )? 
+ if (state == .pre or state == .none and initial_pre_count > 0) { + result.tag.pre = sliced_string.sub(input[start..i]).external(); + } + + if (state != .build) { + state = .build; + start = i + 1; + } + }, + '-' => { + if (state != .pre) { + state = .pre; + start = i + 1; + } + }, + + // only continue if character is a valid pre/build tag character + // https://semver.org/#spec-item-9 + 'a'...'z', 'A'...'Z', '0'...'9', '.' => {}, + + else => { + switch (state) { + .none => {}, + .pre => { + result.tag.pre = sliced_string.sub(input[start..i]).external(); + + state = State.none; + }, + .build => { + result.tag.build = sliced_string.sub(input[start..i]).external(); + if (comptime Environment.isDebug) { + assert(!strings.containsChar(result.tag.build.slice(sliced_string.buf), '-')); + } + state = State.none; + }, + } + result.len = @truncate(i); + break; + }, + } + } + + if (state == .none and initial_pre_count > 0) { + state = .pre; + start = 0; + } + + switch (state) { + .none => {}, + .pre => { + result.tag.pre = sliced_string.sub(input[start..i]).external(); + // a pre can contain multiple consecutive tags + // checking for "-" prefix is not enough, as --canary.67e7966.0 is a valid tag + state = State.none; + }, + .build => { + // a build can contain multiple consecutive tags + result.tag.build = sliced_string.sub(input[start..i]).external(); + + state = State.none; + }, + } + result.len = @as(u32, @truncate(i)); + + return result; + } + }; + + pub const ParseResult = struct { + wildcard: Query.Token.Wildcard = .none, + valid: bool = true, + version: This.Partial = .{}, + len: u32 = 0, + }; + + pub fn parse(sliced_string: SlicedString) ParseResult { + var input = sliced_string.slice; + var result = ParseResult{}; + + var part_i: u8 = 0; + var part_start_i: usize = 0; + var last_char_i: usize = 0; + + if (input.len == 0) { + result.valid = false; + return result; + } + var is_done = false; + + var i: usize = 0; + + for (0..input.len) |c| { + switch (input[c]) { // newlines & whitespace ' ', '\t', @@ -313,677 +763,221 @@ pub const Version = extern struct { 'v', '=', => {}, + else => { + i = c; + break; + }, + } + } - else => |c| { - i = j; + if (i == input.len) { + result.valid = false; + return result; + } - switch (c) { - '~', '^' => { - i += 1; + // two passes :( + while (i < input.len) { + if (is_done) { + break; + } - for (i..version.len) |k| { - switch (version[k]) { - ' ', - '\t', - '\n', - '\r', - std.ascii.control_code.vt, - std.ascii.control_code.ff, - => { - // `v` and `=` not included. - // `~v==1` would update to `^1.1.0` if versions `1.0.0`, `1.0.1`, `1.1.0`, and `2.0.0` are available - // note that `~` changes to `^` - }, + switch (input[i]) { + ' ' => { + is_done = true; + break; + }, + '|', '^', '#', '&', '%', '!' => { + is_done = true; + if (i > 0) { + i -= 1; + } + break; + }, + '0'...'9' => { + part_start_i = i; + i += 1; - else => { - i = k; - break :pinned if (c == '~') .minor else .major; - }, - } - } + while (i < input.len and switch (input[i]) { + '0'...'9' => true, + else => false, + }) { + i += 1; + } - // entire version after `~` is whitespace. 
invalid - return .major; + last_char_i = i; + + switch (part_i) { + 0 => { + result.version.major = parseVersionNumber(input[part_start_i..last_char_i]); + part_i = 1; }, + 1 => { + result.version.minor = parseVersionNumber(input[part_start_i..last_char_i]); + part_i = 2; + }, + 2 => { + result.version.patch = parseVersionNumber(input[part_start_i..last_char_i]); + part_i = 3; + }, + else => {}, + } - '0'...'9' => break :pinned .patch, - - // could be invalid, could also be valid range syntax (>=, ...) - // either way, pin major - else => return .major, + if (i < input.len and switch (input[i]) { + // `.` is expected only if there are remaining core version numbers + '.' => part_i != 3, + else => false, + }) { + i += 1; } }, - } - } - - // entire semver is whitespace, `v`, and `=`. Invalid - return .major; - }; - - // `pinned` is `.major`, `.minor`, or `.patch`. Check for each version core number: - // - if major is missing, return `if (pinned == .patch) .major else pinned` - // - if minor is missing, return `if (pinned == .patch) .minor else pinned` - // - if patch is missing, return `pinned` - // - if there's whitespace or non-digit characters between core numbers, return `.major` - // - if the end is reached, return `pinned` - - // major - if (i >= version.len or !std.ascii.isDigit(version[i])) return .major; - var d = version[i]; - while (std.ascii.isDigit(d)) { - i += 1; - if (i >= version.len) return if (pinned == .patch) .major else pinned; - d = version[i]; - } - - if (d != '.') return .major; - - // minor - i += 1; - if (i >= version.len or !std.ascii.isDigit(version[i])) return .major; - d = version[i]; - while (std.ascii.isDigit(d)) { - i += 1; - if (i >= version.len) return if (pinned == .patch) .minor else pinned; - d = version[i]; - } - - if (d != '.') return .major; - - // patch - i += 1; - if (i >= version.len or !std.ascii.isDigit(version[i])) return .major; - d = version[i]; - while (std.ascii.isDigit(d)) { - i += 1; - - // patch is done and at input end, valid - if (i >= version.len) return pinned; - d = version[i]; - } - - // Skip remaining valid pre/build tag characters and whitespace. - // Does not validate whitespace used inside pre/build tags. - if (!validPreOrBuildTagCharacter(d) or std.ascii.isWhitespace(d)) return .major; - i += 1; - - // at this point the semver is valid so we can return true if it ends - if (i >= version.len) return pinned; - d = version[i]; - while (validPreOrBuildTagCharacter(d) and !std.ascii.isWhitespace(d)) { - i += 1; - if (i >= version.len) return pinned; - d = version[i]; - } - - // We've come across a character that is not valid for tags or is whitespace. 
- // Trailing whitespace was trimmed so we can assume there's another range - return .major; - } - - fn validPreOrBuildTagCharacter(c: u8) bool { - return switch (c) { - '-', '+', '.', 'A'...'Z', 'a'...'z', '0'...'9' => true, - else => false, - }; - } - - pub fn isTaggedVersionOnly(input: []const u8) bool { - const version = strings.trim(input, &strings.whitespace_chars); - - // first needs to be a-z - if (version.len == 0 or !std.ascii.isAlphabetic(version[0])) return false; - - for (1..version.len) |i| { - if (!std.ascii.isAlphanumeric(version[i])) return false; - } - - return true; - } - - pub fn orderWithoutTag( - lhs: Version, - rhs: Version, - ) std.math.Order { - if (lhs.major < rhs.major) return .lt; - if (lhs.major > rhs.major) return .gt; - if (lhs.minor < rhs.minor) return .lt; - if (lhs.minor > rhs.minor) return .gt; - if (lhs.patch < rhs.patch) return .lt; - if (lhs.patch > rhs.patch) return .gt; - - if (lhs.tag.hasPre()) { - if (!rhs.tag.hasPre()) return .lt; - } else { - if (rhs.tag.hasPre()) return .gt; - } - - return .eq; - } - - pub fn order( - lhs: Version, - rhs: Version, - lhs_buf: []const u8, - rhs_buf: []const u8, - ) std.math.Order { - const order_without_tag = orderWithoutTag(lhs, rhs); - if (order_without_tag != .eq) return order_without_tag; - - return lhs.tag.order(rhs.tag, lhs_buf, rhs_buf); - } - - pub fn orderWithoutBuild( - lhs: Version, - rhs: Version, - lhs_buf: []const u8, - rhs_buf: []const u8, - ) std.math.Order { - const order_without_tag = orderWithoutTag(lhs, rhs); - if (order_without_tag != .eq) return order_without_tag; - - return lhs.tag.orderWithoutBuild(rhs.tag, lhs_buf, rhs_buf); - } - - pub const Tag = extern struct { - pre: ExternalString = ExternalString{}, - build: ExternalString = ExternalString{}, - - pub fn orderPre(lhs: Tag, rhs: Tag, lhs_buf: []const u8, rhs_buf: []const u8) std.math.Order { - const lhs_str = lhs.pre.slice(lhs_buf); - const rhs_str = rhs.pre.slice(rhs_buf); - - // 1. split each by '.', iterating through each one looking for integers - // 2. compare as integers, or if not possible compare as string - // 3. whichever is greater is the greater one - // - // 1.0.0-canary.0.0.0.0.0.0 < 1.0.0-canary.0.0.0.0.0.1 - - var lhs_itr = strings.split(lhs_str, "."); - var rhs_itr = strings.split(rhs_str, "."); - - while (true) { - const lhs_part = lhs_itr.next(); - const rhs_part = rhs_itr.next(); - - if (lhs_part == null and rhs_part == null) return .eq; - - // if right is null, left is greater than. - if (rhs_part == null) return .gt; - - // if left is null, left is less than. 
- if (lhs_part == null) return .lt; - - const lhs_uint: ?u32 = std.fmt.parseUnsigned(u32, lhs_part.?, 10) catch null; - const rhs_uint: ?u32 = std.fmt.parseUnsigned(u32, rhs_part.?, 10) catch null; - - // a part that doesn't parse as an integer is greater than a part that does - // https://github.com/npm/node-semver/blob/816c7b2cbfcb1986958a290f941eddfd0441139e/internal/identifiers.js#L12 - if (lhs_uint != null and rhs_uint == null) return .lt; - if (lhs_uint == null and rhs_uint != null) return .gt; - - if (lhs_uint == null and rhs_uint == null) { - switch (strings.order(lhs_part.?, rhs_part.?)) { - .eq => { - // continue to the next part - continue; - }, - else => |not_equal| return not_equal, - } - } - - switch (std.math.order(lhs_uint.?, rhs_uint.?)) { - .eq => continue, - else => |not_equal| return not_equal, - } - } - - unreachable; - } - - pub fn order( - lhs: Tag, - rhs: Tag, - lhs_buf: []const u8, - rhs_buf: []const u8, - ) std.math.Order { - if (!lhs.pre.isEmpty() and !rhs.pre.isEmpty()) { - return lhs.orderPre(rhs, lhs_buf, rhs_buf); - } - - const pre_order = lhs.pre.order(&rhs.pre, lhs_buf, rhs_buf); - if (pre_order != .eq) return pre_order; - - return lhs.build.order(&rhs.build, lhs_buf, rhs_buf); - } - - pub fn orderWithoutBuild( - lhs: Tag, - rhs: Tag, - lhs_buf: []const u8, - rhs_buf: []const u8, - ) std.math.Order { - if (!lhs.pre.isEmpty() and !rhs.pre.isEmpty()) { - return lhs.orderPre(rhs, lhs_buf, rhs_buf); - } - - return lhs.pre.order(&rhs.pre, lhs_buf, rhs_buf); - } - - pub fn cloneInto(this: Tag, slice: []const u8, buf: *[]u8) Tag { - var pre: String = this.pre.value; - var build: String = this.build.value; - - if (this.pre.isInline()) { - pre = this.pre.value; - } else { - const pre_slice = this.pre.slice(slice); - bun.copy(u8, buf.*, pre_slice); - pre = String.init(buf.*, buf.*[0..pre_slice.len]); - buf.* = buf.*[pre_slice.len..]; - } - - if (this.build.isInline()) { - build = this.build.value; - } else { - const build_slice = this.build.slice(slice); - bun.copy(u8, buf.*, build_slice); - build = String.init(buf.*, buf.*[0..build_slice.len]); - buf.* = buf.*[build_slice.len..]; - } - - return .{ - .pre = .{ - .value = pre, - .hash = this.pre.hash, - }, - .build = .{ - .value = build, - .hash = this.build.hash, - }, - }; - } - - pub inline fn hasPre(this: Tag) bool { - return !this.pre.isEmpty(); - } - - pub inline fn hasBuild(this: Tag) bool { - return !this.build.isEmpty(); - } - - pub fn eql(lhs: Tag, rhs: Tag) bool { - return lhs.pre.hash == rhs.pre.hash; - } - - pub const TagResult = struct { - tag: Tag = Tag{}, - len: u32 = 0, - }; - - var multi_tag_warn = false; - // TODO: support multiple tags - - pub fn parse(sliced_string: SlicedString) TagResult { - return parseWithPreCount(sliced_string, 0); - } - - pub fn parseWithPreCount(sliced_string: SlicedString, initial_pre_count: u32) TagResult { - var input = sliced_string.slice; - var build_count: u32 = 0; - var pre_count: u32 = initial_pre_count; - - for (input) |c| { - switch (c) { - ' ' => break, - '+' => { - build_count += 1; + '.' => { + result.valid = false; + is_done = true; + break; }, - '-' => { - pre_count += 1; + '-', '+' => { + // Just a plain tag with no version is invalid. 
+ if (part_i < 2 and result.wildcard == .none) { + result.valid = false; + is_done = true; + break; + } + + part_start_i = i; + while (i < input.len and switch (input[i]) { + ' ' => true, + else => false, + }) { + i += 1; + } + const tag_result = Tag.parse(sliced_string.sub(input[part_start_i..])); + result.version.tag = tag_result.tag; + i += tag_result.len; + break; + }, + 'x', '*', 'X' => { + part_start_i = i; + i += 1; + + while (i < input.len and switch (input[i]) { + 'x', '*', 'X' => true, + else => false, + }) { + i += 1; + } + + last_char_i = i; + + if (i < input.len and switch (input[i]) { + '.' => true, + else => false, + }) { + i += 1; + } + + if (result.wildcard == .none) { + switch (part_i) { + 0 => { + result.wildcard = Query.Token.Wildcard.major; + part_i = 1; + }, + 1 => { + result.wildcard = Query.Token.Wildcard.minor; + part_i = 2; + }, + 2 => { + result.wildcard = Query.Token.Wildcard.patch; + part_i = 3; + }, + else => {}, + } + } + }, + else => |c| { + + // Some weirdo npm packages in the wild have a version like "1.0.0rc.1" + // npm just expects that to work...even though it has no "-" qualifier. + if (result.wildcard == .none and part_i >= 2 and switch (c) { + 'a'...'z', 'A'...'Z' => true, + else => false, + }) { + part_start_i = i; + const tag_result = Tag.parseWithPreCount(sliced_string.sub(input[part_start_i..]), 1); + result.version.tag = tag_result.tag; + i += tag_result.len; + is_done = true; + last_char_i = i; + break; + } + + last_char_i = 0; + result.valid = false; + is_done = true; + break; + }, + } + } + + if (result.wildcard == .none) { + switch (part_i) { + 0 => { + result.wildcard = Query.Token.Wildcard.major; + }, + 1 => { + result.wildcard = Query.Token.Wildcard.minor; + }, + 2 => { + result.wildcard = Query.Token.Wildcard.patch; }, else => {}, } } - if (build_count == 0 and pre_count == 0) { - return TagResult{ - .len = 0, - }; - } + result.len = @as(u32, @intCast(i)); - const State = enum { none, pre, build }; - var result = TagResult{}; - // Common case: no allocation is necessary. - var state = State.none; - var start: usize = 0; + return result; + } - var i: usize = 0; + fn parseVersionNumber(input: string) ?IntType { + // max decimal u64 is 18446744073709551615 + var bytes: [20]u8 = undefined; + var byte_i: u8 = 0; - while (i < input.len) : (i += 1) { - const c = input[i]; - switch (c) { - '+' => { - // qualifier ::= ( '-' pre )? ( '+' build )? - if (state == .pre or state == .none and initial_pre_count > 0) { - result.tag.pre = sliced_string.sub(input[start..i]).external(); - } + assert(input[0] != '.'); - if (state != .build) { - state = .build; - start = i + 1; - } - }, - '-' => { - if (state != .pre) { - state = .pre; - start = i + 1; - } - }, - - // only continue if character is a valid pre/build tag character - // https://semver.org/#spec-item-9 - 'a'...'z', 'A'...'Z', '0'...'9', '.' => {}, - - else => { - switch (state) { - .none => {}, - .pre => { - result.tag.pre = sliced_string.sub(input[start..i]).external(); - - state = State.none; - }, - .build => { - result.tag.build = sliced_string.sub(input[start..i]).external(); - if (comptime Environment.isDebug) { - assert(!strings.containsChar(result.tag.build.slice(sliced_string.buf), '-')); - } - state = State.none; - }, - } - result.len = @truncate(i); - break; + for (input) |char| { + switch (char) { + 'X', 'x', '*' => return null, + '0'...'9' => { + // out of bounds + if (byte_i + 1 > bytes.len) return null; + bytes[byte_i] = char; + byte_i += 1; }, + ' ', '.' 
=> break, + // ignore invalid characters + else => {}, } } - if (state == .none and initial_pre_count > 0) { - state = .pre; - start = 0; + // If there are no numbers + if (byte_i == 0) return null; + + if (comptime Environment.isDebug) { + return std.fmt.parseInt(IntType, bytes[0..byte_i], 10) catch |err| { + Output.prettyErrorln("ERROR {s} parsing version: \"{s}\", bytes: {s}", .{ + @errorName(err), + input, + bytes[0..byte_i], + }); + return 0; + }; } - switch (state) { - .none => {}, - .pre => { - result.tag.pre = sliced_string.sub(input[start..i]).external(); - // a pre can contain multiple consecutive tags - // checking for "-" prefix is not enough, as --canary.67e7966.0 is a valid tag - state = State.none; - }, - .build => { - // a build can contain multiple consecutive tags - result.tag.build = sliced_string.sub(input[start..i]).external(); - - state = State.none; - }, - } - result.len = @as(u32, @truncate(i)); - - return result; + return std.fmt.parseInt(IntType, bytes[0..byte_i], 10) catch 0; } }; - - pub const ParseResult = struct { - wildcard: Query.Token.Wildcard = .none, - valid: bool = true, - version: Version.Partial = .{}, - len: u32 = 0, - }; - - pub fn parse(sliced_string: SlicedString) ParseResult { - var input = sliced_string.slice; - var result = ParseResult{}; - - var part_i: u8 = 0; - var part_start_i: usize = 0; - var last_char_i: usize = 0; - - if (input.len == 0) { - result.valid = false; - return result; - } - var is_done = false; - - var i: usize = 0; - - for (0..input.len) |c| { - switch (input[c]) { - // newlines & whitespace - ' ', - '\t', - '\n', - '\r', - std.ascii.control_code.vt, - std.ascii.control_code.ff, - - // version separators - 'v', - '=', - => {}, - else => { - i = c; - break; - }, - } - } - - if (i == input.len) { - result.valid = false; - return result; - } - - // two passes :( - while (i < input.len) { - if (is_done) { - break; - } - - switch (input[i]) { - ' ' => { - is_done = true; - break; - }, - '|', '^', '#', '&', '%', '!' => { - is_done = true; - if (i > 0) { - i -= 1; - } - break; - }, - '0'...'9' => { - part_start_i = i; - i += 1; - - while (i < input.len and switch (input[i]) { - '0'...'9' => true, - else => false, - }) { - i += 1; - } - - last_char_i = i; - - switch (part_i) { - 0 => { - result.version.major = parseVersionNumber(input[part_start_i..last_char_i]); - part_i = 1; - }, - 1 => { - result.version.minor = parseVersionNumber(input[part_start_i..last_char_i]); - part_i = 2; - }, - 2 => { - result.version.patch = parseVersionNumber(input[part_start_i..last_char_i]); - part_i = 3; - }, - else => {}, - } - - if (i < input.len and switch (input[i]) { - // `.` is expected only if there are remaining core version numbers - '.' => part_i != 3, - else => false, - }) { - i += 1; - } - }, - '.' => { - result.valid = false; - is_done = true; - break; - }, - '-', '+' => { - // Just a plain tag with no version is invalid. - if (part_i < 2 and result.wildcard == .none) { - result.valid = false; - is_done = true; - break; - } - - part_start_i = i; - while (i < input.len and switch (input[i]) { - ' ' => true, - else => false, - }) { - i += 1; - } - const tag_result = Tag.parse(sliced_string.sub(input[part_start_i..])); - result.version.tag = tag_result.tag; - i += tag_result.len; - break; - }, - 'x', '*', 'X' => { - part_start_i = i; - i += 1; - - while (i < input.len and switch (input[i]) { - 'x', '*', 'X' => true, - else => false, - }) { - i += 1; - } - - last_char_i = i; - - if (i < input.len and switch (input[i]) { - '.' 
=> true, - else => false, - }) { - i += 1; - } - - if (result.wildcard == .none) { - switch (part_i) { - 0 => { - result.wildcard = Query.Token.Wildcard.major; - part_i = 1; - }, - 1 => { - result.wildcard = Query.Token.Wildcard.minor; - part_i = 2; - }, - 2 => { - result.wildcard = Query.Token.Wildcard.patch; - part_i = 3; - }, - else => {}, - } - } - }, - else => |c| { - - // Some weirdo npm packages in the wild have a version like "1.0.0rc.1" - // npm just expects that to work...even though it has no "-" qualifier. - if (result.wildcard == .none and part_i >= 2 and switch (c) { - 'a'...'z', 'A'...'Z' => true, - else => false, - }) { - part_start_i = i; - const tag_result = Tag.parseWithPreCount(sliced_string.sub(input[part_start_i..]), 1); - result.version.tag = tag_result.tag; - i += tag_result.len; - is_done = true; - last_char_i = i; - break; - } - - last_char_i = 0; - result.valid = false; - is_done = true; - break; - }, - } - } - - if (result.wildcard == .none) { - switch (part_i) { - 0 => { - result.wildcard = Query.Token.Wildcard.major; - }, - 1 => { - result.wildcard = Query.Token.Wildcard.minor; - }, - 2 => { - result.wildcard = Query.Token.Wildcard.patch; - }, - else => {}, - } - } - - result.len = @as(u32, @intCast(i)); - - return result; - } - - fn parseVersionNumber(input: string) ?u32 { - // max decimal u32 is 4294967295 - var bytes: [10]u8 = undefined; - var byte_i: u8 = 0; - - assert(input[0] != '.'); - - for (input) |char| { - switch (char) { - 'X', 'x', '*' => return null, - '0'...'9' => { - // out of bounds - if (byte_i + 1 > bytes.len) return null; - bytes[byte_i] = char; - byte_i += 1; - }, - ' ', '.' => break, - // ignore invalid characters - else => {}, - } - } - - // If there are no numbers - if (byte_i == 0) return null; - - if (comptime Environment.isDebug) { - return std.fmt.parseInt(u32, bytes[0..byte_i], 10) catch |err| { - Output.prettyErrorln("ERROR {s} parsing version: \"{s}\", bytes: {s}", .{ - @errorName(err), - input, - bytes[0..byte_i], - }); - return 0; - }; - } - - return std.fmt.parseInt(u32, bytes[0..byte_i], 10) catch 0; - } -}; +} const string = []const u8; diff --git a/test/cli/install/__snapshots__/bun-workspaces.test.ts.snap b/test/cli/install/__snapshots__/bun-workspaces.test.ts.snap index 754469d9fe..ac3d396fea 100644 --- a/test/cli/install/__snapshots__/bun-workspaces.test.ts.snap +++ b/test/cli/install/__snapshots__/bun-workspaces.test.ts.snap @@ -2,7 +2,7 @@ exports[`dependency on workspace without version in package.json: version: * 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": 2, @@ -125,7 +125,7 @@ exports[`dependency on workspace without version in package.json: version: * 1`] exports[`dependency on workspace without version in package.json: version: *.*.* 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": 2, @@ -248,7 +248,7 @@ exports[`dependency on workspace without version in package.json: version: *.*.* exports[`dependency on workspace without version in package.json: version: =* 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": 2, @@ -371,7 +371,7 @@ exports[`dependency on workspace without version in package.json: version: =* 1` exports[`dependency on workspace without version in package.json: version: 
kjwoehcojrgjoj 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": 2, @@ -494,7 +494,7 @@ exports[`dependency on workspace without version in package.json: version: kjwoe exports[`dependency on workspace without version in package.json: version: *.1.* 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": 2, @@ -617,7 +617,7 @@ exports[`dependency on workspace without version in package.json: version: *.1.* exports[`dependency on workspace without version in package.json: version: *-pre 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": 2, @@ -740,7 +740,7 @@ exports[`dependency on workspace without version in package.json: version: *-pre exports[`dependency on workspace without version in package.json: version: 1 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": [ @@ -896,7 +896,7 @@ exports[`dependency on workspace without version in package.json: version: 1 1`] exports[`dependency on workspace without version in package.json: version: 1.* 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": [ @@ -1052,7 +1052,7 @@ exports[`dependency on workspace without version in package.json: version: 1.* 1 exports[`dependency on workspace without version in package.json: version: 1.1.* 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": [ @@ -1208,7 +1208,7 @@ exports[`dependency on workspace without version in package.json: version: 1.1.* exports[`dependency on workspace without version in package.json: version: 1.1.0 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": [ @@ -1364,7 +1364,7 @@ exports[`dependency on workspace without version in package.json: version: 1.1.0 exports[`dependency on workspace without version in package.json: version: *-pre+build 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": [ @@ -1520,7 +1520,7 @@ exports[`dependency on workspace without version in package.json: version: *-pre exports[`dependency on workspace without version in package.json: version: *+build 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": [ @@ -1676,7 +1676,7 @@ exports[`dependency on workspace without version in package.json: version: *+bui exports[`dependency on workspace without version in package.json: version: latest 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": [ @@ -1832,7 +1832,7 @@ exports[`dependency on workspace without version in package.json: version: lates exports[`dependency on workspace without version in package.json: version: 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": 
"0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": [ @@ -1988,7 +1988,7 @@ exports[`dependency on workspace without version in package.json: version: 1`] exports[`dependency on same name as workspace and dist-tag: with version 1`] = ` "{ - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "no-deps": [ diff --git a/test/cli/install/fixtures/bun.lockb.v2 b/test/cli/install/fixtures/bun.lockb.v2 new file mode 100755 index 0000000000..2e958fd919 Binary files /dev/null and b/test/cli/install/fixtures/bun.lockb.v2 differ diff --git a/test/cli/install/migrate-bun-lockb-v2.test.ts b/test/cli/install/migrate-bun-lockb-v2.test.ts new file mode 100644 index 0000000000..bfaa86db75 --- /dev/null +++ b/test/cli/install/migrate-bun-lockb-v2.test.ts @@ -0,0 +1,63 @@ +import { file, spawn } from "bun"; +import { install_test_helpers } from "bun:internal-for-testing"; +import { expect, test } from "bun:test"; +import { bunEnv, bunExe, tempDir } from "harness"; +import { cp } from "node:fs/promises"; +import { join } from "node:path"; +const { parseLockfile } = install_test_helpers; + +test("old binary lockfile migrates successfully", async () => { + const oldLockfileContents = await file(join(import.meta.dir, "fixtures/bun.lockb.v2")).text(); + using testDir = tempDir("migrate-bun-lockb-v2", { + "bunfig.toml": "install.saveTextLockfile = false", + "package.json": JSON.stringify({ + name: "migrate-bun-lockb-v2", + dependencies: { + jquery: "~3.7.1", + "is-even": "^1.0.0", + }, + }), + }); + + await cp(join(import.meta.dir, "fixtures/bun.lockb.v2"), join(testDir, "bun.lockb")); + + const oldLockfile = parseLockfile(testDir); + + let { stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: testDir, + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + + let err = await stderr.text(); + + expect(await exited).toBe(0); + expect(err).toContain("Saved lockfile"); + + const newLockfileContents = await file(join(testDir, "bun.lockb")).bytes(); + const newLockfile = parseLockfile(testDir); + + // contents should be different due to semver numbers changing size + expect(newLockfileContents).not.toEqual(oldLockfileContents); + // but parse result should be the same + expect(newLockfile).toEqual(oldLockfile); + + // another install should not change the lockfile + ({ stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: testDir, + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + })); + + expect(await exited).toBe(0); + expect(await stderr.text()).not.toContain("Saved lockfile"); + + const newLockfileContents2 = await file(join(testDir, "bun.lockb")).bytes(); + const newLockfile2 = parseLockfile(testDir); + expect(newLockfileContents2).toEqual(newLockfileContents); + expect(newLockfile2).toEqual(newLockfile); +}); diff --git a/test/integration/next-pages/test/__snapshots__/dev-server-ssr-100.test.ts.snap b/test/integration/next-pages/test/__snapshots__/dev-server-ssr-100.test.ts.snap index 99477ec104..89959331b6 100644 --- a/test/integration/next-pages/test/__snapshots__/dev-server-ssr-100.test.ts.snap +++ b/test/integration/next-pages/test/__snapshots__/dev-server-ssr-100.test.ts.snap @@ -13596,7 +13596,7 @@ exports[`ssr works for 100-ish requests 1`] = ` "package_id": null, }, ], - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "@alloc/quick-lru": 1, diff --git 
a/test/integration/next-pages/test/__snapshots__/dev-server.test.ts.snap b/test/integration/next-pages/test/__snapshots__/dev-server.test.ts.snap index 4c8b3b6725..321cb36341 100644 --- a/test/integration/next-pages/test/__snapshots__/dev-server.test.ts.snap +++ b/test/integration/next-pages/test/__snapshots__/dev-server.test.ts.snap @@ -13596,7 +13596,7 @@ exports[`hot reloading works on the client (+ tailwind hmr) 1`] = ` "package_id": null, }, ], - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "@alloc/quick-lru": 1, diff --git a/test/integration/next-pages/test/__snapshots__/next-build.test.ts.snap b/test/integration/next-pages/test/__snapshots__/next-build.test.ts.snap index 8be4c02bd4..05004d7e99 100644 --- a/test/integration/next-pages/test/__snapshots__/next-build.test.ts.snap +++ b/test/integration/next-pages/test/__snapshots__/next-build.test.ts.snap @@ -13596,7 +13596,7 @@ exports[`next build works: bun 1`] = ` "package_id": null, }, ], - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "@alloc/quick-lru": 1, @@ -39534,7 +39534,7 @@ exports[`next build works: node 1`] = ` "package_id": null, }, ], - "format": "v2", + "format": "v3", "meta_hash": "0000000000000000000000000000000000000000000000000000000000000000", "package_index": { "@alloc/quick-lru": 1,
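
The core pattern this diff relies on is instantiating one extern struct at two integer widths: u32 reproduces the legacy bun.lockb v2 on-disk layout, and u64 produces the new v3 layout, with a one-way migrate() that only the u32 instantiation may use. Below is a minimal standalone sketch of that pattern; the names (SemVer, tag_hash, _pad) are illustrative, not Bun's actual types.

const std = @import("std");

fn SemVer(comptime Int: type) type {
    return extern struct {
        major: Int = 0,
        minor: Int = 0,
        patch: Int = 0,
        // With u32 fields, 4 bytes of explicit padding keep the u64 field
        // below 8-byte aligned without compiler-inserted padding bytes,
        // which matters when the struct is serialized byte-for-byte; with
        // u64 fields no padding is needed.
        _pad: [if (Int == u32) 4 else 0]u8 = .{0} ** (if (Int == u32) 4 else 0),
        tag_hash: u64 = 0,

        const Self = @This();

        // Widening is one-way: referencing migrate() on any width other
        // than u32 is a compile error, mirroring the guard in the diff.
        pub fn migrate(self: Self) SemVer(u64) {
            if (comptime Int != u32) @compileError("unexpected Int");
            return .{
                .major = self.major,
                .minor = self.minor,
                .patch = self.patch,
                .tag_hash = self.tag_hash,
            };
        }
    };
}

pub fn main() void {
    const old = SemVer(u32){ .major = 3, .minor = 7, .patch = 1 };
    const new = old.migrate();
    std.debug.print("{d}.{d}.{d}: {d} -> {d} bytes\n", .{
        new.major,            new.minor,            new.patch,
        @sizeOf(SemVer(u32)), @sizeOf(SemVer(u64)),
    });
}

Because both instantiations are extern structs with fully explicit padding, the u32 variant can still be memory-mapped directly from an old lockfile and then widened field-by-field, which is why the migration path needs no per-version reparsing.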
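The maxInt(u32) -> maxInt(u64) bumps in SemverQuery.zig are load-bearing, not cosmetic: wildcard upper bounds such as "1.x" are encoded as a concrete version with maxInt sentinels in the minor/patch slots, and once parsed versions are u64 the sentinels must be u64 as well, or comparisons can exclude in-range versions. A small self-contained sketch of the failure the widening prevents (the Version struct and lte helper here are illustrative, not Bun's):

const std = @import("std");

// Range upper bounds like "1.x" become {1, sentinel, sentinel}. If the
// sentinel stays at maxInt(u32) while versions are u64, a version whose
// minor exceeds the u32 range would incorrectly fall outside "1.x".
const Version = struct { major: u64, minor: u64, patch: u64 };

fn lte(a: Version, b: Version) bool {
    if (a.major != b.major) return a.major < b.major;
    if (a.minor != b.minor) return a.minor < b.minor;
    return a.patch <= b.patch;
}

pub fn main() void {
    const stale = Version{ .major = 1, .minor = std.math.maxInt(u32), .patch = std.math.maxInt(u32) };
    const widened = Version{ .major = 1, .minor = std.math.maxInt(u64), .patch = std.math.maxInt(u64) };
    // Hypothetical version whose minor number needs more than 32 bits.
    const v = Version{ .major = 1, .minor = @as(u64, std.math.maxInt(u32)) + 1, .patch = 0 };

    std.debug.print("stale sentinel admits it: {}\n", .{lte(v, stale)}); // false (the bug)
    std.debug.print("u64 sentinel admits it:   {}\n", .{lte(v, widened)}); // true
}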
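Tag.orderPre in the diff implements SemVer's pre-release precedence: split identifiers on '.', compare numeric identifiers as integers and alphanumeric ones as strings, and rank numeric identifiers below alphanumeric ones. A compact standalone sketch of the same rule, using std.mem.splitScalar in place of Bun's strings helpers:

const std = @import("std");

fn orderPre(lhs: []const u8, rhs: []const u8) std.math.Order {
    var li = std.mem.splitScalar(u8, lhs, '.');
    var ri = std.mem.splitScalar(u8, rhs, '.');
    while (true) {
        const l = li.next();
        const r = ri.next();
        if (l == null and r == null) return .eq;
        if (r == null) return .gt; // the side with more identifiers wins
        if (l == null) return .lt;

        const ln: ?u64 = std.fmt.parseUnsigned(u64, l.?, 10) catch null;
        const rn: ?u64 = std.fmt.parseUnsigned(u64, r.?, 10) catch null;

        // Numeric identifiers always rank below alphanumeric ones.
        if (ln != null and rn == null) return .lt;
        if (ln == null and rn != null) return .gt;

        const ord = if (ln != null)
            std.math.order(ln.?, rn.?) // both numeric: compare as integers
        else
            std.mem.order(u8, l.?, r.?); // both alphanumeric: compare as strings
        if (ord != .eq) return ord;
    }
}

pub fn main() void {
    // canary.9 precedes canary.10 because identifiers compare numerically.
    std.debug.print("{s}\n", .{@tagName(orderPre("canary.9", "canary.10"))}); // lt
    // "1" precedes "alpha": numeric ranks below alphanumeric.
    std.debug.print("{s}\n", .{@tagName(orderPre("1", "alpha"))}); // lt
}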
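Finally, parseVersionNumber's scratch buffer growing from [10]u8 to [20]u8 follows directly from the width change: the buffer must hold the longest decimal rendering of the integer type (maxInt(u32) = 4294967295 is 10 digits; maxInt(u64) = 18446744073709551615 is 20). A sketch of that sizing rule, with an illustrative parseCore helper rather than Bun's function:

const std = @import("std");

// The buffer length is derived from the type, so widening the integer
// automatically widens the buffer. Oversized inputs are rejected with
// null rather than silently truncated, matching the diff's behavior.
fn maxDigits(comptime Int: type) usize {
    return std.fmt.comptimePrint("{d}", .{std.math.maxInt(Int)}).len;
}

fn parseCore(comptime Int: type, digits: []const u8) ?Int {
    var bytes: [maxDigits(Int)]u8 = undefined;
    var n: usize = 0;
    for (digits) |c| switch (c) {
        '0'...'9' => {
            if (n == bytes.len) return null; // would overflow the buffer
            bytes[n] = c;
            n += 1;
        },
        else => break,
    };
    if (n == 0) return null;
    return std.fmt.parseInt(Int, bytes[0..n], 10) catch null;
}

pub fn main() void {
    std.debug.print("{?d}\n", .{parseCore(u64, "18446744073709551615")}); // fits in u64
    std.debug.print("{?d}\n", .{parseCore(u32, "18446744073709551615")}); // null: too wide for u32
}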