diff --git a/docs/cli/publish.md b/docs/cli/publish.md
index 29f3ed1877..6adf9c4a84 100644
--- a/docs/cli/publish.md
+++ b/docs/cli/publish.md
@@ -1,6 +1,6 @@
 Use `bun publish` to publish a package to the npm registry.
 
-`bun publish` will automatically pack your package into a tarball, strip workspace protocols from the `package.json` (resolving versions if necessary), and publish to the registry specified in your configuration files. Both `bunfig.toml` and `.npmrc` files are supported.
+`bun publish` will automatically pack your package into a tarball, strip catalog and workspace protocols from the `package.json` (resolving versions if necessary), and publish to the registry specified in your configuration files. Both `bunfig.toml` and `.npmrc` files are supported.
 
 ```sh
 ## Publishing the package from the current working directory
diff --git a/docs/install/catalogs.md b/docs/install/catalogs.md
new file mode 100644
index 0000000000..f4d23b6359
--- /dev/null
+++ b/docs/install/catalogs.md
@@ -0,0 +1,289 @@
+Catalogs in Bun provide a straightforward way to share common dependency versions across multiple packages in a monorepo. Rather than specifying the same versions repeatedly in each workspace package, you define them once in the root package.json and reference them consistently throughout your project.
+
+## Overview
+
+Unlike traditional dependency management, where each workspace package specifies its versions independently, catalogs let you:
+
+1. Define version catalogs in the root package.json
+2. Reference these versions with a simple `catalog:` protocol
+3. Update all packages simultaneously by changing the version in just one place
+
+This is especially useful in large monorepos where dozens of packages need to use the same version of key dependencies.
+
+## How to Use Catalogs
+
+### Directory Structure Example
+
+Consider a monorepo with the following structure:
+
+```
+my-monorepo/
+├── package.json
+├── bun.lock
+└── packages/
+    ├── app/
+    │   └── package.json
+    ├── ui/
+    │   └── package.json
+    └── utils/
+        └── package.json
+```
+
+### 1. Define Catalogs in the Root package.json
+
+In your root-level `package.json`, add a `catalog` or `catalogs` field within the `workspaces` object:
+
+```json
+{
+  "name": "my-monorepo",
+  "workspaces": {
+    "packages": ["packages/*"],
+    "catalog": {
+      "react": "^19.0.0",
+      "react-dom": "^19.0.0"
+    },
+    "catalogs": {
+      "testing": {
+        "jest": "30.0.0",
+        "testing-library": "14.0.0"
+      }
+    }
+  }
+}
+```
+
+### 2. Reference Catalog Versions in Workspace Packages
+
+In your workspace packages, use the `catalog:` protocol to reference versions:
+
+**packages/app/package.json**
+
+```json
+{
+  "name": "app",
+  "dependencies": {
+    "react": "catalog:",
+    "react-dom": "catalog:",
+    "jest": "catalog:testing"
+  }
+}
+```
+
+**packages/ui/package.json**
+
+```json
+{
+  "name": "ui",
+  "dependencies": {
+    "react": "catalog:",
+    "react-dom": "catalog:"
+  },
+  "devDependencies": {
+    "jest": "catalog:testing",
+    "testing-library": "catalog:testing"
+  }
+}
+```
+
+### 3. Run `bun install`
+
+Run `bun install` to install all dependencies according to the catalog versions.
+
+## Catalog vs Catalogs
+
+Bun supports two ways to define catalogs:
+
+1. **`catalog`** (singular): A single default catalog for commonly used dependencies
+
+   ```json
+   "catalog": {
+     "react": "^19.0.0",
+     "react-dom": "^19.0.0"
+   }
+   ```
+
+   Reference it with a bare `catalog:`:
+
+   ```json
+   "dependencies": {
+     "react": "catalog:"
+   }
+   ```
+
+2. **`catalogs`** (plural): Multiple named catalogs for grouping dependencies
+
+   ```json
+   "catalogs": {
+     "testing": {
+       "jest": "30.0.0"
+     },
+     "ui": {
+       "tailwind": "4.0.0"
+     }
+   }
+   ```
+
+   Reference with `catalog:<name>`:
+
+   ```json
+   "dependencies": {
+     "jest": "catalog:testing",
+     "tailwind": "catalog:ui"
+   }
+   ```
+
+## Benefits of Using Catalogs
+
+- **Consistency**: Ensures all packages use the same version of critical dependencies
+- **Maintenance**: Update a dependency version in one place instead of across multiple package.json files
+- **Clarity**: Makes it obvious which dependencies are standardized across your monorepo
+- **Simplicity**: No need for complex version resolution strategies or external tools
+
+## Real-World Example
+
+Here's a more comprehensive example for a React application:
+
+**Root package.json**
+
+```json
+{
+  "name": "react-monorepo",
+  "workspaces": {
+    "packages": ["packages/*"],
+    "catalog": {
+      "react": "^19.0.0",
+      "react-dom": "^19.0.0",
+      "react-router-dom": "^6.15.0"
+    },
+    "catalogs": {
+      "build": {
+        "webpack": "5.88.2",
+        "babel": "7.22.10"
+      },
+      "testing": {
+        "jest": "29.6.2",
+        "react-testing-library": "14.0.0"
+      }
+    }
+  },
+  "devDependencies": {
+    "typescript": "5.1.6"
+  }
+}
+```
+
+**packages/app/package.json**
+
+```json
+{
+  "name": "app",
+  "dependencies": {
+    "react": "catalog:",
+    "react-dom": "catalog:",
+    "react-router-dom": "catalog:",
+    "@monorepo/ui": "workspace:*",
+    "@monorepo/utils": "workspace:*"
+  },
+  "devDependencies": {
+    "webpack": "catalog:build",
+    "babel": "catalog:build",
+    "jest": "catalog:testing",
+    "react-testing-library": "catalog:testing"
+  }
+}
+```
+
+**packages/ui/package.json**
+
+```json
+{
+  "name": "@monorepo/ui",
+  "dependencies": {
+    "react": "catalog:",
+    "react-dom": "catalog:"
+  },
+  "devDependencies": {
+    "jest": "catalog:testing",
+    "react-testing-library": "catalog:testing"
+  }
+}
+```
+
+**packages/utils/package.json**
+
+```json
+{
+  "name": "@monorepo/utils",
+  "dependencies": {
+    "react": "catalog:"
+  },
+  "devDependencies": {
+    "jest": "catalog:testing"
+  }
+}
+```
+
+## Updating Versions
+
+To update versions across all packages, simply change the version in the root package.json:
+
+```json
+"catalog": {
+  "react": "^19.1.0", // Updated from ^19.0.0
+  "react-dom": "^19.1.0" // Updated from ^19.0.0
+}
+```
+
+Then run `bun install` to update all packages.
+
+## Lockfile Integration
+
+Bun's lockfile tracks catalog versions, making it easy to ensure consistent installations across different environments. The lockfile includes:
+
+- The catalog definitions from your package.json
+- The resolution of each cataloged dependency
+
+```
+// bun.lock (excerpt)
+{
+  "lockfileVersion": 1,
+  "workspaces": {
+    "": {
+      "name": "react-monorepo",
+    },
+    "packages/app": {
+      "name": "app",
+      "dependencies": {
+        "react": "catalog:",
+        "react-dom": "catalog:",
+        ...
+      },
+    },
+    ...
+  },
+  "catalog": {
+    "react": "^19.0.0",
+    "react-dom": "^19.0.0",
+    ...
+  },
+  "catalogs": {
+    "build": {
+      "webpack": "5.88.2",
+      ...
+    },
+    ...
+  },
+  "packages": {
+    ...
+ } +} +``` + +## Limitations and Edge Cases + +- Catalog references must match a dependency defined in either `catalog` or one of the named `catalogs` +- Empty strings and whitespace in catalog names are ignored (treated as default catalog) +- Invalid dependency versions in catalogs will fail to resolve during `bun install` +- Catalogs are only available within workspaces; they cannot be used outside the monorepo + +Bun's catalog system provides a powerful yet simple way to maintain consistency across your monorepo without introducing additional complexity to your workflow. diff --git a/docs/nav.ts b/docs/nav.ts index 5a9968911e..89d6317d86 100644 --- a/docs/nav.ts +++ b/docs/nav.ts @@ -183,6 +183,9 @@ export default { page("install/workspaces", "Workspaces", { description: "Bun's package manager supports workspaces and monorepo development workflows.", }), + page("install/catalogs", "Catalogs", { + description: "Use catalogs to share dependency versions between packages in a monorepo.", + }), page("install/lifecycle", "Lifecycle scripts", { description: "How Bun handles package lifecycle scripts with trustedDependencies", }), diff --git a/src/cli/pack_command.zig b/src/cli/pack_command.zig index 3518d52956..1e0f5ee945 100644 --- a/src/cli/pack_command.zig +++ b/src/cli/pack_command.zig @@ -2035,7 +2035,7 @@ pub const PackCommand = struct { return entry.clear(); } - /// Strip workspace protocols from dependency versions then + /// Strips workspace and catalog protocols from dependency versions then /// returns the printed json fn editRootPackageJSON( allocator: std.mem.Allocator, @@ -2126,6 +2126,48 @@ pub const PackCommand = struct { }, .{}, ); + } else if (strings.withoutPrefixIfPossibleComptime(package_spec, "catalog:")) |catalog_name_str| { + const dep_name_str = dependency.key.?.asString(allocator).?; + + const lockfile = maybe_lockfile orelse { + Output.errGeneric("Failed to resolve catalog version for \"{s}\" in `{s}` (catalogs require a lockfile).", .{ + dep_name_str, + dependency_group, + }); + Global.crash(); + }; + + const catalog_name = Semver.String.init(catalog_name_str, catalog_name_str); + + const catalog = lockfile.catalogs.getGroup(lockfile.buffers.string_bytes.items, catalog_name, catalog_name_str) orelse { + Output.errGeneric("Failed to resolve catalog version for \"{s}\" in `{s}` (no matching catalog).", .{ + dep_name_str, + dependency_group, + }); + Global.crash(); + }; + + const dep_name = Semver.String.init(dep_name_str, dep_name_str); + + const dep = catalog.getContext(dep_name, Semver.String.ArrayHashContext{ + .arg_buf = dep_name_str, + .existing_buf = lockfile.buffers.string_bytes.items, + }) orelse { + Output.errGeneric("Failed to resolve catalog version for \"{s}\" in `{s}` (no matching catalog dependency).", .{ + dep_name_str, + dependency_group, + }); + Global.crash(); + }; + + dependency.value = Expr.allocate( + allocator, + E.String, + .{ + .data = try allocator.dupe(u8, dep.version.literal.slice(lockfile.buffers.string_bytes.items)), + }, + .{}, + ); } } }, diff --git a/src/install/bun.lock.zig b/src/install/bun.lock.zig index 36be553942..d00e5db5f1 100644 --- a/src/install/bun.lock.zig +++ b/src/install/bun.lock.zig @@ -68,7 +68,7 @@ pub const Stringifier = struct { // _ = this; // } - pub fn saveFromBinary(allocator: std.mem.Allocator, lockfile: *const BinaryLockfile, load_result: *const LoadResult, writer: anytype) @TypeOf(writer).Error!void { + pub fn saveFromBinary(allocator: std.mem.Allocator, lockfile: *BinaryLockfile, load_result: *const 
LoadResult, writer: anytype) @TypeOf(writer).Error!void { const buf = lockfile.buffers.string_bytes.items; const extern_strings = lockfile.buffers.extern_strings.items; const deps_buf = lockfile.buffers.dependencies.items; @@ -335,26 +335,8 @@ pub const Stringifier = struct { } if (lockfile.overrides.map.count() > 0) { - var overrides_buf: std.ArrayListUnmanaged(Dependency) = try .initCapacity(allocator, lockfile.overrides.map.count()); - defer overrides_buf.deinit(allocator); + lockfile.overrides.sort(lockfile); - try overrides_buf.appendSlice(allocator, lockfile.overrides.map.values()); - - const OverridesSortCtx = struct { - buf: string, - - pub fn lessThan(this: *@This(), l_dep: Dependency, r_dep: Dependency) bool { - return l_dep.name.order(&r_dep.name, this.buf, this.buf) == .lt; - } - }; - - var sort_ctx: OverridesSortCtx = .{ - .buf = buf, - }; - - std.sort.pdq(Dependency, overrides_buf.items, &sort_ctx, OverridesSortCtx.lessThan); - - // lockfile.overrides.sort(lockfile); try writeIndent(writer, indent); try writer.writeAll( \\"overrides": { @@ -373,6 +355,64 @@ pub const Stringifier = struct { try writer.writeAll("},\n"); } + if (lockfile.catalogs.hasAny()) { + // this will sort the default map, and each + // named catalog map + lockfile.catalogs.sort(lockfile); + } + + if (lockfile.catalogs.default.count() > 0) { + try writeIndent(writer, indent); + try writer.writeAll( + \\"catalog": { + \\ + ); + indent.* += 1; + for (lockfile.catalogs.default.values()) |catalog_dep| { + try writeIndent(writer, indent); + try writer.print( + \\{}: {}, + \\ + , .{ catalog_dep.name.fmtJson(buf, .{}), catalog_dep.version.literal.fmtJson(buf, .{}) }); + } + + try decIndent(writer, indent); + try writer.writeAll("},\n"); + } + + if (lockfile.catalogs.groups.count() > 0) { + try writeIndent(writer, indent); + try writer.writeAll( + \\"catalogs": { + \\ + ); + indent.* += 1; + + var iter = lockfile.catalogs.groups.iterator(); + while (iter.next()) |entry| { + const catalog_name = entry.key_ptr; + const catalog_deps = entry.value_ptr; + + try writeIndent(writer, indent); + try writer.print("{}: {{\n", .{catalog_name.fmtJson(buf, .{})}); + indent.* += 1; + + for (catalog_deps.values()) |catalog_dep| { + try writeIndent(writer, indent); + try writer.print( + \\{}: {}, + \\ + , .{ catalog_dep.name.fmtJson(buf, .{}), catalog_dep.version.literal.fmtJson(buf, .{}) }); + } + + try decIndent(writer, indent); + try writer.writeAll("},\n"); + } + + try decIndent(writer, indent); + try writer.writeAll("},\n"); + } + var tree_deps_sort_buf: std.ArrayListUnmanaged(DependencyID) = .{}; defer tree_deps_sort_buf.deinit(allocator); @@ -1001,6 +1041,8 @@ const ParseError = OOM || error{ InvalidPackagesTree, InvalidTrustedDependenciesSet, InvalidOverridesObject, + InvalidCatalogObject, + InvalidCatalogsObject, InvalidDependencyName, InvalidDependencyVersion, InvalidPackageResolution, @@ -1251,6 +1293,148 @@ pub fn parseIntoBinaryLockfile( } } + if (root.get("catalog")) |catalog_expr| { + if (!catalog_expr.isObject()) { + try log.addError(source, catalog_expr.loc, "Expected an object"); + return error.InvalidCatalogObject; + } + + for (catalog_expr.data.e_object.properties.slice()) |prop| { + const key = prop.key.?; + const value = prop.value.?; + + if (!key.isString() or key.data.e_string.len() == 0) { + try log.addError(source, key.loc, "Expected a non-empty string"); + return error.InvalidCatalogObject; + } + + const dep_name_str = key.asString(allocator).?; + const dep_name_hash = 
String.Builder.stringHash(dep_name_str); + const dep_name = try string_buf.appendWithHash(dep_name_str, dep_name_hash); + + if (!value.isString()) { + try log.addError(source, value.loc, "Expected a string"); + return error.InvalidCatalogObject; + } + + const version_str = value.asString(allocator).?; + const version_hash = String.Builder.stringHash(version_str); + const version = try string_buf.appendWithHash(version_str, version_hash); + const version_sliced = version.sliced(string_buf.bytes.items); + + const dep: Dependency = .{ + .name = dep_name, + .name_hash = dep_name_hash, + .version = Dependency.parse( + allocator, + dep_name, + dep_name_hash, + version_sliced.slice, + &version_sliced, + log, + manager, + ) orelse { + try log.addError(source, value.loc, "Invalid catalog version"); + return error.InvalidCatalogObject; + }, + }; + + const entry = try lockfile.catalogs.default.getOrPutContext( + allocator, + dep_name, + String.arrayHashContext(lockfile, null), + ); + + if (entry.found_existing) { + try log.addError(source, key.loc, "Duplicate catalog entry"); + return error.InvalidCatalogObject; + } + + entry.value_ptr.* = dep; + } + } + + if (root.get("catalogs")) |catalogs_expr| { + if (!catalogs_expr.isObject()) { + try log.addError(source, catalogs_expr.loc, "Expected an object"); + return error.InvalidCatalogsObject; + } + + for (catalogs_expr.data.e_object.properties.slice()) |catalog_prop| { + const catalog_key = catalog_prop.key.?; + const catalog_value = catalog_prop.value.?; + + if (!catalog_key.isString() or catalog_key.data.e_string.len() == 0) { + try log.addError(source, catalog_key.loc, "Expected a non-empty string"); + return error.InvalidCatalogsObject; + } + + if (!catalog_value.isObject()) { + try log.addError(source, catalog_value.loc, "Expected an object"); + return error.InvalidCatalogsObject; + } + + const catalog_name_str = catalog_key.asString(allocator).?; + const catalog_name = try string_buf.append(catalog_name_str); + + const group = try lockfile.catalogs.getOrPutGroup(lockfile, catalog_name); + + for (catalog_value.data.e_object.properties.slice()) |prop| { + const key = prop.key.?; + const value = prop.value.?; + + if (!key.isString() or key.data.e_string.len() == 0) { + try log.addError(source, key.loc, "Expected a non-empty string"); + return error.InvalidCatalogObject; + } + + const dep_name_str = key.asString(allocator).?; + const dep_name_hash = String.Builder.stringHash(dep_name_str); + const dep_name = try string_buf.appendWithHash(dep_name_str, dep_name_hash); + + if (!value.isString()) { + try log.addError(source, value.loc, "Expected a string"); + return error.InvalidCatalogObject; + } + + const version_str = value.asString(allocator).?; + const version_hash = String.Builder.stringHash(version_str); + const version = try string_buf.appendWithHash(version_str, version_hash); + const version_sliced = version.sliced(string_buf.bytes.items); + + const dep: Dependency = .{ + .name = dep_name, + .name_hash = dep_name_hash, + .version = Dependency.parse( + allocator, + dep_name, + dep_name_hash, + version_sliced.slice, + &version_sliced, + log, + manager, + ) orelse { + try log.addError(source, value.loc, "Invalid catalog version"); + return error.InvalidCatalogObject; + }, + }; + + const entry = try group.getOrPutContext( + allocator, + dep_name, + String.arrayHashContext(lockfile, null), + ); + + if (entry.found_existing) { + try log.addError(source, key.loc, "Duplicate catalog entry"); + return error.InvalidCatalogObject; + } + + entry.value_ptr.* 
= dep; + } + } + } + const workspaces_obj = root.getObject("workspaces") orelse { try log.addError(source, root.loc, "Missing a workspaces object property"); return error.InvalidWorkspaceObject; diff --git a/src/install/dependency.zig b/src/install/dependency.zig index 5315714409..cad0042e8f 100644 --- a/src/install/dependency.zig +++ b/src/install/dependency.zig @@ -495,6 +495,8 @@ pub const Version = struct { /// GitHub Repository (via REST API) github = 8, + catalog = 9, + pub const map = bun.ComptimeStringMap(Tag, .{ .{ "npm", .npm }, .{ "dist_tag", .dist_tag }, @@ -504,6 +506,7 @@ pub const Version = struct { .{ "workspace", .workspace }, .{ "git", .git }, .{ "github", .github }, + .{ "catalog", .catalog }, }); pub const fromJS = map.fromJS; @@ -575,6 +578,11 @@ pub const Version = struct { return .folder; } }, + 'c' => { + if (strings.hasPrefixComptime(dependency, "catalog:")) { + return .catalog; + } + }, // git_user/repo // git_tarball.tgz // github:user/repo @@ -820,6 +828,10 @@ pub const Version = struct { workspace: String, git: Repository, github: Repository, + + // dep version without 'catalog:' protocol + // empty string == default catalog + catalog: String, }; }; @@ -1240,6 +1252,19 @@ pub fn parseWithTag( .literal = sliced.value(), }; }, + .catalog => { + bun.assert(strings.hasPrefixComptime(dependency, "catalog:")); + + const group = dependency["catalog:".len..]; + + const trimmed = strings.trim(group, &strings.whitespace_chars); + + return .{ + .value = .{ .catalog = sliced.sub(trimmed).value() }, + .tag = .catalog, + .literal = sliced.value(), + }; + }, } } diff --git a/src/install/install.zig b/src/install/install.zig index b4eac1434a..64706024bc 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -5175,6 +5175,23 @@ pub const PackageManager = struct { const debug = Output.scoped(.PackageManager, true); + fn updateNameAndNameHashFromVersionReplacement( + lockfile: *const Lockfile, + original_name: String, + original_name_hash: PackageNameHash, + new_version: Dependency.Version, + ) struct { String, PackageNameHash } { + return switch (new_version.tag) { + // only get name hash for npm and dist_tag. git, github, tarball don't have names until after extracting tarball + .dist_tag => .{ new_version.value.dist_tag.name, String.Builder.stringHash(lockfile.str(&new_version.value.dist_tag.name)) }, + .npm => .{ new_version.value.npm.name, String.Builder.stringHash(lockfile.str(&new_version.value.npm.name)) }, + .git => .{ new_version.value.git.package_name, original_name_hash }, + .github => .{ new_version.value.github.package_name, original_name_hash }, + .tarball => .{ new_version.value.tarball.package_name, original_name_hash }, + else => .{ original_name, original_name_hash }, + }; + } + /// Q: "What do we do with a dependency in a package.json?" /// A: "We enqueue it!" 
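    ///
    /// For a `catalog:` dependency, the version is first swapped for the matching
    /// entry in `lockfile.catalogs` (the default catalog when the group name is
    /// empty), so `"react": "catalog:"` is enqueued as the version declared in the
    /// root package.json's catalog.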
fn enqueueDependencyWithMainAndSuccessFn( @@ -5222,22 +5239,30 @@ pub const PackageManager = struct { // allow overriding all dependencies unless the dependency is coming directly from an alias, "npm:" or // if it's a workspaceOnly dependency - if (!dependency.behavior.isWorkspaceOnly() and (dependency.version.tag != .npm or !dependency.version.value.npm.is_alias and this.lockfile.hasOverrides())) { + if (!dependency.behavior.isWorkspaceOnly() and (dependency.version.tag != .npm or !dependency.version.value.npm.is_alias)) { if (this.lockfile.overrides.get(name_hash)) |new| { debug("override: {s} -> {s}", .{ this.lockfile.str(&dependency.version.literal), this.lockfile.str(&new.literal) }); - name, name_hash = switch (new.tag) { - // only get name hash for npm and dist_tag. git, github, tarball don't have names until after extracting tarball - .dist_tag => .{ new.value.dist_tag.name, String.Builder.stringHash(this.lockfile.str(&new.value.dist_tag.name)) }, - .npm => .{ new.value.npm.name, String.Builder.stringHash(this.lockfile.str(&new.value.npm.name)) }, - .git => .{ new.value.git.package_name, name_hash }, - .github => .{ new.value.github.package_name, name_hash }, - .tarball => .{ new.value.tarball.package_name, name_hash }, - else => .{ name, name_hash }, - }; + + name, name_hash = updateNameAndNameHashFromVersionReplacement(this.lockfile, name, name_hash, new); + + if (new.tag == .catalog) { + if (this.lockfile.catalogs.get(this.lockfile, new.value.catalog, name)) |catalog_dep| { + name, name_hash = updateNameAndNameHashFromVersionReplacement(this.lockfile, name, name_hash, catalog_dep.version); + break :version catalog_dep.version; + } + } // `name_hash` stays the same break :version new; } + + if (dependency.version.tag == .catalog) { + if (this.lockfile.catalogs.get(this.lockfile, dependency.version.value.catalog, name)) |catalog_dep| { + name, name_hash = updateNameAndNameHashFromVersionReplacement(this.lockfile, name, name_hash, catalog_dep.version); + + break :version catalog_dep.version; + } + } } // explicit copy here due to `dependency.version` becoming undefined @@ -14730,6 +14755,7 @@ pub const PackageManager = struct { for (lockfile.patched_dependencies.values()) |patch_dep| builder.count(patch_dep.path.slice(lockfile.buffers.string_bytes.items)); lockfile.overrides.count(&lockfile, builder); + lockfile.catalogs.count(&lockfile, builder); maybe_root.scripts.count(lockfile.buffers.string_bytes.items, *Lockfile.StringBuilder, builder); const off = @as(u32, @truncate(manager.lockfile.buffers.dependencies.items.len)); @@ -14762,6 +14788,7 @@ pub const PackageManager = struct { }; manager.lockfile.overrides = try lockfile.overrides.clone(manager, &lockfile, manager.lockfile, builder); + manager.lockfile.catalogs = try lockfile.catalogs.clone(manager, &lockfile, manager.lockfile, builder); manager.lockfile.trusted_dependencies = if (lockfile.trusted_dependencies) |trusted_dependencies| try trusted_dependencies.clone(manager.lockfile.allocator) @@ -14873,13 +14900,28 @@ pub const PackageManager = struct { try manager.enqueueDependencyWithMain( @truncate(dependency_i), dependency, - manager.lockfile.buffers.resolutions.items[dependency_i], + invalid_package_id, false, ); } } } + if (manager.summary.catalogs_changed) { + for (manager.lockfile.buffers.dependencies.items, 0..) 
|*dep, _dep_id| { + const dep_id: DependencyID = @intCast(_dep_id); + if (dep.version.tag != .catalog) continue; + + manager.lockfile.buffers.resolutions.items[dep_id] = invalid_package_id; + try manager.enqueueDependencyWithMain( + dep_id, + dep, + invalid_package_id, + false, + ); + } + } + // Split this into two passes because the below may allocate memory or invalidate pointers if (manager.summary.add > 0 or manager.summary.update > 0) { const changes = @as(PackageID, @truncate(mapping.len)); diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index 5fe6f09495..40175a60ae 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -156,6 +156,7 @@ workspace_versions: VersionHashMap = .{}, trusted_dependencies: ?TrustedDependenciesSet = null, patched_dependencies: PatchedDependenciesMap = .{}, overrides: OverrideMap = .{}, +catalogs: CatalogMap = .{}, const Stream = std.io.FixedBufferStream([]u8); pub const default_filename = "bun.lockb"; @@ -465,6 +466,7 @@ pub fn loadFromBytes(this: *Lockfile, pm: ?*PackageManager, buf: []u8, allocator this.workspace_paths = .{}; this.workspace_versions = .{}; this.overrides = .{}; + this.catalogs = .{}; this.patched_dependencies = .{}; const load_result = Lockfile.Serializer.load(this, &stream, allocator, log, pm) catch |err| { @@ -1385,8 +1387,10 @@ pub fn cleanWithLogger( { var builder = new.stringBuilder(); old.overrides.count(old, &builder); + old.catalogs.count(old, &builder); try builder.allocate(); new.overrides = try old.overrides.clone(manager, old, new, &builder); + new.catalogs = try old.catalogs.clone(manager, old, new, &builder); } // Step 1. Recreate the lockfile with only the packages that are still alive @@ -2640,6 +2644,7 @@ pub fn initEmpty(this: *Lockfile, allocator: Allocator) void { .workspace_paths = .{}, .workspace_versions = .{}, .overrides = .{}, + .catalogs = .{}, .meta_hash = zero_hash, }; } @@ -2996,9 +3001,374 @@ pub const PackageIndex = struct { }; }; -pub inline fn hasOverrides(this: *Lockfile) bool { - return this.overrides.map.count() > 0; -} +pub const CatalogMap = struct { + const Map = std.ArrayHashMapUnmanaged(String, Dependency, String.ArrayHashContext, true); + + default: Map = .{}, + groups: std.ArrayHashMapUnmanaged(String, Map, String.ArrayHashContext, true) = .{}, + + pub fn hasAny(this: *const CatalogMap) bool { + return this.default.count() > 0 or this.groups.count() > 0; + } + + pub fn get(this: *CatalogMap, lockfile: *const Lockfile, catalog_name: String, dep_name: String) ?Dependency { + if (catalog_name.isEmpty()) { + if (this.default.count() == 0) { + return null; + } + return this.default.getContext(dep_name, String.arrayHashContext(lockfile, null)) orelse { + return null; + }; + } + + const group = this.groups.getContext(catalog_name, String.arrayHashContext(lockfile, null)) orelse { + return null; + }; + + if (group.count() == 0) { + return null; + } + + return group.getContext(dep_name, String.arrayHashContext(lockfile, null)) orelse { + return null; + }; + } + + pub fn getOrPutGroup(this: *CatalogMap, lockfile: *Lockfile, catalog_name: String) OOM!*Map { + if (catalog_name.isEmpty()) { + return &this.default; + } + + const entry = try this.groups.getOrPutContext( + lockfile.allocator, + catalog_name, + String.arrayHashContext(lockfile, null), + ); + if (!entry.found_existing) { + entry.value_ptr.* = .{}; + } + + return entry.value_ptr; + } + + pub fn getGroup(this: *CatalogMap, map_buf: string, catalog_name: String, catalog_name_buf: string) ?*Map { + if 
(catalog_name.isEmpty()) { + return &this.default; + } + + return this.groups.getPtrContext(catalog_name, String.ArrayHashContext{ + .arg_buf = catalog_name_buf, + .existing_buf = map_buf, + }); + } + + pub fn parseCount(_: *CatalogMap, lockfile: *Lockfile, expr: Expr, builder: *Lockfile.StringBuilder) void { + if (expr.get("catalog")) |default_catalog| { + switch (default_catalog.data) { + .e_object => |obj| { + for (obj.properties.slice()) |item| { + const dep_name = item.key.?.asString(lockfile.allocator).?; + builder.count(dep_name); + switch (item.value.?.data) { + .e_string => |version_str| { + builder.count(version_str.slice(lockfile.allocator)); + }, + else => {}, + } + } + }, + else => {}, + } + } + + if (expr.get("catalogs")) |catalogs| { + switch (catalogs.data) { + .e_object => |catalog_names| { + for (catalog_names.properties.slice()) |catalog| { + const catalog_name = catalog.key.?.asString(lockfile.allocator).?; + builder.count(catalog_name); + switch (catalog.value.?.data) { + .e_object => |obj| { + for (obj.properties.slice()) |item| { + const dep_name = item.key.?.asString(lockfile.allocator).?; + builder.count(dep_name); + switch (item.value.?.data) { + .e_string => |version_str| { + builder.count(version_str.slice(lockfile.allocator)); + }, + else => {}, + } + } + }, + else => {}, + } + } + }, + else => {}, + } + } + } + + pub fn parseAppend( + this: *CatalogMap, + pm: *PackageManager, + lockfile: *Lockfile, + log: *logger.Log, + source: *const logger.Source, + expr: Expr, + builder: *Lockfile.StringBuilder, + ) OOM!void { + if (expr.get("catalog")) |default_catalog| { + const group = try this.getOrPutGroup(lockfile, .empty); + switch (default_catalog.data) { + .e_object => |obj| { + for (obj.properties.slice()) |item| { + const dep_name_str = item.key.?.asString(lockfile.allocator).?; + + const dep_name_hash = String.Builder.stringHash(dep_name_str); + const dep_name = builder.appendWithHash(String, dep_name_str, dep_name_hash); + + switch (item.value.?.data) { + .e_string => |version_str| { + const version_literal = builder.append(String, version_str.slice(lockfile.allocator)); + + const version_sliced = version_literal.sliced(lockfile.buffers.string_bytes.items); + + const version = Dependency.parse( + lockfile.allocator, + dep_name, + dep_name_hash, + version_sliced.slice, + &version_sliced, + log, + pm, + ) orelse { + try log.addError(source, item.value.?.loc, "Invalid dependency version"); + continue; + }; + + const entry = try group.getOrPutContext( + lockfile.allocator, + dep_name, + String.arrayHashContext(lockfile, null), + ); + + if (entry.found_existing) { + try log.addError(source, item.key.?.loc, "Duplicate catalog"); + continue; + } + + const dep: Dependency = .{ + .name = dep_name, + .name_hash = dep_name_hash, + .version = version, + }; + + entry.value_ptr.* = dep; + }, + else => {}, + } + } + }, + else => {}, + } + } + + if (expr.get("catalogs")) |catalogs| { + switch (catalogs.data) { + .e_object => |catalog_names| { + for (catalog_names.properties.slice()) |catalog| { + const catalog_name_str = catalog.key.?.asString(lockfile.allocator).?; + const catalog_name = builder.append(String, catalog_name_str); + + const group = try this.getOrPutGroup(lockfile, catalog_name); + + switch (catalog.value.?.data) { + .e_object => |obj| { + for (obj.properties.slice()) |item| { + const dep_name_str = item.key.?.asString(lockfile.allocator).?; + const dep_name_hash = String.Builder.stringHash(dep_name_str); + const dep_name = builder.appendWithHash(String, 
dep_name_str, dep_name_hash); + switch (item.value.?.data) { + .e_string => |version_str| { + const version_literal = builder.append(String, version_str.slice(lockfile.allocator)); + const version_sliced = version_literal.sliced(lockfile.buffers.string_bytes.items); + + const version = Dependency.parse( + lockfile.allocator, + dep_name, + dep_name_hash, + version_sliced.slice, + &version_sliced, + log, + pm, + ) orelse { + try log.addError(source, item.value.?.loc, "Invalid dependency version"); + continue; + }; + + const entry = try group.getOrPutContext( + lockfile.allocator, + dep_name, + + String.arrayHashContext(lockfile, null), + ); + + if (entry.found_existing) { + try log.addError(source, item.key.?.loc, "Duplicate catalog"); + continue; + } + + const dep: Dependency = .{ + .name = dep_name, + .name_hash = dep_name_hash, + .version = version, + }; + + entry.value_ptr.* = dep; + }, + else => {}, + } + } + }, + else => {}, + } + } + }, + else => {}, + } + } + } + + pub fn sort(this: *CatalogMap, lockfile: *const Lockfile) void { + const DepSortCtx = struct { + buf: string, + catalog_deps: [*]const Dependency, + + pub fn lessThan(sorter: *@This(), l: usize, r: usize) bool { + const deps = sorter.catalog_deps; + const l_dep = deps[l]; + const r_dep = deps[r]; + const buf = sorter.buf; + + return l_dep.name.order(&r_dep.name, buf, buf) == .lt; + } + }; + + const NameSortCtx = struct { + buf: string, + catalog_names: [*]const String, + + pub fn lessThan(sorter: *@This(), l: usize, r: usize) bool { + const buf = sorter.buf; + const names = sorter.catalog_names; + const l_name = names[l]; + const r_name = names[r]; + + return l_name.order(&r_name, buf, buf) == .lt; + } + }; + + var dep_sort_ctx: DepSortCtx = .{ + .buf = lockfile.buffers.string_bytes.items, + .catalog_deps = lockfile.catalogs.default.values().ptr, + }; + + this.default.sort(&dep_sort_ctx); + + var iter = this.groups.iterator(); + while (iter.next()) |catalog| { + dep_sort_ctx.catalog_deps = catalog.value_ptr.values().ptr; + catalog.value_ptr.sort(&dep_sort_ctx); + } + + var name_sort_ctx: NameSortCtx = .{ + .buf = lockfile.buffers.string_bytes.items, + .catalog_names = this.groups.keys().ptr, + }; + + this.groups.sort(&name_sort_ctx); + } + + pub fn deinit(this: *CatalogMap, allocator: std.mem.Allocator) void { + this.default.deinit(allocator); + for (this.groups.values()) |*group| { + group.deinit(allocator); + } + this.groups.deinit(allocator); + } + + pub fn count(this: *CatalogMap, lockfile: *Lockfile, builder: *Lockfile.StringBuilder) void { + var deps_iter = this.default.iterator(); + while (deps_iter.next()) |entry| { + const dep_name = entry.key_ptr; + const dep = entry.value_ptr; + builder.count(dep_name.slice(lockfile.buffers.string_bytes.items)); + dep.count(lockfile.buffers.string_bytes.items, @TypeOf(builder), builder); + } + + var groups_iter = this.groups.iterator(); + while (groups_iter.next()) |catalog| { + const catalog_name = catalog.key_ptr; + builder.count(catalog_name.slice(lockfile.buffers.string_bytes.items)); + + deps_iter = catalog.value_ptr.iterator(); + while (deps_iter.next()) |entry| { + const dep_name = entry.key_ptr; + const dep = entry.value_ptr; + builder.count(dep_name.slice(lockfile.buffers.string_bytes.items)); + dep.count(lockfile.buffers.string_bytes.items, @TypeOf(builder), builder); + } + } + } + + pub fn clone(this: *CatalogMap, pm: *PackageManager, old: *Lockfile, new: *Lockfile, builder: *Lockfile.StringBuilder) OOM!CatalogMap { + var new_catalog: CatalogMap = .{}; + + try 
new_catalog.default.ensureTotalCapacity(new.allocator, this.default.count()); + + var deps_iter = this.default.iterator(); + while (deps_iter.next()) |entry| { + const dep_name = entry.key_ptr; + const dep = entry.value_ptr; + new_catalog.default.putAssumeCapacityContext( + builder.append(String, dep_name.slice(old.buffers.string_bytes.items)), + try dep.clone(pm, old.buffers.string_bytes.items, @TypeOf(builder), builder), + String.arrayHashContext(new, null), + ); + } + + try new_catalog.groups.ensureTotalCapacity(new.allocator, this.groups.count()); + + var groups_iter = this.groups.iterator(); + while (groups_iter.next()) |group| { + const catalog_name = group.key_ptr; + const deps = group.value_ptr; + + var new_group: Map = .{}; + try new_group.ensureTotalCapacity(new.allocator, deps.count()); + + deps_iter = deps.iterator(); + while (deps_iter.next()) |entry| { + const dep_name = entry.key_ptr; + const dep = entry.value_ptr; + new_group.putAssumeCapacityContext( + builder.append(String, dep_name.slice(old.buffers.string_bytes.items)), + try dep.clone(pm, old.buffers.string_bytes.items, @TypeOf(builder), builder), + String.arrayHashContext(new, null), + ); + } + + new_catalog.groups.putAssumeCapacityContext( + builder.append(String, catalog_name.slice(old.buffers.string_bytes.items)), + new_group, + String.arrayHashContext(new, null), + ); + } + + return new_catalog; + } +}; pub const OverrideMap = struct { const debug = Output.scoped(.OverrideMap, false); @@ -3013,6 +3383,9 @@ pub const OverrideMap = struct { /// and "here is a list of overrides depending on the package that imported" similar to PackageIndex above. pub fn get(this: *const OverrideMap, name_hash: PackageNameHash) ?Dependency.Version { debug("looking up override for {x}", .{name_hash}); + if (this.map.count() == 0) { + return null; + } return if (this.map.get(name_hash)) |dep| dep.version else @@ -4282,6 +4655,7 @@ pub const Package = extern struct { remove: u32 = 0, update: u32 = 0, overrides_changed: bool = false, + catalogs_changed: bool = false, // bool for if this dependency should be added to lockfile trusted dependencies. // it is false when the new trusted dependency is coming from the default list. 
@@ -4297,7 +4671,7 @@ pub const Package = extern struct { } pub inline fn hasDiffs(this: Summary) bool { - return this.add > 0 or this.remove > 0 or this.update > 0 or this.overrides_changed or + return this.add > 0 or this.remove > 0 or this.update > 0 or this.overrides_changed or this.catalogs_changed or this.added_trusted_dependencies.count() > 0 or this.removed_trusted_dependencies.count() > 0 or this.patched_dependencies_changed; @@ -4316,6 +4690,7 @@ pub const Package = extern struct { id_mapping: ?[]PackageID, ) !Summary { var summary = Summary{}; + const is_root = id_mapping != null; var to_deps = to.dependencies.get(to_lockfile.buffers.dependencies.items); const from_deps = from.dependencies.get(from_lockfile.buffers.dependencies.items); const from_resolutions = from.resolutions.get(from_lockfile.buffers.resolutions.items); @@ -4346,6 +4721,74 @@ pub const Package = extern struct { } } + if (is_root) catalogs: { + + // don't sort if lengths are different + if (from_lockfile.catalogs.default.count() != to_lockfile.catalogs.default.count()) { + summary.catalogs_changed = true; + break :catalogs; + } + + if (from_lockfile.catalogs.groups.count() != to_lockfile.catalogs.groups.count()) { + summary.catalogs_changed = true; + break :catalogs; + } + + from_lockfile.catalogs.sort(from_lockfile); + to_lockfile.catalogs.sort(to_lockfile); + + for ( + from_lockfile.catalogs.default.keys(), + from_lockfile.catalogs.default.values(), + to_lockfile.catalogs.default.keys(), + to_lockfile.catalogs.default.values(), + ) |from_dep_name, *from_dep, to_dep_name, *to_dep| { + if (!from_dep_name.eql(to_dep_name, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items)) { + summary.catalogs_changed = true; + break :catalogs; + } + + if (!from_dep.eql(to_dep, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items)) { + summary.catalogs_changed = true; + break :catalogs; + } + } + + for ( + from_lockfile.catalogs.groups.keys(), + from_lockfile.catalogs.groups.values(), + to_lockfile.catalogs.groups.keys(), + to_lockfile.catalogs.groups.values(), + ) |from_catalog_name, from_catalog_deps, to_catalog_name, to_catalog_deps| { + if (!from_catalog_name.eql(to_catalog_name, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items)) { + summary.catalogs_changed = true; + break :catalogs; + } + + if (from_catalog_deps.count() != to_catalog_deps.count()) { + summary.catalogs_changed = true; + break :catalogs; + } + + for ( + from_catalog_deps.keys(), + from_catalog_deps.values(), + to_catalog_deps.keys(), + to_catalog_deps.values(), + ) |from_dep_name, *from_dep, to_dep_name, *to_dep| { + if (!from_dep_name.eql(to_dep_name, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items)) { + summary.catalogs_changed = true; + break :catalogs; + } + + if (!from_dep.eql(to_dep, from_lockfile.buffers.string_bytes.items, to_lockfile.buffers.string_bytes.items)) { + summary.catalogs_changed = true; + break :catalogs; + } + } + } + } + trusted_dependencies: { // trusted dependency diff // @@ -5522,29 +5965,31 @@ pub const Package = extern struct { // } // if (obj.get("packages")) |packages_query| { - if (packages_query.data == .e_array) { - total_dependencies_count += try processWorkspaceNamesArray( - &workspace_names, - allocator, - &pm.workspace_package_json_cache, - log, - packages_query.data.e_array, - &source, - packages_query.loc, - &string_builder, - ); - break :brk; + if (packages_query.data != .e_array) { + 
log.addErrorFmt(&source, packages_query.loc, allocator, + // TODO: what if we could comptime call the syntax highlighter + \\"workspaces.packages" expects an array of strings, e.g. + \\ "workspaces": {{ + \\ "packages": [ + \\ "path/to/package" + \\ ] + \\ }} + , .{}) catch {}; + return error.InvalidPackageJSON; } + total_dependencies_count += try processWorkspaceNamesArray( + &workspace_names, + allocator, + &pm.workspace_package_json_cache, + log, + packages_query.data.e_array, + &source, + packages_query.loc, + &string_builder, + ); } - log.addErrorFmt(&source, dependencies_q.loc, allocator, - // TODO: what if we could comptime call the syntax highlighter - \\Workspaces expects an array of strings, e.g. - \\ "workspaces": [ - \\ "path/to/package" - \\ ] - , .{}) catch {}; - return error.InvalidPackageJSON; + break :brk; } for (obj.properties.slice()) |item| { const key = item.key.?.asString(allocator).?; @@ -5574,7 +6019,7 @@ pub const Package = extern struct { if (group.behavior.isWorkspace()) { log.addErrorFmt(&source, dependencies_q.loc, allocator, // TODO: what if we could comptime call the syntax highlighter - \\Workspaces expects an array of strings, e.g. + \\"workspaces" expects an array of strings, e.g. \\ "workspaces": [ \\ "path/to/package" \\ ] @@ -5627,6 +6072,10 @@ pub const Package = extern struct { if (comptime features.is_main) { lockfile.overrides.parseCount(lockfile, json, &string_builder); + + if (json.get("workspaces")) |workspaces_expr| { + lockfile.catalogs.parseCount(lockfile, workspaces_expr, &string_builder); + } } try string_builder.allocate(); @@ -5989,6 +6438,9 @@ pub const Package = extern struct { // This function depends on package.dependencies being set, so it is done at the very end. if (comptime features.is_main) { try lockfile.overrides.parseAppend(pm, lockfile, package, log, source, json, &string_builder); + if (json.get("workspaces")) |workspaces_expr| { + try lockfile.catalogs.parseAppend(pm, lockfile, log, &source, workspaces_expr, &string_builder); + } } string_builder.clamp(); @@ -6252,6 +6704,7 @@ pub fn deinit(this: *Lockfile) void { this.workspace_paths.deinit(this.allocator); this.workspace_versions.deinit(this.allocator); this.overrides.deinit(this.allocator); + this.catalogs.deinit(this.allocator); } const Buffers = struct { @@ -6640,6 +7093,7 @@ pub const Serializer = struct { const has_trusted_dependencies_tag: u64 = @bitCast(@as([8]u8, "tRuStEDd".*)); const has_empty_trusted_dependencies_tag: u64 = @bitCast(@as([8]u8, "eMpTrUsT".*)); const has_overrides_tag: u64 = @bitCast(@as([8]u8, "oVeRriDs".*)); + const has_catalogs_tag: u64 = @bitCast(@as([8]u8, "cAtAlOgS".*)); pub fn save(this: *Lockfile, verbose_log: bool, bytes: *std.ArrayList(u8), total_size: *usize, end_pos: *usize) !void { @@ -6810,6 +7264,73 @@ pub const Serializer = struct { ); } + if (this.catalogs.hasAny()) { + try writer.writeAll(std.mem.asBytes(&has_catalogs_tag)); + + try Lockfile.Buffers.writeArray( + StreamType, + stream, + @TypeOf(writer), + writer, + []String, + this.catalogs.default.keys(), + ); + + var external_deps_buf: std.ArrayListUnmanaged(Dependency.External) = try .initCapacity(z_allocator, this.catalogs.default.count()); + defer external_deps_buf.deinit(z_allocator); + external_deps_buf.items.len = this.catalogs.default.count(); + for (external_deps_buf.items, this.catalogs.default.values()) |*dest, src| { + dest.* = src.toExternal(); + } + + try Lockfile.Buffers.writeArray( + StreamType, + stream, + @TypeOf(writer), + writer, + []Dependency.External, + 
external_deps_buf.items, + ); + external_deps_buf.clearRetainingCapacity(); + + try Lockfile.Buffers.writeArray( + StreamType, + stream, + @TypeOf(writer), + writer, + []String, + this.catalogs.groups.keys(), + ); + + for (this.catalogs.groups.values()) |catalog_deps| { + try Lockfile.Buffers.writeArray( + StreamType, + stream, + @TypeOf(writer), + writer, + []String, + catalog_deps.keys(), + ); + + try external_deps_buf.ensureTotalCapacity(z_allocator, catalog_deps.count()); + external_deps_buf.items.len = catalog_deps.count(); + defer external_deps_buf.clearRetainingCapacity(); + + for (external_deps_buf.items, catalog_deps.values()) |*dest, src| { + dest.* = src.toExternal(); + } + + try Lockfile.Buffers.writeArray( + StreamType, + stream, + @TypeOf(writer), + writer, + []Dependency.External, + external_deps_buf.items, + ); + } + } + total_size.* = try stream.getPos(); try writer.writeAll(&alignment_bytes_to_repeat_buffer); @@ -7033,6 +7554,59 @@ pub const Serializer = struct { } } + { + const remaining_in_buffer = total_buffer_size -| stream.pos; + + if (remaining_in_buffer > 8 and total_buffer_size <= stream.buffer.len) { + const next_num = try reader.readInt(u64, .little); + if (next_num == has_catalogs_tag) { + lockfile.catalogs = .{}; + + var default_dep_names = try Lockfile.Buffers.readArray(stream, allocator, std.ArrayListUnmanaged(String)); + defer default_dep_names.deinit(allocator); + + var default_deps = try Lockfile.Buffers.readArray(stream, allocator, std.ArrayListUnmanaged(Dependency.External)); + defer default_deps.deinit(allocator); + + try lockfile.catalogs.default.ensureTotalCapacity(allocator, default_deps.items.len); + + const context: Dependency.Context = .{ + .allocator = allocator, + .log = log, + .buffer = lockfile.buffers.string_bytes.items, + .package_manager = manager, + }; + + for (default_dep_names.items, default_deps.items) |dep_name, dep| { + lockfile.catalogs.default.putAssumeCapacityContext(dep_name, Dependency.toDependency(dep, context), String.arrayHashContext(lockfile, null)); + } + + var catalog_names = try Lockfile.Buffers.readArray(stream, allocator, std.ArrayListUnmanaged(String)); + defer catalog_names.deinit(allocator); + + try lockfile.catalogs.groups.ensureTotalCapacity(allocator, catalog_names.items.len); + + for (catalog_names.items) |catalog_name| { + var catalog_dep_names = try Lockfile.Buffers.readArray(stream, allocator, std.ArrayListUnmanaged(String)); + defer catalog_dep_names.deinit(allocator); + + var catalog_deps = try Lockfile.Buffers.readArray(stream, allocator, std.ArrayListUnmanaged(Dependency.External)); + defer catalog_deps.deinit(allocator); + + const group = try lockfile.catalogs.getOrPutGroup(lockfile, catalog_name); + + try group.ensureTotalCapacity(allocator, catalog_deps.items.len); + + for (catalog_dep_names.items, catalog_deps.items) |dep_name, dep| { + group.putAssumeCapacityContext(dep_name, Dependency.toDependency(dep, context), String.arrayHashContext(lockfile, null)); + } + } + } else { + stream.pos -= 8; + } + } + } + lockfile.scratch = Lockfile.Scratch.init(allocator); lockfile.package_index = PackageIndex.Map.initContext(allocator, .{}); lockfile.string_pool = StringPool.init(allocator); @@ -7531,6 +8105,18 @@ pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep_id: Depend try w.objectField("package_name"); try w.write(info.package_name.slice(sb)); }, + .catalog => { + try w.beginObject(); + defer w.endObject() catch {}; + + const info = dep.version.value.catalog; + + try w.objectField("name"); 
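+                // "name" is the dependency's own name; the catalog group, if any,
+                // only appears in the "version" field written below.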
+                try w.write(dep.name.slice(sb));
+
+                try w.objectField("version");
+                try w.print("catalog:{s}", .{info.fmtJson(sb, .{ .quote = false })});
+            },
         }
 
         try w.objectField("package_id");
diff --git a/src/install/migration.zig b/src/install/migration.zig
index 2cc4e9acd9..72cffc052c 100644
--- a/src/install/migration.zig
+++ b/src/install/migration.zig
@@ -825,6 +825,10 @@ pub fn migrateNPMLockfile(
 
             break :resolved switch (res_version.tag) {
                 .uninitialized => std.debug.panic("Version string {s} resolved to `.uninitialized`", .{version_bytes}),
+
+                // npm does not support catalogs
+                .catalog => return error.InvalidNPMLockfile,
+
                 .npm, .dist_tag => res: {
                     // It is theoretically possible to hit this in a case where the resolved dependency is NOT
                     // an npm dependency, but that case is so convoluted that it is not worth handling.
diff --git a/src/install/resolution.zig b/src/install/resolution.zig
index c409660eac..ff22d4ddd3 100644
--- a/src/install/resolution.zig
+++ b/src/install/resolution.zig
@@ -97,6 +97,13 @@ pub const Resolution = extern struct {
             .symlink => error.UnexpectedResolution,
             .folder => error.UnexpectedResolution,
 
+            // even though it's a dependency type, it's not
+            // possible for 'catalog:' to be written to the
+            // lockfile for any resolution because the install
+            // will fail if it's not successfully replaced by
+            // a version
+            .catalog => error.UnexpectedResolution,
+
             // should not happen
             .dist_tag => error.UnexpectedResolution,
             .uninitialized => error.UnexpectedResolution,
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index 525bca08a1..dbbd8232e3 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -927,8 +927,8 @@ pub const PackageJSON = struct {
             package_json.dependencies.map = DependencyMap.HashMap{};
             package_json.dependencies.source_buf = json_source.contents;
             const ctx = String.ArrayHashContext{
-                .a_buf = json_source.contents,
-                .b_buf = json_source.contents,
+                .arg_buf = json_source.contents,
+                .existing_buf = json_source.contents,
             };
             package_json.dependencies.map.ensureTotalCapacityContext(
                 allocator,
diff --git a/src/semver/SemverString.zig b/src/semver/SemverString.zig
index 3937fa0b36..814ba29555 100644
--- a/src/semver/SemverString.zig
+++ b/src/semver/SemverString.zig
@@ -7,6 +7,8 @@ pub const String = extern struct {
     /// 3. If the final bit is not set, then it's a string that is stored in an external buffer.
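    /// The all-zero default below is the empty string (exposed as `String.empty`).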
bytes: [max_inline_len]u8 = [8]u8{ 0, 0, 0, 0, 0, 0, 0, 0 }, + pub const empty: String = .{}; + /// Create an inline string pub fn from(comptime inlinable_buffer: []const u8) String { comptime { @@ -212,33 +214,47 @@ pub const String = extern struct { } pub const HashContext = struct { - a_buf: []const u8, - b_buf: []const u8, + arg_buf: []const u8, + existing_buf: []const u8, - pub fn eql(ctx: HashContext, a: String, b: String) bool { - return a.eql(b, ctx.a_buf, ctx.b_buf); + pub fn eql(ctx: HashContext, arg: String, existing: String) bool { + return arg.eql(existing, ctx.arg_buf, ctx.existing_buf); } - pub fn hash(ctx: HashContext, a: String) u64 { - const str = a.slice(ctx.a_buf); + pub fn hash(ctx: HashContext, arg: String) u64 { + const str = arg.slice(ctx.arg_buf); return bun.hash(str); } }; - pub const ArrayHashContext = struct { - a_buf: []const u8, - b_buf: []const u8, + pub fn hashContext(l_lockfile: *Lockfile, r_lockfile: ?*Lockfile) HashContext { + return .{ + .arg_buf = l_lockfile.buffers.string_bytes.items, + .existing_buf = if (r_lockfile) |r| r.buffers.string_bytes.items else l_lockfile.buffers.string_bytes.items, + }; + } - pub fn eql(ctx: ArrayHashContext, a: String, b: String, _: usize) bool { - return a.eql(b, ctx.a_buf, ctx.b_buf); + pub const ArrayHashContext = struct { + arg_buf: []const u8, + existing_buf: []const u8, + + pub fn eql(ctx: ArrayHashContext, arg: String, existing: String, _: usize) bool { + return arg.eql(existing, ctx.arg_buf, ctx.existing_buf); } - pub fn hash(ctx: ArrayHashContext, a: String) u32 { - const str = a.slice(ctx.a_buf); + pub fn hash(ctx: ArrayHashContext, arg: String) u32 { + const str = arg.slice(ctx.arg_buf); return @as(u32, @truncate(bun.hash(str))); } }; + pub fn arrayHashContext(l_lockfile: *const Lockfile, r_lockfile: ?*const Lockfile) ArrayHashContext { + return .{ + .arg_buf = l_lockfile.buffers.string_bytes.items, + .existing_buf = if (r_lockfile) |r| r.buffers.string_bytes.items else l_lockfile.buffers.string_bytes.items, + }; + } + pub fn init( buf: string, in: string, diff --git a/test/cli/install/__snapshots__/bun-install.test.ts.snap b/test/cli/install/__snapshots__/bun-install.test.ts.snap index 57ba761ff3..f82188350b 100644 --- a/test/cli/install/__snapshots__/bun-install.test.ts.snap +++ b/test/cli/install/__snapshots__/bun-install.test.ts.snap @@ -20,38 +20,16 @@ error: dependencies expects a map of specifiers, e.g. " `; -exports[`should report error on invalid format for optionalDependencies 1`] = ` -"1 | {"name":"foo","version":"0.0.1","optionalDependencies":"bar"} - ^ -error: optionalDependencies expects a map of specifiers, e.g. - "optionalDependencies": { - "bun": "latest" - } - at [dir]/package.json:1:33 -" -`; - exports[`should report error on invalid format for workspaces 1`] = ` "1 | {"name":"foo","version":"0.0.1","workspaces":{"packages":{"bar":true}}} - ^ -error: Workspaces expects an array of strings, e.g. - "workspaces": [ - "path/to/package" - ] - at [dir]/package.json:1:33 -" -`; - -exports[`should report error on duplicated workspace packages 1`] = ` -"1 | {"name":"moo","version":"0.0.3"} - ^ -error: Workspace name "moo" already exists - at [dir]/baz/package.json:1:9 - -1 | {"name":"moo","version":"0.0.2"} - ^ -note: Package name is also declared here - at [dir]/bar/package.json:1:9 + ^ +error: "workspaces.packages" expects an array of strings, e.g. 
+ "workspaces": { + "packages": [ + "path/to/package" + ] + } + at [dir]/package.json:1:58 " `; diff --git a/test/cli/install/__snapshots__/bun-lock.test.ts.snap b/test/cli/install/__snapshots__/bun-lock.test.ts.snap index 6acb99e21a..61cfd88fdd 100644 --- a/test/cli/install/__snapshots__/bun-lock.test.ts.snap +++ b/test/cli/install/__snapshots__/bun-lock.test.ts.snap @@ -433,8 +433,8 @@ exports[`should sort overrides before comparing 1`] = ` }, }, "overrides": { - "what-bin": "1.0.0", "no-deps": "2.0.0", + "what-bin": "1.0.0", }, "packages": { "no-deps": ["no-deps@2.0.0", "http://localhost:1234/no-deps/-/no-deps-2.0.0.tgz", {}, "sha512-W3duJKZPcMIG5rA1io5cSK/bhW9rWFz+jFxZsKS/3suK4qHDkQNxUTEXee9/hTaAoDCeHWQqogukWYKzfr6X4g=="], @@ -470,9 +470,9 @@ exports[`should include unused resolutions in the lockfile 1`] = ` }, }, "overrides": { - "what-bin": "1.0.0", - "no-deps": "2.0.0", "jquery": "4.0.0", + "no-deps": "2.0.0", + "what-bin": "1.0.0", }, "packages": { "no-deps": ["no-deps@2.0.0", "http://localhost:1234/no-deps/-/no-deps-2.0.0.tgz", {}, "sha512-W3duJKZPcMIG5rA1io5cSK/bhW9rWFz+jFxZsKS/3suK4qHDkQNxUTEXee9/hTaAoDCeHWQqogukWYKzfr6X4g=="], diff --git a/test/cli/install/__snapshots__/catalogs.test.ts.snap b/test/cli/install/__snapshots__/catalogs.test.ts.snap new file mode 100644 index 0000000000..e4af894193 --- /dev/null +++ b/test/cli/install/__snapshots__/catalogs.test.ts.snap @@ -0,0 +1,103 @@ +// Bun Snapshot v1, https://goo.gl/fbAQLP + +exports[`basic detect changes (bun.lock) 1`] = ` +"{ + "lockfileVersion": 1, + "workspaces": { + "": { + "name": "catalog-basic-2", + }, + "packages/pkg1": { + "name": "pkg1", + "dependencies": { + "a-dep": "catalog:a", + "no-deps": "catalog:", + }, + }, + }, + "catalog": { + "no-deps": "2.0.0", + }, + "catalogs": { + "a": { + "a-dep": "1.0.1", + }, + }, + "packages": { + "a-dep": ["a-dep@1.0.1", "http://localhost:1234/a-dep/-/a-dep-1.0.1.tgz", {}, "sha512-6nmTaPgO2U/uOODqOhbjbnaB4xHuZ+UB7AjKUA3g2dT4WRWeNxgp0dC8Db4swXSnO5/uLLUdFmUJKINNBO/3wg=="], + + "no-deps": ["no-deps@2.0.0", "http://localhost:1234/no-deps/-/no-deps-2.0.0.tgz", {}, "sha512-W3duJKZPcMIG5rA1io5cSK/bhW9rWFz+jFxZsKS/3suK4qHDkQNxUTEXee9/hTaAoDCeHWQqogukWYKzfr6X4g=="], + + "pkg1": ["pkg1@workspace:packages/pkg1"], + } +} +" +`; + +exports[`basic detect changes (bun.lock) 2`] = ` +"{ + "lockfileVersion": 1, + "workspaces": { + "": { + "name": "catalog-basic-2", + }, + "packages/pkg1": { + "name": "pkg1", + "dependencies": { + "a-dep": "catalog:a", + "no-deps": "catalog:", + }, + }, + }, + "catalog": { + "no-deps": "1.0.0", + }, + "catalogs": { + "a": { + "a-dep": "1.0.1", + }, + }, + "packages": { + "a-dep": ["a-dep@1.0.1", "http://localhost:1234/a-dep/-/a-dep-1.0.1.tgz", {}, "sha512-6nmTaPgO2U/uOODqOhbjbnaB4xHuZ+UB7AjKUA3g2dT4WRWeNxgp0dC8Db4swXSnO5/uLLUdFmUJKINNBO/3wg=="], + + "no-deps": ["no-deps@1.0.0", "http://localhost:1234/no-deps/-/no-deps-1.0.0.tgz", {}, "sha512-v4w12JRjUGvfHDUP8vFDwu0gUWu04j0cv9hLb1Abf9VdaXu4XcrddYFTMVBVvmldKViGWH7jrb6xPJRF0wq6gw=="], + + "pkg1": ["pkg1@workspace:packages/pkg1"], + } +} +" +`; + +exports[`basic detect changes (bun.lock) 3`] = ` +"{ + "lockfileVersion": 1, + "workspaces": { + "": { + "name": "catalog-basic-2", + }, + "packages/pkg1": { + "name": "pkg1", + "dependencies": { + "a-dep": "catalog:a", + "no-deps": "catalog:", + }, + }, + }, + "catalog": { + "no-deps": "1.0.0", + }, + "catalogs": { + "a": { + "a-dep": "1.0.10", + }, + }, + "packages": { + "a-dep": ["a-dep@1.0.10", "http://localhost:1234/a-dep/-/a-dep-1.0.10.tgz", {}, 
"sha512-NeQ6Ql9jRW8V+VOiVb+PSQAYOvVoSimW+tXaR0CoJk4kM9RIk/XlAUGCsNtn5XqjlDO4hcH8NcyaL507InevEg=="], + + "no-deps": ["no-deps@1.0.0", "http://localhost:1234/no-deps/-/no-deps-1.0.0.tgz", {}, "sha512-v4w12JRjUGvfHDUP8vFDwu0gUWu04j0cv9hLb1Abf9VdaXu4XcrddYFTMVBVvmldKViGWH7jrb6xPJRF0wq6gw=="], + + "pkg1": ["pkg1@workspace:packages/pkg1"], + } +} +" +`; diff --git a/test/cli/install/catalogs.test.ts b/test/cli/install/catalogs.test.ts new file mode 100644 index 0000000000..3d537f3e61 --- /dev/null +++ b/test/cli/install/catalogs.test.ts @@ -0,0 +1,221 @@ +import { file, spawn, write } from "bun"; +import { afterAll, beforeAll, describe, expect, test } from "bun:test"; +import { exists } from "fs/promises"; +import { VerdaccioRegistry, bunEnv, bunExe, runBunInstall, stderrForInstall } from "harness"; +import { join } from "path"; + +var registry = new VerdaccioRegistry(); + +beforeAll(async () => { + await registry.start(); +}); + +afterAll(() => { + registry.stop(); +}); + +describe("basic", () => { + async function createBasicCatalogMonorepo(packageDir: string, name: string) { + const packageJson = { + name, + workspaces: { + packages: ["packages/*"], + catalog: { + "no-deps": "2.0.0", + }, + catalogs: { + a: { + "a-dep": "1.0.1", + }, + }, + }, + }; + + await Promise.all([ + write(join(packageDir, "package.json"), JSON.stringify(packageJson)), + write( + join(packageDir, "packages", "pkg1", "package.json"), + JSON.stringify({ + name: "pkg1", + dependencies: { + "no-deps": "catalog:", + "a-dep": "catalog:a", + }, + }), + ), + ]); + + return packageJson; + } + test("both catalog and catalogs", async () => { + const { packageDir } = await registry.createTestDir(); + + await createBasicCatalogMonorepo(packageDir, "catalog-basic-1"); + + await runBunInstall(bunEnv, packageDir); + + expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({ + name: "no-deps", + version: "2.0.0", + }); + + expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).json()).toEqual({ + name: "a-dep", + version: "1.0.1", + }); + + // another install does not save the lockfile + await runBunInstall(bunEnv, packageDir, { savesLockfile: false }); + }); + + for (const binaryLockfile of [true, false]) { + test(`detect changes (${binaryLockfile ? "bun.lockb" : "bun.lock"})`, async () => { + const { packageDir } = await registry.createTestDir({ saveTextLockfile: !binaryLockfile }); + const packageJson = await createBasicCatalogMonorepo(packageDir, "catalog-basic-2"); + let { err } = await runBunInstall(bunEnv, packageDir); + expect(err).toContain("Saved lockfile"); + + const initialLockfile = !binaryLockfile + ? 
(await file(join(packageDir, "bun.lock")).text()).replaceAll(/localhost:\d+/g, "localhost:1234") + : undefined; + + if (!binaryLockfile) { + expect(initialLockfile).toMatchSnapshot(); + } else { + expect(await exists(join(packageDir, "bun.lockb"))).toBeTrue(); + } + + expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({ + name: "no-deps", + version: "2.0.0", + }); + expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).json()).toEqual({ + name: "a-dep", + version: "1.0.1", + }); + + // update catalog + packageJson.workspaces.catalog["no-deps"] = "1.0.0"; + await write(join(packageDir, "package.json"), JSON.stringify(packageJson)); + ({ err } = await runBunInstall(bunEnv, packageDir, { savesLockfile: true })); + expect(err).toContain("Saved lockfile"); + + if (!binaryLockfile) { + const newLockfile = (await file(join(packageDir, "bun.lock")).text()).replaceAll( + /localhost:\d+/g, + "localhost:1234", + ); + + expect(newLockfile).not.toEqual(initialLockfile); + expect(newLockfile).toMatchSnapshot(); + } else { + expect(await exists(join(packageDir, "bun.lockb"))).toBeTrue(); + } + + expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({ + name: "no-deps", + version: "1.0.0", + }); + expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).json()).toEqual({ + name: "a-dep", + version: "1.0.1", + }); + + // update catalogs + packageJson.workspaces.catalogs.a["a-dep"] = "1.0.10"; + await write(join(packageDir, "package.json"), JSON.stringify(packageJson)); + ({ err } = await runBunInstall(bunEnv, packageDir, { savesLockfile: true })); + expect(err).toContain("Saved lockfile"); + + if (!binaryLockfile) { + const newLockfile = (await file(join(packageDir, "bun.lock")).text()).replaceAll( + /localhost:\d+/g, + "localhost:1234", + ); + + expect(newLockfile).not.toEqual(initialLockfile); + expect(newLockfile).toMatchSnapshot(); + } else { + expect(await exists(join(packageDir, "bun.lockb"))).toBeTrue(); + } + + expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({ + name: "no-deps", + version: "1.0.0", + }); + expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).json()).toEqual({ + name: "a-dep", + version: "1.0.10", + }); + }); + } +}); + +describe("errors", () => { + test("fails gracefully when no catalog is found for a package", async () => { + const { packageDir, packageJson } = await registry.createTestDir(); + + await write( + packageJson, + JSON.stringify({ + name: "catalog-error-1", + workspaces: { + // empty, any catalog should fail to resolve + catalog: {}, + catalogs: {}, + }, + dependencies: { + "no-deps": "catalog:", + + // longer than 8 + "a-dep": "catalog:aaaaaaaaaaaaaaaaa", + }, + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: bunEnv, + }); + + const out = await Bun.readableStreamToText(stdout); + const err = stderrForInstall(await Bun.readableStreamToText(stderr)); + + expect(err).toContain("no-deps@catalog: failed to resolve"); + expect(err).toContain("a-dep@catalog:aaaaaaaaaaaaaaaaa failed to resolve"); + }); + + test("invalid dependency version", async () => { + const { packageDir, packageJson } = await registry.createTestDir(); + await write( + packageJson, + JSON.stringify({ + name: "catalog-error-2", + workspaces: { + catalog: { + "no-deps": ".:", + }, + }, + dependencies: { + 
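          // the catalog entry for "no-deps" above is ".:", which is not a valid
          // version, so resolving "catalog:" must fail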
"no-deps": "catalog:", + }, + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: bunEnv, + }); + + const out = await Bun.readableStreamToText(stdout); + const err = stderrForInstall(await Bun.readableStreamToText(stderr)); + + expect(err).toContain("no-deps@catalog: failed to resolve"); + }); +});