Mirror of https://github.com/oven-sh/bun (synced 2026-02-17 06:12:08 +00:00)

Compare commits: jarred/nes...claude/add (13 commits)

e38300621b, 540e0151c7, a10785b45b, 2525a9e5c3, 19ff217be6, 6263db7d57, f180b08927,
7323081d67, 4aa2fb3573, e06141bbdc, 1c329161c9, 86392c0d09, 2f929f16a2
@@ -65,6 +65,7 @@ pub const install_params: []const ParamType = &(shared_params ++ [_]ParamType{
    clap.parseParam("--filter <STR>... Install packages for the matching workspaces") catch unreachable,
    clap.parseParam("-a, --analyze Analyze & install all dependencies of files passed as arguments recursively (using Bun's bundler)") catch unreachable,
    clap.parseParam("--only-missing Only add dependencies to package.json if they are not already present") catch unreachable,
    clap.parseParam("--catalog <STR>? Use catalog for version (optionally specify catalog name)") catch unreachable,
    clap.parseParam("<POS> ... ") catch unreachable,
});

@@ -101,6 +102,7 @@ pub const add_params: []const ParamType = &(shared_params ++ [_]ParamType{
    clap.parseParam("-E, --exact Add the exact version instead of the ^range") catch unreachable,
    clap.parseParam("-a, --analyze Recursively analyze & install dependencies of files passed as arguments (using Bun's bundler)") catch unreachable,
    clap.parseParam("--only-missing Only add dependencies to package.json if they are not already present") catch unreachable,
    clap.parseParam("--catalog <STR>? Use catalog for version (optionally specify catalog name)") catch unreachable,
    clap.parseParam("<POS> ... \"name\" or \"name@version\" of package(s) to install") catch unreachable,
});

@@ -214,6 +216,8 @@ omit: ?Omit = null,

exact: bool = false,

catalog_name: ?string = null,

concurrent_scripts: ?usize = null,

patch: PatchOpts = .{ .nothing = .{} },

@@ -446,6 +450,10 @@ pub fn printHelp(subcommand: Subcommand) void {
    \\ <b><green>bun add<r> <cyan>--optional<r> <blue>lodash<r>
    \\ <b><green>bun add<r> <cyan>--peer<r> <blue>esbuild<r>
    \\
    \\ <d>Add with catalog reference (monorepo version management)<r>
    \\ <b><green>bun add<r> <cyan>--catalog<r> <blue>react<r>
    \\ <b><green>bun add<r> <cyan>--catalog=dev<r> <blue>typescript<r>
    \\
    \\Full documentation is available at <magenta>https://bun.com/docs/cli/add<r>.
    \\
    ;

@@ -1024,6 +1032,9 @@ pub fn parse(allocator: std.mem.Allocator, comptime subcommand: Subcommand) !Com
        cli.exact = args.flag("--exact");
        cli.analyze = args.flag("--analyze");
        cli.only_missing = args.flag("--only-missing");
        if (comptime subcommand == .add or subcommand == .install) {
            cli.catalog_name = args.option("--catalog");
        }
    }

    if (args.option("--concurrent-scripts")) |concurrency| {
@@ -5,12 +5,124 @@ const dependency_groups = &.{
    .{ "peerDependencies", .{ .peer = true } },
};

const CATALOG_PREFIX = "catalog:";

pub const EditOptions = struct {
    exact_versions: bool = false,
    add_trusted_dependencies: bool = false,
    before_install: bool = false,
    catalog_name: ?string = null,
};

/// Validates and formats a catalog reference string.
/// Returns null if the catalog name is invalid (contains whitespace or ':').
fn formatCatalogReference(allocator: std.mem.Allocator, catalog_name: ?string) !?string {
    const catalog = catalog_name orelse return null;
    const trimmed = strings.trim(catalog, " \t\r\n");

    // Validate: catalog name must not contain whitespace or ':'
    for (trimmed) |c| {
        if (c == ':' or std.ascii.isWhitespace(c)) {
            return null; // Invalid catalog name
        }
    }

    if (trimmed.len == 0) {
        return try allocator.dupe(u8, CATALOG_PREFIX);
    }
    return try std.fmt.allocPrint(allocator, "{s}{s}", .{ CATALOG_PREFIX, trimmed });
}

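For reference, formatCatalogReference maps the --catalog value to the string written as the dependency's version. A minimal TypeScript sketch of the same behavior (illustrative only, not part of the diff; names mirror the Zig helper above):

// Sketch of formatCatalogReference's behavior, for illustration.
function formatCatalogReference(catalogName: string | null): string | null {
  if (catalogName === null) return null;
  const trimmed = catalogName.trim();
  // A catalog name must not contain ':' or whitespace.
  if (/[\s:]/.test(trimmed)) return null;
  // An empty name means "use the default catalog".
  return trimmed.length === 0 ? "catalog:" : `catalog:${trimmed}`;
}

// formatCatalogReference("")    === "catalog:"     (bun add --catalog bar)
// formatCatalogReference("dev") === "catalog:dev"  (bun add --catalog=dev baz)
// formatCatalogReference("a b") === null           (invalid name; the CLI reports an error and exits)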
pub fn editCatalog(
    manager: *PackageManager,
    package_json: *Expr,
    updates: []UpdateRequest,
    catalog_name: ?string,
) !void {
    const allocator = manager.allocator;

    if (catalog_name) |name| {
        if (name.len > 0) {
            // Named catalog: catalogs.<name>
            var catalogs = brk: {
                if (package_json.asProperty("catalogs")) |query| {
                    if (query.expr.data == .e_object)
                        break :brk query.expr.data.e_object.*;
                }
                break :brk E.Object{};
            };

            var named_catalog = brk: {
                for (catalogs.properties.slice()) |prop| {
                    if (prop.key) |key| {
                        if (key.data == .e_string and strings.eqlLong(key.data.e_string.data, name, true)) {
                            if (prop.value) |val| {
                                if (val.data == .e_object) {
                                    break :brk val.data.e_object.*;
                                }
                            }
                        }
                    }
                }
                break :brk E.Object{};
            };

            // Add packages to named catalog
            const resolutions = manager.lockfile.packages.items(.resolution);
            for (updates) |request| {
                if (request.package_id < resolutions.len and resolutions[request.package_id].tag == .npm) {
                    const version_str = try std.fmt.allocPrint(
                        allocator,
                        "^{f}",
                        .{resolutions[request.package_id].value.npm.version.fmt(manager.lockfile.buffers.string_bytes.items)},
                    );
                    const version_expr = try Expr.init(E.String, E.String{ .data = version_str }, logger.Loc.Empty).clone(allocator);
                    try named_catalog.put(allocator, request.getResolvedName(manager.lockfile), version_expr);
                }
            }

            // Update catalogs.<name>
            const named_catalog_expr = try Expr.init(E.Object, named_catalog, logger.Loc.Empty).clone(allocator);
            try catalogs.put(allocator, name, named_catalog_expr);

            try package_json.data.e_object.put(
                allocator,
                "catalogs",
                try Expr.init(E.Object, catalogs, logger.Loc.Empty).clone(allocator),
            );
        } else {
            // Default catalog
            var catalog = brk: {
                if (package_json.asProperty("catalog")) |query| {
                    if (query.expr.data == .e_object)
                        break :brk query.expr.data.e_object.*;
                }
                break :brk E.Object{};
            };

            // Add packages to catalog
            const resolutions = manager.lockfile.packages.items(.resolution);
            for (updates) |request| {
                if (request.package_id < resolutions.len and resolutions[request.package_id].tag == .npm) {
                    const version_str = try std.fmt.allocPrint(
                        allocator,
                        "^{f}",
                        .{resolutions[request.package_id].value.npm.version.fmt(manager.lockfile.buffers.string_bytes.items)},
                    );
                    const version_expr = try Expr.init(E.String, E.String{ .data = version_str }, logger.Loc.Empty).clone(allocator);
                    try catalog.put(allocator, request.getResolvedName(manager.lockfile), version_expr);
                }
            }

            try package_json.data.e_object.put(
                allocator,
                "catalog",
                try Expr.init(E.Object, catalog, logger.Loc.Empty).clone(allocator),
            );
        }
    }
}

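editCatalog writes the resolved version into either the default catalog object or a named entry under catalogs, alongside the catalog: reference that edit() puts in the dependency map. The resulting package.json shapes, using the bar/baz packages from the tests below (illustrative TypeScript literals, not output captured from the diff):

// bun add --catalog bar
const defaultCatalogShape = {
  dependencies: { bar: "catalog:" },
  catalog: { bar: "^0.0.2" }, // caret-prefixed resolved version
};

// bun add --catalog=dev baz
const namedCatalogShape = {
  dependencies: { baz: "catalog:dev" },
  catalogs: { dev: { baz: "^0.0.3" } },
};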
pub fn editPatchedDependencies(
    manager: *PackageManager,
    package_json: *Expr,

@@ -677,6 +789,18 @@ pub fn edit(
    const resolutions = if (!options.before_install) manager.lockfile.packages.items(.resolution) else &.{};
    for (updates.*) |*request| {
        if (request.e_string) |e_string| {
            // If a catalog is specified, write a catalog reference instead of a concrete version
            if (options.catalog_name != null) {
                if (try formatCatalogReference(allocator, options.catalog_name)) |catalog_ref| {
                    e_string.data = catalog_ref;
                    continue;
                } else {
                    // Invalid catalog name - report the error and exit
                    Output.errGeneric("Invalid catalog name: contains whitespace or ':'", .{});
                    Global.exit(1);
                }
            }

            if (request.package_id >= resolutions.len or resolutions[request.package_id].tag == .uninitialized) {
                e_string.data = uninitialized: {
                    if (manager.subcommand == .update and manager.options.do.update_to_latest) {
@@ -783,6 +907,8 @@ const std = @import("std");

const bun = @import("bun");
const Environment = bun.Environment;
const Global = bun.Global;
const Output = bun.Output;
const Semver = bun.Semver;
const logger = bun.logger;
const strings = bun.strings;

@@ -88,6 +88,9 @@ cpu: Npm.Architecture = Npm.Architecture.current,
/// Override OS for optional dependencies filtering
os: Npm.OperatingSystem = Npm.OperatingSystem.current,

/// Catalog name for bun add --catalog
catalog_name: ?string = null,

config_version: ?bun.ConfigVersion = null,

pub const PublishConfig = struct {

@@ -570,6 +573,8 @@ pub fn load(
        this.minimum_release_age_ms = min_age_ms;
    }

    this.catalog_name = cli.catalog_name;

    this.lockfile_only = cli.lockfile_only;

    if (cli.lockfile_only) {
@@ -1,3 +1,9 @@
fn writePackageJSONToDisk(path: [:0]const u8, content: []const u8) !void {
    const file = try bun.sys.File.openat(bun.FD.cwd(), path, bun.O.WRONLY | bun.O.CREAT | bun.O.TRUNC, 0o664).unwrap();
    defer file.close();
    try file.writeAll(content).unwrap();
}

pub fn updatePackageJSONAndInstallWithManager(
    manager: *PackageManager,
    ctx: Command.Context,
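The new writePackageJSONToDisk helper above opens the file with O_TRUNC, so a shorter package.json fully replaces the previous contents; a later hunk swaps the old open/pwriteAll/ftruncate sequence for a single call to it. In Bun's JavaScript API the same semantics look roughly like this (illustrative only, not part of the diff):

// Bun.write creates the file if needed and truncates any previous contents,
// which is what the O_WRONLY | O_CREAT | O_TRUNC open in the Zig helper guarantees.
const pkg = { name: "demo", dependencies: { bar: "catalog:" } };
await Bun.write("package.json", JSON.stringify(pkg, null, 2) + "\n");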
@@ -339,6 +345,83 @@ fn updatePackageJSONAndInstallWithManagerWithUpdates(

    try manager.installWithManager(ctx, root_package_json_path, original_cwd);

    if (manager.options.catalog_name != null and (subcommand == .add or subcommand == .install)) {
        const is_workspace = !strings.eql(manager.original_package_json_path, root_package_json_path);

        if (is_workspace) {
            const root_package_json_entry = manager.workspace_package_json_cache.getWithPath(
                manager.allocator,
                manager.log,
                root_package_json_path,
                .{},
            ).unwrap() catch |err| {
                Output.err(err, "failed to read/parse root package.json at '{s}'", .{root_package_json_path});
                Global.exit(1);
            };

            try PackageJSONEditor.editCatalog(
                manager,
                &root_package_json_entry.root,
                updates.*,
                manager.options.catalog_name,
            );

            var root_buffer_writer = JSPrinter.BufferWriter.init(manager.allocator);
            try root_buffer_writer.buffer.list.ensureTotalCapacity(manager.allocator, root_package_json_entry.source.contents.len + 256);
            root_buffer_writer.append_newline = root_package_json_entry.source.contents.len > 0 and
                root_package_json_entry.source.contents[root_package_json_entry.source.contents.len - 1] == '\n';
            var root_package_json_writer = JSPrinter.BufferPrinter.init(root_buffer_writer);

            _ = JSPrinter.printJSON(
                @TypeOf(&root_package_json_writer),
                &root_package_json_writer,
                root_package_json_entry.root,
                &root_package_json_entry.source,
                .{
                    .indent = root_package_json_entry.indentation,
                    .mangled_props = null,
                },
            ) catch |err| {
                Output.prettyErrorln("root package.json failed to write due to error {s}", .{@errorName(err)});
                Global.crash();
            };

            if (manager.options.do.write_package_json) {
                try writePackageJSONToDisk(root_package_json_path, root_package_json_writer.ctx.writtenWithoutTrailingZero());
            }
        } else {
            try PackageJSONEditor.editCatalog(
                manager,
                &current_package_json.root,
                updates.*,
                manager.options.catalog_name,
            );

            var catalog_buffer_writer = JSPrinter.BufferWriter.init(manager.allocator);
            try catalog_buffer_writer.buffer.list.ensureTotalCapacity(manager.allocator, current_package_json.source.contents.len + 256);
            catalog_buffer_writer.append_newline = current_package_json.source.contents.len > 0 and
                current_package_json.source.contents[current_package_json.source.contents.len - 1] == '\n';
            var catalog_package_json_writer = JSPrinter.BufferPrinter.init(catalog_buffer_writer);

            _ = JSPrinter.printJSON(
                @TypeOf(&catalog_package_json_writer),
                &catalog_package_json_writer,
                current_package_json.root,
                &current_package_json.source,
                .{
                    .indent = current_package_json.indentation,
                    .mangled_props = null,
                },
            ) catch |err| {
                Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)});
                Global.crash();
            };

            new_package_json_source = try manager.allocator.dupe(u8, catalog_package_json_writer.ctx.writtenWithoutTrailingZero());
            current_package_json.source.contents = new_package_json_source;
        }
    }
    if (subcommand == .update or subcommand == .add or subcommand == .link) {
        for (updates.*) |request| {
            if (request.failed) {

@@ -373,6 +456,7 @@ fn updatePackageJSONAndInstallWithManagerWithUpdates(
        .{
            .exact_versions = manager.options.enable.exact_versions,
            .add_trusted_dependencies = manager.options.do.trust_dependencies_from_args,
            .catalog_name = manager.options.catalog_name,
        },
    );
}

@@ -416,16 +500,7 @@ fn updatePackageJSONAndInstallWithManagerWithUpdates(

    // Now that we've run the install step
    // We can save our in-memory package.json to disk
    const workspace_package_json_file = (try bun.sys.File.openat(
        .cwd(),
        path,
        bun.O.RDWR,
        0,
    ).unwrap()).handle.stdFile();

    try workspace_package_json_file.pwriteAll(source, 0);
    std.posix.ftruncate(workspace_package_json_file.handle, source.len) catch {};
    workspace_package_json_file.close();
    try writePackageJSONToDisk(path, source);

    if (subcommand == .remove) {
        if (!any_changes) {
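One behavioral detail worth calling out before the new tests: when the command runs inside a workspace package, the dependency entry is written to that workspace's package.json while editCatalog targets the monorepo root (see the "works in monorepo workspace" test). Illustrative end state for the test's bar package (TypeScript literals, not captured output):

// packages/pkg1/package.json, where `bun add --catalog bar` was run
const workspacePackageJson = { name: "pkg1", dependencies: { bar: "catalog:" } };

// package.json at the monorepo root receives the catalog entry
const rootPackageJson = {
  name: "monorepo-root",
  workspaces: ["packages/*"],
  catalog: { bar: "^0.0.2" },
};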
test/cli/install/bun-add-catalog.test.ts (new file, 261 lines)

@@ -0,0 +1,261 @@
import { file, spawn } from "bun";
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, test } from "bun:test";
import { existsSync } from "fs";
import { mkdir } from "fs/promises";
import { bunExe, bunEnv as env } from "harness";
import { join } from "path";
import {
  dummyAfterAll,
  dummyAfterEach,
  dummyBeforeAll,
  dummyBeforeEach,
  dummyRegistry,
  package_dir,
  setHandler,
} from "./dummy.registry";

beforeAll(dummyBeforeAll);
afterAll(dummyAfterAll);

beforeEach(async () => {
  await dummyBeforeEach();
});

afterEach(dummyAfterEach);

describe("bun add --catalog", () => {
  test("bun add --catalog adds dependency with catalog reference and populates catalog", async () => {
    // Set up handler for bar package (bar-0.0.2.tgz exists in test/cli/install/)
    setHandler(dummyRegistry([], { "0.0.2": {} }));

    // Create initial package.json WITHOUT catalog - it should be created
    await Bun.write(join(package_dir, "package.json"), JSON.stringify({ name: "test-catalog-add" }, null, 2));

    // Run bun add --catalog bar
    await using proc = spawn({
      cmd: [bunExe(), "add", "--catalog", "bar"],
      cwd: package_dir,
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    if (exitCode !== 0) {
      console.log("stdout:", stdout);
      console.log("stderr:", stderr);
    }

    // The add command should succeed
    expect(exitCode).toBe(0);

    // Verify node_modules WAS created (--catalog installs to resolve versions)
    expect(existsSync(join(package_dir, "node_modules"))).toBe(true);

    // Check that package.json was updated with catalog reference
    const updatedPackageJson = await file(join(package_dir, "package.json")).json();
    expect(updatedPackageJson.dependencies).toEqual({
      bar: "catalog:",
    });

    // Verify catalog was created with the resolved version
    expect(updatedPackageJson.catalog).toBeDefined();
    expect(updatedPackageJson.catalog["bar"]).toMatch(/^\^0\.0\.2$/);
  });

  test("bun add --catalog=name adds dependency with named catalog reference and populates catalog", async () => {
    // Set up handler for baz package (baz-0.0.3.tgz exists in test/cli/install/)
    setHandler(dummyRegistry([], { "0.0.3": {} }));

    // Create initial package.json WITHOUT named catalog - it should be created
    await Bun.write(join(package_dir, "package.json"), JSON.stringify({ name: "test-catalog-add-named" }, null, 2));

    // Run bun add --catalog=dev baz
    await using proc = spawn({
      cmd: [bunExe(), "add", "--catalog=dev", "baz"],
      cwd: package_dir,
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    if (exitCode !== 0) {
      console.log("stdout:", stdout);
      console.log("stderr:", stderr);
    }

    expect(exitCode).toBe(0);

    // Check that package.json was updated with named catalog reference
    const updatedPackageJson = await file(join(package_dir, "package.json")).json();
    expect(updatedPackageJson.dependencies).toEqual({
      baz: "catalog:dev",
    });

    // Verify named catalog was created with the resolved version
    expect(updatedPackageJson.catalogs).toBeDefined();
    expect(updatedPackageJson.catalogs.dev).toBeDefined();
    expect(updatedPackageJson.catalogs.dev["baz"]).toMatch(/^\^0\.0\.3$/);
  });

  test("bun add --catalog with --dev flag", async () => {
    // Set up handler for bar package
    setHandler(dummyRegistry([], { "0.0.2": {} }));

    // Create initial package.json
    await Bun.write(join(package_dir, "package.json"), JSON.stringify({ name: "test-catalog-add-dev" }, null, 2));

    // Run bun add --catalog --dev bar
    await using proc = spawn({
      cmd: [bunExe(), "add", "--catalog", "--dev", "bar"],
      cwd: package_dir,
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    if (exitCode !== 0) {
      console.log("stdout:", stdout);
      console.log("stderr:", stderr);
    }

    expect(exitCode).toBe(0);

    // Check that package.json was updated with catalog reference in devDependencies
    const updatedPackageJson = await file(join(package_dir, "package.json")).json();
    expect(updatedPackageJson.devDependencies).toEqual({
      bar: "catalog:",
    });

    // Verify catalog was created
    expect(updatedPackageJson.catalog).toBeDefined();
    expect(updatedPackageJson.catalog["bar"]).toMatch(/^\^0\.0\.2$/);
  });

  test("bun add --catalog works in monorepo workspace", async () => {
    // Set up handler for bar package
    setHandler(dummyRegistry([], { "0.0.2": {} }));

    // Create root package.json without catalog
    await Bun.write(
      join(package_dir, "package.json"),
      JSON.stringify({ name: "monorepo-root", workspaces: ["packages/*"] }, null, 2),
    );

    // Create workspace package
    const workspaceDir = join(package_dir, "packages", "pkg1");
    await mkdir(workspaceDir, { recursive: true });
    await Bun.write(join(workspaceDir, "package.json"), JSON.stringify({ name: "pkg1" }, null, 2));

    // Run bun add --catalog from workspace directory
    await using proc = spawn({
      cmd: [bunExe(), "add", "--catalog", "bar"],
      cwd: workspaceDir,
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    if (exitCode !== 0) {
      console.log("stdout:", stdout);
      console.log("stderr:", stderr);
    }

    expect(exitCode).toBe(0);

    // Check that workspace package.json was updated with catalog reference
    const updatedWorkspacePackageJson = await file(join(workspaceDir, "package.json")).json();
    expect(updatedWorkspacePackageJson.dependencies).toEqual({
      bar: "catalog:",
    });

    // Verify root package.json catalog was created
    const updatedRootPackageJson = await file(join(package_dir, "package.json")).json();
    expect(updatedRootPackageJson.catalog).toBeDefined();
    expect(updatedRootPackageJson.catalog["bar"]).toMatch(/^\^0\.0\.2$/);
  });

  test("bun add --catalog multiple packages", async () => {
    // Set up handler that handles multiple packages
    setHandler(req => {
      const url = req.url;
      if (url.includes("bar")) {
        return dummyRegistry([], { "0.0.2": {} })(req);
      } else if (url.includes("baz")) {
        return dummyRegistry([], { "0.0.3": {} })(req);
      }
      return new Response("Not found", { status: 404 });
    });

    // Create initial package.json
    await Bun.write(join(package_dir, "package.json"), JSON.stringify({ name: "test-catalog-add-multiple" }, null, 2));

    // Run bun add --catalog with multiple packages
    await using proc = spawn({
      cmd: [bunExe(), "add", "--catalog", "bar", "baz"],
      cwd: package_dir,
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    if (exitCode !== 0) {
      console.log("stdout:", stdout);
      console.log("stderr:", stderr);
    }

    expect(exitCode).toBe(0);

    // Check that package.json was updated with catalog references for both
    const updatedPackageJson = await file(join(package_dir, "package.json")).json();
    expect(updatedPackageJson.dependencies).toEqual({
      bar: "catalog:",
      baz: "catalog:",
    });

    // Verify catalog was created with both packages
    expect(updatedPackageJson.catalog).toBeDefined();
    expect(updatedPackageJson.catalog["bar"]).toMatch(/^\^0\.0\.2$/);
    expect(updatedPackageJson.catalog["baz"]).toMatch(/^\^0\.0\.3$/);
  });

  test("bun add --catalog --no-save does not modify package.json", async () => {
    // Set up handler for bar package
    setHandler(dummyRegistry([], { "0.0.2": {} }));

    // Create initial package.json
    const initialContent = JSON.stringify({ name: "test-no-save" }, null, 2);
    await Bun.write(join(package_dir, "package.json"), initialContent);

    // Run bun add --catalog --no-save bar
    await using proc = spawn({
      cmd: [bunExe(), "add", "--catalog", "--no-save", "bar"],
      cwd: package_dir,
      env,
      stdout: "pipe",
      stderr: "pipe",
    });

    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);

    if (exitCode !== 0) {
      console.log("stdout:", stdout);
      console.log("stderr:", stderr);
    }

    expect(exitCode).toBe(0);

    // Verify package.json was not modified
    const finalContent = await file(join(package_dir, "package.json")).text();
    expect(finalContent).toBe(initialContent);
  });
});
@@ -8,7 +8,7 @@
  ".jsBoolean(false)": 0,
  ".jsBoolean(true)": 0,
  ".stdDir()": 42,
  ".stdFile()": 16,
  ".stdFile()": 15,
  "// autofix": 148,
  ": [^=]+= undefined,$": 256,
  "== alloc.ptr": 0,