Add comprehensive tests for bun pm pkg array append functionality

Adds extensive test coverage for the array append syntax (`key[]=value`),
which provides npm compatibility for package.json manipulation.
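For example, `bun pm pkg set keywords[]=newkeyword` appends "newkeyword" to an
existing `keywords` array, creating the array first when the property does not exist.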

Also implements the Zig code that handles array appends (a sketch of the behavior follows this list):
- Detects key[]=value syntax and routes to array append logic
- Creates new arrays when property doesn't exist
- Appends to existing arrays correctly
- Handles nested array append (config.tags[]=value)
- Provides proper error handling for non-array properties
- Maintains array structure and order
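
As a rough illustration of those rules (not the actual implementation, which edits
the package.json AST in Zig via `setValue`/`appendToArray`), a minimal TypeScript
sketch over plain objects might look like this; the `applyArrayAppend` helper name
is hypothetical:

```ts
type Json = string | number | boolean | null | Json[] | { [key: string]: Json };

// Hypothetical helper: applies one "path[]=value" assignment to a plain object.
function applyArrayAppend(pkg: { [key: string]: Json }, key: string, value: Json): void {
  if (!key.endsWith("[]")) throw new Error("not an array append");
  // "config.tags[]" -> ["config", "tags"]
  const path = key.slice(0, -2).split(".");
  let target: { [key: string]: Json } = pkg;

  // Walk (and create) intermediate objects for nested keys like "config.tags".
  for (const part of path.slice(0, -1)) {
    const next = target[part];
    if (typeof next !== "object" || next === null || Array.isArray(next)) {
      target[part] = {};
    }
    target = target[part] as { [key: string]: Json };
  }

  const last = path[path.length - 1];
  const existing = target[last];
  if (existing === undefined) {
    // Create a new array when the property doesn't exist.
    target[last] = [value];
  } else if (Array.isArray(existing)) {
    // Append to the existing array, preserving order.
    existing.push(value);
  } else {
    throw new Error(`Property ${key} already exists and is not an Array or Object.`);
  }
}

// Example: append to an existing array, then create a nested one.
const pkg: { [key: string]: Json } = { keywords: ["test", "package"] };
applyArrayAppend(pkg, "keywords[]", "newkeyword"); // ["test", "package", "newkeyword"]
applyArrayAppend(pkg, "config.tags[]", "newtag"); // config becomes { tags: ["newtag"] }
```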

Tests cover:
- Appending to existing arrays
- Creating new arrays when property doesn't exist
- Multiple array appends in single command
- Nested array append syntax (config.tags[]=value)
- Error handling for non-array properties
- JSON value parsing with --json flag
- Complex object appending
- Array order preservation
- Compatibility with npm pkg behavior

All tests pass: 83 pass, 3 todo, 0 fail (275 expect() calls)

Fixes #22035

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
Claude Bot committed 2025-08-25 05:07:09 +00:00
commit 786d372068 (parent fe3cbce1f0)
2 changed files with 248 additions and 1 deletion

@@ -221,7 +221,13 @@ pub const PmPkgCommand = struct {
             Global.exit(1);
         }
-        try setValue(ctx.allocator, &root, key, value, parse_json);
+        setValue(ctx.allocator, &root, key, value, parse_json) catch |err| {
+            if (err == error.ArrayAppendToNonArray) {
+                Output.errGeneric("Property {s} already exists and is not an Array or Object.", .{key});
+                Global.exit(1);
+            }
+            return err;
+        };
         modified = true;
     }
@@ -493,11 +499,131 @@ pub const PmPkgCommand = struct {
         return path_parts;
     }
+    fn appendToArray(allocator: std.mem.Allocator, root: *js_ast.Expr, key: []const u8, value: []const u8, parse_json: bool) !void {
+        const new_value = try parseValue(allocator, value, parse_json);
+        // Handle nested keys like "config.tags"
+        if (strings.indexOf(key, ".")) |_| {
+            return try appendToNestedArray(allocator, root, key, new_value);
+        }
+        // Simple case: top-level key
+        const existing = root.get(key);
+        if (existing == null) {
+            // Create new array
+            const array_items = try allocator.alloc(js_ast.Expr, 1);
+            array_items[0] = new_value;
+            const new_array = js_ast.Expr.init(js_ast.E.Array, js_ast.E.Array{
+                .items = js_ast.ExprNodeList.init(array_items),
+                .close_bracket_loc = logger.Loc.Empty,
+                .is_single_line = true,
+            }, logger.Loc.Empty);
+            try root.data.e_object.put(allocator, key, new_array);
+        } else {
+            // Append to existing
+            if (existing.?.data != .e_array) {
+                return error.ArrayAppendToNonArray;
+            }
+            const old_items = existing.?.data.e_array.items.slice();
+            const new_items = try allocator.alloc(js_ast.Expr, old_items.len + 1);
+            @memcpy(new_items[0..old_items.len], old_items);
+            new_items[old_items.len] = new_value;
+            const updated_array = js_ast.Expr.init(js_ast.E.Array, js_ast.E.Array{
+                .items = js_ast.ExprNodeList.init(new_items),
+                .close_bracket_loc = existing.?.data.e_array.close_bracket_loc,
+                .is_single_line = existing.?.data.e_array.is_single_line,
+            }, existing.?.loc);
+            try root.data.e_object.put(allocator, key, updated_array);
+        }
+    }
+    fn appendToNestedArray(allocator: std.mem.Allocator, root: *js_ast.Expr, key: []const u8, new_value: js_ast.Expr) !void {
+        var parts = std.mem.tokenizeScalar(u8, key, '.');
+        var path_parts = std.ArrayList([]const u8).init(allocator);
+        defer path_parts.deinit();
+        while (parts.next()) |part| {
+            try path_parts.append(part);
+        }
+        if (path_parts.items.len == 0) {
+            return error.EmptyKey;
+        }
+        return try appendToNestedArrayRecursive(allocator, root, path_parts.items, new_value);
+    }
+    fn appendToNestedArrayRecursive(allocator: std.mem.Allocator, root: *js_ast.Expr, path: []const []const u8, new_value: js_ast.Expr) !void {
+        if (path.len == 0) return;
+        const current_key = path[0];
+        const remaining_path = path[1..];
+        if (remaining_path.len == 0) {
+            // This is the final key - handle array append
+            const existing = root.get(current_key);
+            if (existing == null) {
+                // Create new array
+                const array_items = try allocator.alloc(js_ast.Expr, 1);
+                array_items[0] = new_value;
+                const new_array = js_ast.Expr.init(js_ast.E.Array, js_ast.E.Array{
+                    .items = js_ast.ExprNodeList.init(array_items),
+                    .close_bracket_loc = logger.Loc.Empty,
+                    .is_single_line = true,
+                }, logger.Loc.Empty);
+                try root.data.e_object.put(allocator, current_key, new_array);
+            } else {
+                // Append to existing
+                if (existing.?.data != .e_array) {
+                    return error.ArrayAppendToNonArray;
+                }
+                const old_items = existing.?.data.e_array.items.slice();
+                const new_items = try allocator.alloc(js_ast.Expr, old_items.len + 1);
+                @memcpy(new_items[0..old_items.len], old_items);
+                new_items[old_items.len] = new_value;
+                const updated_array = js_ast.Expr.init(js_ast.E.Array, js_ast.E.Array{
+                    .items = js_ast.ExprNodeList.init(new_items),
+                    .close_bracket_loc = existing.?.data.e_array.close_bracket_loc,
+                    .is_single_line = existing.?.data.e_array.is_single_line,
+                }, existing.?.loc);
+                try root.data.e_object.put(allocator, current_key, updated_array);
+            }
+            return;
+        }
+        // Navigate deeper
+        var nested_obj = root.get(current_key);
+        if (nested_obj == null or nested_obj.?.data != .e_object) {
+            const new_obj = js_ast.Expr.init(js_ast.E.Object, js_ast.E.Object{}, logger.Loc.Empty);
+            try root.data.e_object.put(allocator, current_key, new_obj);
+            nested_obj = root.get(current_key);
+        }
+        if (nested_obj.?.data != .e_object) {
+            return error.ExpectedObject;
+        }
+        var nested = nested_obj.?;
+        try appendToNestedArrayRecursive(allocator, &nested, remaining_path, new_value);
+        try root.data.e_object.put(allocator, current_key, nested);
+    }
     fn setValue(allocator: std.mem.Allocator, root: *js_ast.Expr, key: []const u8, value: []const u8, parse_json: bool) !void {
         if (root.data != .e_object) {
             return error.InvalidRoot;
         }
+        // Check for array append syntax (key[]=value)
+        if (strings.endsWith(key, "[]")) {
+            const array_key = key[0..key.len - 2];
+            return try appendToArray(allocator, root, array_key, value, parse_json);
+        }
         if (strings.indexOf(key, "[") == null) {
             var parts = std.mem.tokenizeScalar(u8, key, '.');
             var path_parts = std.ArrayList([]const u8).init(allocator);

@@ -1099,6 +1099,127 @@ describe("bun pm pkg", () => {
     });
   });
describe("array append functionality (npm compatibility)", () => {
it("should append to existing array using key[]=value syntax", async () => {
const { code } = await runPmPkg(["set", "keywords[]=newkeyword"], testDir!);
expect(code).toBe(0);
const { output: getOutput } = await runPmPkg(["get", "keywords"], testDir!);
const parsed = JSON.parse(getOutput);
expect(Array.isArray(parsed)).toBe(true);
expect(parsed).toContain("test");
expect(parsed).toContain("package");
expect(parsed).toContain("newkeyword");
expect(parsed).toHaveLength(3);
});
it("should create new array when property doesn't exist using key[]=value syntax", async () => {
const { code } = await runPmPkg(["set", "newarrayfield[]=firstitem"], testDir!);
expect(code).toBe(0);
const { output: getOutput } = await runPmPkg(["get", "newarrayfield"], testDir!);
const parsed = JSON.parse(getOutput);
expect(Array.isArray(parsed)).toBe(true);
expect(parsed).toEqual(["firstitem"]);
});
it("should append multiple items to array in single command", async () => {
const { code } = await runPmPkg(["set", "keywords[]=first", "keywords[]=second"], testDir!);
expect(code).toBe(0);
const { output: getOutput } = await runPmPkg(["get", "keywords"], testDir!);
const parsed = JSON.parse(getOutput);
expect(Array.isArray(parsed)).toBe(true);
expect(parsed).toContain("test");
expect(parsed).toContain("package");
expect(parsed).toContain("first");
expect(parsed).toContain("second");
expect(parsed).toHaveLength(4);
});
it("should handle nested array append syntax", async () => {
const { code } = await runPmPkg(["set", "config.tags[]=newtag"], testDir!);
expect(code).toBe(0);
const { output: getOutput } = await runPmPkg(["get", "config.tags"], testDir!);
const parsed = JSON.parse(getOutput);
expect(Array.isArray(parsed)).toBe(true);
expect(parsed).toEqual(["newtag"]);
});
it("should append to nested existing array", async () => {
// First create a nested array
const { code: setupCode } = await runPmPkg(["set", 'nested.items=["existing"]', "--json"], testDir!);
expect(setupCode).toBe(0);
// Then append to it
const { code } = await runPmPkg(["set", "nested.items[]=appended"], testDir!);
expect(code).toBe(0);
const { output: getOutput } = await runPmPkg(["get", "nested.items"], testDir!);
const parsed = JSON.parse(getOutput);
expect(Array.isArray(parsed)).toBe(true);
expect(parsed).toEqual(["existing", "appended"]);
});
it("should error when trying to append to non-array property", async () => {
const { error, code } = await runPmPkg(["set", "name[]=invalid"], testDir!, false);
expect(code).toBe(1);
expect(error).toContain("Property name[] already exists and is not an Array or Object");
});
it("should handle array append with JSON values", async () => {
const { code } = await runPmPkg(["set", 'numbers[]=42', "--json"], testDir!);
expect(code).toBe(0);
const { output: getOutput } = await runPmPkg(["get", "numbers"], testDir!);
const parsed = JSON.parse(getOutput);
expect(Array.isArray(parsed)).toBe(true);
expect(parsed).toEqual([42]);
expect(typeof parsed[0]).toBe("number");
});
it("should handle array append with complex JSON objects", async () => {
const { code } = await runPmPkg(["set", 'contributors[]={"name":"New Contributor","email":"new@example.com"}', "--json"], testDir!);
expect(code).toBe(0);
const { output: getOutput } = await runPmPkg(["get", "contributors"], testDir!);
const parsed = JSON.parse(getOutput);
expect(Array.isArray(parsed)).toBe(true);
expect(parsed).toHaveLength(3); // Original 2 plus 1 new
expect(parsed[2]).toEqual({
name: "New Contributor",
email: "new@example.com"
});
});
it("should maintain array order when appending", async () => {
const { code } = await runPmPkg(["set", "order[]=first", "order[]=second", "order[]=third"], testDir!);
expect(code).toBe(0);
const { output: getOutput } = await runPmPkg(["get", "order"], testDir!);
const parsed = JSON.parse(getOutput);
expect(parsed).toEqual(["first", "second", "third"]);
});
it("should preserve existing array structure when appending", async () => {
// Get original keywords array
const { output: originalOutput } = await runPmPkg(["get", "keywords"], testDir!);
const originalKeywords = JSON.parse(originalOutput);
// Append new item
const { code } = await runPmPkg(["set", "keywords[]=appended"], testDir!);
expect(code).toBe(0);
// Verify structure is preserved with new item added
const { output: newOutput } = await runPmPkg(["get", "keywords"], testDir!);
const newKeywords = JSON.parse(newOutput);
expect(newKeywords.slice(0, originalKeywords.length)).toEqual(originalKeywords);
expect(newKeywords[newKeywords.length - 1]).toBe("appended");
});
});
   // npm does the actual "" key, but bun right now doesn't support it
   describe.todo("empty string key compatibility", () => {
     let emptyKeyDir: string;