Compare commits

...

3 Commits

Author SHA1 Message Date
claude[bot]
8e3c46a56c fix: Fix compile errors in package_manager_command.zig
- Remove unused `path` local constant in package_manager_command.zig
- Improve error message in zig-clap to provide more context about missing parameters

Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2025-05-22 06:40:47 +00:00
Jarred-Sumner
244513116f bun run prettier 2025-05-21 19:36:09 +00:00
Jarred Sumner
776cf69a1d feat(pm): add --json option to ls 2025-05-21 12:33:20 -07:00
6 changed files with 215 additions and 24 deletions

View File

@@ -67,6 +67,16 @@ $ bun pm ls --all
├── ...
```
To output the list in JSON format, use `--json`. This can be combined with `--all`.
```bash
$ bun pm ls --json
["eslint@8.38.0", "react@18.2.0", ...]
$ bun pm ls --all --json
["@eslint-community/eslint-utils@4.4.0", ...]
```
## whoami
Print your npm username. Requires you to be logged in (`bunx npm login`) with credentials in either `bunfig.toml` or `.npmrc`:

View File

@@ -166,6 +166,13 @@ my-pkg node_modules
...
```
You can output the list in JSON format with `--json`:
```sh
$ bun pm ls --json
["@types/node@20.16.5", "@types/react@18.3.8", ...]
```
---
## Create a package tarball

View File

@@ -317,6 +317,7 @@ pub const PackageManagerCommand = struct {
Output.flush();
Output.disableBuffering();
const json_output = strings.leftHasAnyInRight(args, &.{ "--json" }) or pm.options.json_output;
const lockfile = load_lockfile.ok.lockfile;
var iterator = Lockfile.Tree.Iterator(.node_modules).init(lockfile);
@@ -349,45 +350,81 @@ pub const PackageManagerCommand = struct {
@memset(more_packages, false);
if (first_directory.dependencies.len > 1) more_packages[0] = true;
if (strings.leftHasAnyInRight(args, &.{ "-A", "-a", "--all" })) {
try printNodeModulesFolderStructure(&first_directory, null, 0, &directories, lockfile, more_packages);
if (!json_output) {
if (strings.leftHasAnyInRight(args, &.{ "-A", "-a", "--all" })) {
try printNodeModulesFolderStructure(&first_directory, null, 0, &directories, lockfile, more_packages);
} else {
var cwd_buf: bun.PathBuffer = undefined;
const path = bun.getcwd(&cwd_buf) catch {
Output.prettyErrorln("<r><red>error<r>: Could not get current working directory", .{});
Global.exit(1);
};
const dependencies = lockfile.buffers.dependencies.items;
const slice = lockfile.packages.slice();
const resolutions = slice.items(.resolution);
const root_deps = slice.items(.dependencies)[0];
Output.println("{s} node_modules ({d})", .{ path, lockfile.buffers.hoisted_dependencies.items.len });
const string_bytes = lockfile.buffers.string_bytes.items;
const sorted_dependencies = try ctx.allocator.alloc(DependencyID, root_deps.len);
defer ctx.allocator.free(sorted_dependencies);
for (sorted_dependencies, 0..) |*dep, i| {
dep.* = @as(DependencyID, @truncate(root_deps.off + i));
}
std.sort.pdq(DependencyID, sorted_dependencies, ByName{
.dependencies = dependencies,
.buf = string_bytes,
}, ByName.isLessThan);
for (sorted_dependencies, 0..) |dependency_id, index| {
const package_id = lockfile.buffers.resolutions.items[dependency_id];
if (package_id >= lockfile.packages.len) continue;
const name = dependencies[dependency_id].name.slice(string_bytes);
const resolution = resolutions[package_id].fmt(string_bytes, .auto);
if (index < sorted_dependencies.len - 1) {
Output.prettyln("<d>├──<r> {s}<r><d>@{any}<r>\n", .{ name, resolution });
} else {
Output.prettyln("<d>└──<r> {s}<r><d>@{any}<r>\n", .{ name, resolution });
}
}
}
} else if (strings.leftHasAnyInRight(args, &.{ "-A", "-a", "--all" })) {
try Output.writer().writeByte('[');
var first: bool = true;
try printNodeModulesFolderStructureJSON(&first_directory, &directories, lockfile, &first);
try Output.writer().writeAll("]\n");
} else {
var cwd_buf: bun.PathBuffer = undefined;
const path = bun.getcwd(&cwd_buf) catch {
Output.prettyErrorln("<r><red>error<r>: Could not get current working directory", .{});
Global.exit(1);
};
const dependencies = lockfile.buffers.dependencies.items;
const slice = lockfile.packages.slice();
const resolutions = slice.items(.resolution);
const root_deps = slice.items(.dependencies)[0];
Output.println("{s} node_modules ({d})", .{ path, lockfile.buffers.hoisted_dependencies.items.len });
try Output.writer().writeByte('[');
var first: bool = true;
const string_bytes = lockfile.buffers.string_bytes.items;
const sorted_dependencies = try ctx.allocator.alloc(DependencyID, root_deps.len);
defer ctx.allocator.free(sorted_dependencies);
for (sorted_dependencies, 0..) |*dep, i| {
dep.* = @as(DependencyID, @truncate(root_deps.off + i));
}
std.sort.pdq(DependencyID, sorted_dependencies, ByName{
.dependencies = dependencies,
.buf = string_bytes,
}, ByName.isLessThan);
std.sort.pdq(DependencyID, sorted_dependencies, ByName{ .dependencies = dependencies, .buf = string_bytes }, ByName.isLessThan);
for (sorted_dependencies, 0..) |dependency_id, index| {
for (sorted_dependencies) |dependency_id| {
const package_id = lockfile.buffers.resolutions.items[dependency_id];
if (package_id >= lockfile.packages.len) continue;
const name = dependencies[dependency_id].name.slice(string_bytes);
const resolution = resolutions[package_id].fmt(string_bytes, .auto);
if (index < sorted_dependencies.len - 1) {
Output.prettyln("<d>├──<r> {s}<r><d>@{any}<r>\n", .{ name, resolution });
} else {
Output.prettyln("<d>└──<r> {s}<r><d>@{any}<r>\n", .{ name, resolution });
}
var res_buf: [512]u8 = undefined;
const ver = try std.fmt.bufPrint(&res_buf, "{}", .{resolutions[package_id].fmt(string_bytes, .auto)});
const entry = try std.fmt.allocPrint(ctx.allocator, "{s}@{s}", .{ name, ver });
defer ctx.allocator.free(entry);
if (!first) try Output.writer().writeByte(',');
first = false;
try Output.writer().print("{}", .{bun.fmt.formatJSONStringUTF8(entry, .{})});
}
try Output.writer().writeAll("]\n");
}
Global.exit(0);
} else if (strings.eqlComptime(subcommand, "migrate")) {
if (!pm.options.enable.force_save_lockfile) {
@@ -562,3 +599,47 @@ fn printNodeModulesFolderStructure(
Output.prettyln("{s}<d>@{s}<r>", .{ package_name, package_version });
}
}
/// Recursively emits the packages of `directory` (and any nested
/// node_modules folders still pending in `directories`) as JSON string
/// entries of the form "name@version", comma-separated.
/// The caller is responsible for writing the surrounding '[' and ']';
/// `first` is shared across recursive calls and tracks whether a
/// separating comma must be written before the next entry.
fn printNodeModulesFolderStructureJSON(
    directory: *const NodeModulesFolder,
    directories: *std.ArrayList(NodeModulesFolder),
    lockfile: *Lockfile,
    first: *bool,
) !void {
    const allocator = lockfile.allocator;
    const resolutions = lockfile.packages.items(.resolution);
    const string_bytes = lockfile.buffers.string_bytes.items;
    const dependencies = lockfile.buffers.dependencies.items;
    // Sort this folder's dependencies by name so the JSON output order
    // is deterministic regardless of lockfile ordering.
    const sorted_dependencies = try allocator.alloc(DependencyID, directory.dependencies.len);
    defer allocator.free(sorted_dependencies);
    bun.copy(DependencyID, sorted_dependencies, directory.dependencies);
    std.sort.pdq(DependencyID, sorted_dependencies, ByName{ .dependencies = dependencies, .buf = string_bytes }, ByName.isLessThan);
    for (sorted_dependencies) |dependency_id| {
        const package_id = lockfile.buffers.resolutions.items[dependency_id];
        // Skip dependencies that never resolved to a package.
        if (package_id >= lockfile.packages.len) continue;
        const name = dependencies[dependency_id].name.slice(string_bytes);
        // NOTE(review): fixed 512-byte scratch buffer — assumes no
        // formatted resolution exceeds this; bufPrint returns an error
        // (propagated by `try`) otherwise.
        var res_buf: [512]u8 = undefined;
        const version = try std.fmt.bufPrint(&res_buf, "{}", .{resolutions[package_id].fmt(string_bytes, .auto)});
        const entry = try std.fmt.allocPrint(allocator, "{s}@{s}", .{ name, version });
        defer allocator.free(entry);
        // Comma-separate entries across the whole recursive traversal.
        if (!first.*) try Output.writer().writeByte(',');
        first.* = false;
        // formatJSONStringUTF8 handles quoting/escaping of the entry.
        try Output.writer().print("{}", .{bun.fmt.formatJSONStringUTF8(entry, .{})});
        // If this package owns a nested node_modules folder queued in
        // `directories`, recurse into it depth-first and remove it from
        // the pending list so it is not emitted twice.
        const fmt = "{s}" ++ std.fs.path.sep_str ++ "{s}" ++ std.fs.path.sep_str ++ "node_modules";
        const possible_path = try std.fmt.allocPrint(allocator, fmt, .{ directory.relative_path, name });
        defer allocator.free(possible_path);
        var dir_index: usize = 0;
        while (dir_index < directories.items.len) : (dir_index += 1) {
            if (strings.eqlLong(possible_path, directories.items[dir_index].relative_path, true)) {
                const next = directories.orderedRemove(dir_index);
                try printNodeModulesFolderStructureJSON(&next, directories, lockfile, first);
                break;
            }
        }
    }
}

View File

@@ -184,7 +184,7 @@ pub fn ComptimeClap(
}
}
@compileError(name ++ " is not a parameter.");
@compileError(name ++ " is not a parameter. Make sure it's defined in the params list.");
}
}
};

View File

@@ -7213,7 +7213,7 @@ pub const PackageManager = struct {
pack_destination: string = "",
pack_filename: string = "",
pack_gzip_level: ?string = null,
// json_output: bool = false,
json_output: bool = false,
max_retry_count: u16 = 5,
min_simultaneous_requests: usize = 4,
@@ -7627,7 +7627,7 @@ pub const PackageManager = struct {
this.pack_destination = cli.pack_destination;
this.pack_filename = cli.pack_filename;
this.pack_gzip_level = cli.pack_gzip_level;
// this.json_output = cli.json_output;
this.json_output = cli.json_output;
if (cli.no_cache) {
this.enable.manifest_cache = false;
@@ -9698,6 +9698,7 @@ pub const PackageManager = struct {
clap.parseParam("--destination <STR> The directory the tarball will be saved in") catch unreachable,
clap.parseParam("--filename <STR> The filename of the tarball") catch unreachable,
clap.parseParam("--gzip-level <STR> Specify a custom compression level for gzip. Default is 9.") catch unreachable,
clap.parseParam("--json") catch unreachable,
clap.parseParam("<POS> ... ") catch unreachable,
});
@@ -9785,7 +9786,7 @@ pub const PackageManager = struct {
trusted: bool = false,
no_summary: bool = false,
latest: bool = false,
// json_output: bool = false,
json_output: bool = false,
filters: []const string = &.{},
pack_destination: string = "",
@@ -10154,6 +10155,9 @@ pub const PackageManager = struct {
cli.ignore_scripts = args.flag("--ignore-scripts");
cli.trusted = args.flag("--trust");
cli.no_summary = args.flag("--no-summary");
if (comptime subcommand == .pm or subcommand == .outdated) {
cli.json_output = args.flag("--json");
}
cli.ca = args.options("--ca");
cli.lockfile_only = args.flag("--lockfile-only");

View File

@@ -79,6 +79,95 @@ it("should list top-level dependency", async () => {
expect(requested).toBe(2);
});
it("should list all dependencies as json", async () => {
  const requestedUrls: string[] = [];
  setHandler(dummyRegistry(requestedUrls));
  // Root package depends on a local workspace-style folder dependency.
  const rootPkg = {
    name: "foo",
    version: "0.0.1",
    dependencies: { moo: "./moo" },
  };
  await writeFile(join(package_dir, "package.json"), JSON.stringify(rootPkg));
  await mkdir(join(package_dir, "moo"));
  // The folder dependency itself pulls in a registry package.
  const mooPkg = {
    name: "moo",
    version: "0.1.0",
    dependencies: { bar: "latest" },
  };
  await writeFile(join(package_dir, "moo", "package.json"), JSON.stringify(mooPkg));
  // Install first so the lockfile and node_modules exist for `pm ls`.
  {
    const install = spawn({
      cmd: [bunExe(), "install"],
      cwd: package_dir,
      stdout: "pipe",
      stdin: "pipe",
      stderr: "pipe",
      env,
    });
    await new Response(install.stderr).text();
    await install.exited;
  }
  requestedUrls.length = 0;
  // `--all --json` should print every installed package as one JSON array.
  const ls = spawn({
    cmd: [bunExe(), "pm", "ls", "--all", "--json"],
    cwd: package_dir,
    stdout: "pipe",
    stdin: "pipe",
    stderr: "pipe",
    env,
  });
  expect(await new Response(ls.stderr).text()).toBe("");
  expect(await new Response(ls.stdout).text()).toBe('["bar@0.0.2","moo@moo"]\n');
  expect(await ls.exited).toBe(0);
});
it("should list top-level dependency as json", async () => {
  const requestedUrls: string[] = [];
  setHandler(dummyRegistry(requestedUrls));
  // Root package with a single local folder dependency and no transitives.
  const rootPkg = {
    name: "foo",
    version: "0.0.1",
    dependencies: { moo: "./moo" },
  };
  await writeFile(join(package_dir, "package.json"), JSON.stringify(rootPkg));
  await mkdir(join(package_dir, "moo"));
  await writeFile(join(package_dir, "moo", "package.json"), JSON.stringify({ name: "moo", version: "0.1.0" }));
  // Run the install so `pm ls` has a lockfile to read.
  {
    const install = spawn({
      cmd: [bunExe(), "install"],
      cwd: package_dir,
      stdout: "pipe",
      stdin: "pipe",
      stderr: "pipe",
      env,
    });
    await new Response(install.stderr).text();
    await install.exited;
  }
  requestedUrls.length = 0;
  // Without `--all`, only direct dependencies appear in the JSON array.
  const ls = spawn({
    cmd: [bunExe(), "pm", "ls", "--json"],
    cwd: package_dir,
    stdout: "pipe",
    stdin: "pipe",
    stderr: "pipe",
    env,
  });
  expect(await new Response(ls.stderr).text()).toBe("");
  expect(await new Response(ls.stdout).text()).toBe('["moo@moo"]\n');
  expect(await ls.exited).toBe(0);
});
it("should list all dependencies", async () => {
const urls: string[] = [];
setHandler(dummyRegistry(urls));