From 6898e8a76e16df23f14bbcac1a25cf5cd332f2ee Mon Sep 17 00:00:00 2001
From: "autofix-ci[bot]" <114827586+autofix-ci[bot]@users.noreply.github.com>
Date: Sun, 7 Sep 2025 13:26:44 +0000
Subject: [PATCH] [autofix.ci] apply automated fixes

---
 src/cli.zig                    |   2 +-
 src/cli/stats_command.zig      | 257 ++++++++++++++-------------------
 test/cli/stats-command.test.ts |  40 ++---
 3 files changed, 121 insertions(+), 178 deletions(-)

diff --git a/src/cli.zig b/src/cli.zig
index a6acbf7803..5f2bd2bdd5 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -1300,7 +1300,7 @@ pub const Command = struct {
                     \\Analyzes JavaScript, TypeScript, CSS, and JSON files.
                     \\
                 ;
-
+
                 Output.pretty(intro_text, .{});
                 Output.flush();
             },
diff --git a/src/cli/stats_command.zig b/src/cli/stats_command.zig
index 249fd7a485..87a7871e7b 100644
--- a/src/cli/stats_command.zig
+++ b/src/cli/stats_command.zig
@@ -1,18 +1,3 @@
-const std = @import("std");
-const bun = @import("bun");
-const Output = bun.Output;
-const Global = bun.Global;
-const Environment = bun.Environment;
-const Command = @import("../cli.zig").Command;
-const strings = bun.strings;
-const logger = bun.logger;
-const options = @import("../options.zig");
-const transpiler = @import("../transpiler.zig");
-const BundleV2 = @import("../bundler/bundle_v2.zig").BundleV2;
-const Graph = @import("../bundler/Graph.zig");
-const BundledAst = @import("../ast/BundledAst.zig");
-const ImportRecord = @import("../import_record.zig").ImportRecord;
-
 pub const StatsCommand = struct {
     const FileStats = struct {
         files: u32 = 0,
@@ -44,33 +29,34 @@ pub const StatsCommand = struct {
     fn countLinesAndLOC(content: []const u8) struct { lines: u32, loc: u32 } {
         var lines: u32 = if (content.len > 0) 1 else 0;
         var loc: u32 = 0;
-
+
         var i: usize = 0;
         var line_start: usize = 0;
         var in_block_comment = false;
-
+
         while (i < content.len) : (i += 1) {
             if (content[i] == '\n') {
                 const line = content[line_start..i];
                 const trimmed = std.mem.trim(u8, line, " \t\r");
-
+
                 // Check for block comments
                 if (std.mem.indexOf(u8, trimmed, "/*") != null) {
                     in_block_comment = true;
                 }
                 if (std.mem.indexOf(u8, trimmed, "*/") != null) {
                     in_block_comment = false;
-                } else if (trimmed.len > 0 and
-                    !in_block_comment and
-                    !std.mem.startsWith(u8, trimmed, "//")) {
+                } else if (trimmed.len > 0 and
+                    !in_block_comment and
+                    !std.mem.startsWith(u8, trimmed, "//"))
+                {
                     loc += 1;
                 }
-
+
                 lines += 1;
                 line_start = i + 1;
             }
         }
-
+
         // Handle last line without newline
         if (line_start < content.len) {
             const line = content[line_start..];
@@ -79,7 +65,7 @@ pub const StatsCommand = struct {
                 loc += 1;
             }
         }
-
+
         return .{ .lines = lines, .loc = loc };
     }

@@ -93,113 +79,78 @@ pub const StatsCommand = struct {
     fn printTable(stats: *const CategoryStats, workspace_package_names: []const []const u8) void {
         _ = workspace_package_names; // TODO: implement workspace package stats
-
+
         // Print header
-        Output.pretty("+{s:-<18}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+\n", .{
-            "-", "-", "-", "-", "-", "-"
-        });
-        Output.pretty("| {s:<16} | {s:>7} | {s:>7} | {s:>7} | {s:>7} | {s:>7} |\n", .{
-            "Name", "Files", "Lines", "LOC", "Imports", "Exports"
-        });
-        Output.pretty("+{s:-<18}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+\n", .{
-            "-", "-", "-", "-", "-", "-"
-        });
-
+        Output.pretty("+{s:-<18}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+\n", .{ "-", "-", "-", "-", "-", "-" });
+        Output.pretty("| {s:<16} | {s:>7} | {s:>7} | {s:>7} | {s:>7} | {s:>7} |\n", .{ "Name", "Files", "Lines", "LOC", "Imports", "Exports" });
+        Output.pretty("+{s:-<18}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+\n", .{ "-", "-", "-", "-", "-", "-" });
+
         // Print rows
         if (stats.typescript.files > 0) {
-            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {d:>7} | {d:>7} |\n", .{
-                "TypeScript", stats.typescript.files, stats.typescript.lines,
-                stats.typescript.loc, stats.typescript.imports, stats.typescript.exports
-            });
+            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {d:>7} | {d:>7} |\n", .{ "TypeScript", stats.typescript.files, stats.typescript.lines, stats.typescript.loc, stats.typescript.imports, stats.typescript.exports });
         }
-
+
         if (stats.javascript.files > 0) {
-            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {d:>7} | {d:>7} |\n", .{
-                "JavaScript", stats.javascript.files, stats.javascript.lines,
-                stats.javascript.loc, stats.javascript.imports, stats.javascript.exports
-            });
+            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {d:>7} | {d:>7} |\n", .{ "JavaScript", stats.javascript.files, stats.javascript.lines, stats.javascript.loc, stats.javascript.imports, stats.javascript.exports });
         }
-
+
         if (stats.commonjs.files > 0) {
-            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {d:>7} | {d:>7} |\n", .{
-                "CommonJS modules", stats.commonjs.files, stats.commonjs.lines,
-                stats.commonjs.loc, stats.commonjs.imports, stats.commonjs.exports
-            });
+            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {d:>7} | {d:>7} |\n", .{ "CommonJS modules", stats.commonjs.files, stats.commonjs.lines, stats.commonjs.loc, stats.commonjs.imports, stats.commonjs.exports });
         }
-
+
         if (stats.esmodules.files > 0) {
-            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {d:>7} | {d:>7} |\n", .{
-                "ES modules", stats.esmodules.files, stats.esmodules.lines,
-                stats.esmodules.loc, stats.esmodules.imports, stats.esmodules.exports
-            });
+            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {d:>7} | {d:>7} |\n", .{ "ES modules", stats.esmodules.files, stats.esmodules.lines, stats.esmodules.loc, stats.esmodules.imports, stats.esmodules.exports });
         }
-
+
         if (stats.css.files > 0) {
-            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {s:>7} | {s:>7} |\n", .{
-                "CSS", stats.css.files, stats.css.lines, stats.css.loc, "-", "-"
-            });
+            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {s:>7} | {s:>7} |\n", .{ "CSS", stats.css.files, stats.css.lines, stats.css.loc, "-", "-" });
        }
-
+
         if (stats.json.files > 0) {
-            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {s:>7} | {s:>7} |\n", .{
-                "JSON", stats.json.files, stats.json.lines, stats.json.loc, "-", "-"
-            });
+            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {s:>7} | {s:>7} |\n", .{ "JSON", stats.json.files, stats.json.lines, stats.json.loc, "-", "-" });
         }
-
+
         if (stats.tests.files > 0) {
-            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {d:>7} | {d:>7} |\n", .{
-                "Tests", stats.tests.files, stats.tests.lines,
-                stats.tests.loc, stats.tests.imports, stats.tests.exports
-            });
+            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {d:>7} | {d:>7} |\n", .{ "Tests", stats.tests.files, stats.tests.lines, stats.tests.loc, stats.tests.imports, stats.tests.exports });
         }
-
+
         if (stats.node_modules.files > 0) {
-            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {s:>7} | {s:>7} |\n", .{
-                "node_modules", stats.node_modules.files, stats.node_modules.lines,
-                stats.node_modules.loc, "-", "-"
-            });
+            Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {s:>7} | {s:>7} |\n", .{ "node_modules", stats.node_modules.files, stats.node_modules.lines, stats.node_modules.loc, "-", "-" });
         }
-
+
         // Print total
-        Output.pretty("+{s:-<18}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+\n", .{
-            "-", "-", "-", "-", "-", "-"
-        });
-        Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {d:>7} | {d:>7} |\n", .{
-            "Total", stats.total.files, stats.total.lines,
-            stats.total.loc, stats.total.imports, stats.total.exports
-        });
-        Output.pretty("+{s:-<18}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+\n", .{
-            "-", "-", "-", "-", "-", "-"
-        });
+        Output.pretty("+{s:-<18}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+\n", .{ "-", "-", "-", "-", "-", "-" });
+        Output.pretty("| {s:<16} | {d:>7} | {d:>7} | {d:>7} | {d:>7} | {d:>7} |\n", .{ "Total", stats.total.files, stats.total.lines, stats.total.loc, stats.total.imports, stats.total.exports });
+        Output.pretty("+{s:-<18}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+{s:-<9}+\n", .{ "-", "-", "-", "-", "-", "-" });
     }

     fn printSummary(stats: *const CategoryStats, workspace_count: usize, reachable_count: usize, source_size: u64, elapsed_ms: u64) void {
         const code_loc = stats.total.loc -| stats.node_modules.loc -| stats.tests.loc;
         const test_loc = stats.tests.loc;
         const deps_loc = stats.node_modules.loc;
-
+
         Output.pretty("\n", .{});
-
+
         // Speed flex message
         Output.pretty(" Analyzed {d} LOC across {d} files in {d}ms\n", .{
             stats.total.loc,
             stats.total.files,
             elapsed_ms,
         });
-
+
         Output.pretty("\n", .{});
         Output.pretty("Files analyzed: {d}\n", .{reachable_count});
         Output.pretty("Code LOC: {d}\n", .{code_loc});
         Output.pretty("Test LOC: {d}\n", .{test_loc});
         Output.pretty("Deps LOC: {d}\n", .{deps_loc});
-
+
         if (code_loc > 0 and test_loc > 0) {
             const ratio = @as(f32, @floatFromInt(test_loc)) / @as(f32, @floatFromInt(code_loc));
             Output.pretty("Code to Test Ratio: 1 : {d:.1}\n", .{ratio});
         }
-
+
         Output.pretty("Workspace Packages: {d}\n", .{workspace_count});
-
+
         // Use actual source size from bundler
         if (source_size > 0) {
             const size_mb = @as(f32, @floatFromInt(source_size)) / 1024.0 / 1024.0;
@@ -219,11 +170,11 @@ pub const StatsCommand = struct {
     ) anyerror!void {
         const ctx = @as(*StatsContext, @ptrCast(@alignCast(ctx_)));
         const bundle = result.bundle_v2;
-
+
         // Access the parsed graph data
         const graph = &bundle.graph;
         const ast = &graph.ast;
-
+
         // Get the MultiArrayList slices
         const sources = graph.input_files.items(.source);
         const loaders = graph.input_files.items(.loader);
@@ -231,41 +182,41 @@ pub const StatsCommand = struct {
         const exports_kind = ast.items(.exports_kind);
         const named_exports = ast.items(.named_exports);
         const export_star_import_records = ast.items(.export_star_import_records);
-
+
         // Process each reachable file
         for (result.reachable_files) |source_index| {
             const index = source_index.get();
             if (index >= sources.len) continue;
-
+
             // Skip the runtime file (index 0)
             if (index == 0) continue;
-
+
             const source = sources[index];
             const loader = loaders[index];
             const imports = if (index < import_records.len) import_records[index] else ImportRecord.List{};
             const export_kind = if (index < exports_kind.len) exports_kind[index] else .none;
             const named_export_map = if (index < named_exports.len) named_exports[index] else bun.StringArrayHashMapUnmanaged(bun.ast.NamedExport){};
             const export_stars = if (index < export_star_import_records.len) export_star_import_records[index] else &[_]u32{};
-
+
             // Get source content and path
             const source_contents = source.contents;
             const path_text = source.path.text;
-
+
             // Skip virtual files and bun: files
-            if (strings.hasPrefixComptime(path_text, "bun:") or
+            if (strings.hasPrefixComptime(path_text, "bun:") or
                 strings.hasPrefixComptime(path_text, "node:") or
                 strings.hasPrefixComptime(path_text, "<") or
                 strings.eqlComptime(path_text, "bun")) continue;
-
+
             // Count lines and LOC
             const line_stats = countLinesAndLOC(source_contents);
-
+
             // Categorize file
-            const is_test = std.mem.indexOf(u8, path_text, ".test.") != null or
-                std.mem.indexOf(u8, path_text, ".spec.") != null or
-                std.mem.indexOf(u8, path_text, "__tests__") != null;
+            const is_test = std.mem.indexOf(u8, path_text, ".test.") != null or
+                std.mem.indexOf(u8, path_text, ".spec.") != null or
+                std.mem.indexOf(u8, path_text, "__tests__") != null;
             const is_node_modules = std.mem.indexOf(u8, path_text, "node_modules") != null;
-
+
             // Determine workspace package
             var workspace_pkg: ?[]const u8 = null;
             for (ctx.workspace_packages) |pkg_name| {
@@ -274,11 +225,11 @@ pub const StatsCommand = struct {
                     break;
                 }
             }
-
+
             // Count imports and exports
             const import_count: u32 = @intCast(imports.len);
             const export_count: u32 = @intCast(named_export_map.count() + export_stars.len);
-
+
             var file_stats = FileStats{
                 .files = 1,
                 .lines = line_stats.lines,
@@ -286,11 +237,11 @@ pub const StatsCommand = struct {
                 .imports = import_count,
                 .exports = export_count,
             };
-
+
             // Determine module type from exports_kind
             const is_commonjs = export_kind == .cjs;
             const is_esm = export_kind == .esm;
-
+
             // Update appropriate category based on loader type
             switch (loader) {
                 .tsx, .ts => {
@@ -321,7 +272,7 @@ pub const StatsCommand = struct {
                 },
                 else => {},
             }
-
+
             // Add to category totals
             if (is_node_modules) {
                 file_stats.imports = 0;
@@ -334,7 +285,7 @@ pub const StatsCommand = struct {
                     addStats(pkg_stats, &file_stats);
                 }
             }
-
+
             // Always add to total
             addStats(&ctx.stats.total, &file_stats);
         }
@@ -348,7 +299,7 @@ pub const StatsCommand = struct {
             }
             files.deinit();
         }
-
+
         // Simple recursive directory walker
         var stack = std.ArrayList([]const u8).init(allocator);
         defer {
@@ -357,28 +308,28 @@ pub const StatsCommand = struct {
             }
             stack.deinit();
         }
-
+
         try stack.append(try allocator.dupe(u8, dir_path));
-
+
         while (stack.items.len > 0) {
             const current_dir = stack.pop() orelse break;
             defer allocator.free(current_dir);
-
+
             // Skip node_modules and hidden directories
             if (std.mem.indexOf(u8, current_dir, "node_modules") != null or
                 std.mem.indexOf(u8, current_dir, "/.git") != null or
                 std.mem.indexOf(u8, current_dir, "/.next") != null) continue;
-
+
             var dir = std.fs.openDirAbsolute(current_dir, .{ .iterate = true }) catch |err| {
                 if (err == error.NotDir or err == error.FileNotFound) continue;
                 return err;
             };
             defer dir.close();
-
+
             var iter = dir.iterate();
             while (try iter.next()) |entry| {
                 const full_path = try std.fs.path.join(allocator, &.{ current_dir, entry.name });
-
+
                 switch (entry.kind) {
                     .directory => {
                         // Add directory to stack for processing
@@ -388,16 +339,16 @@ pub const StatsCommand = struct {
                         // Check if it's a JS/TS/JSON/CSS file
                         const ext = std.fs.path.extension(entry.name);
                         const is_js_file = std.mem.eql(u8, ext, ".js") or
-                            std.mem.eql(u8, ext, ".jsx") or
-                            std.mem.eql(u8, ext, ".ts") or
-                            std.mem.eql(u8, ext, ".tsx") or
-                            std.mem.eql(u8, ext, ".mjs") or
-                            std.mem.eql(u8, ext, ".cjs") or
-                            std.mem.eql(u8, ext, ".mts") or
-                            std.mem.eql(u8, ext, ".cts") or
-                            std.mem.eql(u8, ext, ".json") or
-                            std.mem.eql(u8, ext, ".css");
-
+                            std.mem.eql(u8, ext, ".jsx") or
+                            std.mem.eql(u8, ext, ".ts") or
+                            std.mem.eql(u8, ext, ".tsx") or
+                            std.mem.eql(u8, ext, ".mjs") or
+                            std.mem.eql(u8, ext, ".cjs") or
+                            std.mem.eql(u8, ext, ".mts") or
+                            std.mem.eql(u8, ext, ".cts") or
+                            std.mem.eql(u8, ext, ".json") or
+                            std.mem.eql(u8, ext, ".css");
+
                         if (is_js_file) {
                             try files.append(full_path);
                         } else {
@@ -410,21 +361,21 @@ pub const StatsCommand = struct {
                 }
             }
         }
-
+
         return files.toOwnedSlice();
     }
-
+
     pub fn exec(ctx: Command.Context) !void {
         Global.configureAllocator(.{ .long_running = true });
         const allocator = ctx.allocator;
         const log = ctx.log;
-
+
         const start_time = std.time.nanoTimestamp();
-
+
         // Set up the bundler context similar to build command
         ctx.args.target = .bun; // Use bun target to resolve test files and Bun-specific imports
         ctx.args.packages = .bundle; // Bundle mode to analyze all files
-
+
         // Get workspace packages
         const workspace_packages = try getWorkspacePackages(allocator);
         defer {
@@ -433,7 +384,7 @@ pub const StatsCommand = struct {
             }
             allocator.free(workspace_packages);
         }
-
+
         // Initialize stats context
         var stats_ctx = StatsContext{
             .stats = CategoryStats{
@@ -443,15 +394,15 @@ pub const StatsCommand = struct {
             .workspace_packages = workspace_packages,
         };
         defer stats_ctx.stats.workspace_packages.deinit();
-
+
         // Initialize workspace package stats
         for (workspace_packages) |pkg| {
             try stats_ctx.stats.workspace_packages.put(pkg, FileStats{});
         }
-
+
         // Set up transpiler
         var this_transpiler = try transpiler.Transpiler.init(allocator, log, ctx.args, null);
-
+
         // Handle entry points based on user input
         var allocated_entry_points: ?[][]const u8 = null;
         defer if (allocated_entry_points) |entry_points| {
@@ -460,7 +411,7 @@ pub const StatsCommand = struct {
             }
             allocator.free(entry_points);
         };
-
+
         if (ctx.args.entry_points.len > 0) {
             // User provided entry points - use them directly
             this_transpiler.options.entry_points = ctx.args.entry_points;
@@ -468,36 +419,36 @@ pub const StatsCommand = struct {
             // No entry points provided - walk directory to find all JS/TS files
             const cwd = try std.process.getCwdAlloc(allocator);
             defer allocator.free(cwd);
-
+
             allocated_entry_points = try findAllJSFiles(allocator, cwd);
             this_transpiler.options.entry_points = allocated_entry_points.?;
         }
-
+
         this_transpiler.options.output_dir = ""; // No output needed
         this_transpiler.options.write = false; // Don't write files
         this_transpiler.configureLinker();
         try this_transpiler.configureDefines();
-
+
         // Set up the dependencies scanner to collect stats
         var scanner = BundleV2.DependenciesScanner{
             .ctx = &stats_ctx,
             .entry_points = this_transpiler.options.entry_points,
             .onFetch = onStatsCollect,
         };
-
+
         // Run the bundler to parse all files
         var reachable_file_count: usize = 0;
         var minify_duration: u64 = 0;
         var source_code_size: u64 = 0;
-
+
         if (this_transpiler.options.entry_points.len == 0) {
             Output.prettyErrorln("error: No files found to analyze", .{});
             return;
         }
-
+
         Output.pretty("Analyzing {d} files...\n", .{this_transpiler.options.entry_points.len});
         Output.flush();
-
+
         _ = BundleV2.generateFromCLI(
             &this_transpiler,
             allocator,
@@ -512,20 +463,32 @@ pub const StatsCommand = struct {
             if (this_transpiler.log.hasErrors()) {
                 this_transpiler.log.print(Output.writer()) catch {};
             }
-
+
             // It's okay if bundling fails, we still collected stats
             if (err != error.BuildFailed) {
                 return err;
             }
         };
-
+
         // Calculate elapsed time
         const end_time = std.time.nanoTimestamp();
         const elapsed_ns = @as(u64, @intCast(end_time - start_time));
         const elapsed_ms = elapsed_ns / std.time.ns_per_ms;
-
+
         // Print results
         printTable(&stats_ctx.stats, workspace_packages);
         printSummary(&stats_ctx.stats, workspace_packages.len, reachable_file_count, source_code_size, elapsed_ms);
     }
-};
\ No newline at end of file
+};
+
+const options = @import("../options.zig");
= @import("../options.zig"); +const std = @import("std"); +const transpiler = @import("../transpiler.zig"); +const BundleV2 = @import("../bundler/bundle_v2.zig").BundleV2; +const Command = @import("../cli.zig").Command; +const ImportRecord = @import("../import_record.zig").ImportRecord; + +const bun = @import("bun"); +const Global = bun.Global; +const Output = bun.Output; +const strings = bun.strings; diff --git a/test/cli/stats-command.test.ts b/test/cli/stats-command.test.ts index a3d8d67692..cdd1f8d9bc 100644 --- a/test/cli/stats-command.test.ts +++ b/test/cli/stats-command.test.ts @@ -1,6 +1,5 @@ -import { test, expect } from "bun:test"; +import { expect, test } from "bun:test"; import { bunEnv, bunExe, tempDir } from "harness"; -import { join } from "path"; test("bun stats - basic functionality", async () => { using dir = tempDir("stats-test", { @@ -18,15 +17,11 @@ test("bun stats - basic functionality", async () => { stderr: "pipe", }); - const [stdout, stderr, exitCode] = await Promise.all([ - proc.stdout.text(), - proc.stderr.text(), - proc.exited, - ]); + const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); expect(exitCode).toBe(0); expect(stderr).toBe(""); - + // Check that the output contains expected sections expect(stdout).toContain("JavaScript"); expect(stdout).toContain("ES modules"); @@ -52,15 +47,11 @@ test("bun stats - with TypeScript files", async () => { stderr: "pipe", }); - const [stdout, stderr, exitCode] = await Promise.all([ - proc.stdout.text(), - proc.stderr.text(), - proc.exited, - ]); + const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); expect(exitCode).toBe(0); expect(stderr).toBe(""); - + // Check TypeScript stats expect(stdout).toContain("TypeScript"); expect(stdout).toContain("Tests"); @@ -82,15 +73,11 @@ test("bun stats - handles CommonJS and ES modules", async () => { stderr: "pipe", }); - const [stdout, stderr, exitCode] = await Promise.all([ - proc.stdout.text(), - proc.stderr.text(), - proc.exited, - ]); + const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); expect(exitCode).toBe(0); expect(stderr).toBe(""); - + // Check module type detection expect(stdout).toContain("CommonJS modules"); expect(stdout).toContain("ES modules"); @@ -116,10 +103,7 @@ test("bun stats - counts imports and exports", async () => { stderr: "pipe", }); - const [stdout] = await Promise.all([ - proc.stdout.text(), - proc.exited, - ]); + const [stdout] = await Promise.all([proc.stdout.text(), proc.exited]); // Should count imports and exports - check the table contains expected values expect(stdout).toContain("| 3 | 2 |"); @@ -133,15 +117,11 @@ test("bun stats --help", async () => { stderr: "pipe", }); - const [stdout, stderr, exitCode] = await Promise.all([ - proc.stdout.text(), - proc.stderr.text(), - proc.exited, - ]); + const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]); expect(exitCode).toBe(0); expect(stderr).toBe(""); expect(stdout).toContain("Usage:"); expect(stdout).toContain("bun stats"); expect(stdout).toContain("Generate a comprehensive code statistics report"); -}); \ No newline at end of file +});