Mirror of https://github.com/oven-sh/bun, synced 2026-02-02 15:08:46 +00:00
# feat(build): add --metafile-md CLI option for LLM-friendly bundle analysis (#26441)
## Summary

- Adds a `--metafile-md` CLI option to `bun build` that generates a markdown visualization of the module graph
- Designed to help Claude and other LLMs analyze bundle composition, identify bloat, and understand dependency chains
- Reuses the existing metafile JSON generation code as a post-processing step

## Features

The generated markdown includes:

1. **Quick Summary** - Module counts, sizes, ESM/CJS breakdown, output/input ratio
2. **Largest Modules by Output Contribution** - Sorted by bytes contributed to the output to identify potential bloat
3. **Entry Point Analysis** - Shows bundle size, exports, CSS bundles, and bundled modules
4. **Dependency Chains** - Most commonly imported modules and reverse dependencies
5. **Full Module Graph** - Complete import/export info for each module
6. **Raw Data for Searching** - Grep-friendly markers in code blocks:
   - `[MODULE:]`, `[OUTPUT_BYTES:]`, `[IMPORT:]`, `[IMPORTED_BY:]`
   - `[ENTRY:]`, `[EXTERNAL:]`, `[NODE_MODULES:]`

## Usage

```bash
# Default filename (meta.md)
bun build entry.js --metafile-md --outdir=dist

# Custom filename
bun build entry.js --metafile-md=analysis.md --outdir=dist

# Both JSON and markdown
bun build entry.js --metafile=meta.json --metafile-md=meta.md --outdir=dist
```

## Example Output

See sample output: https://gist.github.com/example (will add)

## Test plan

- [x] Test default filename (`meta.md`)
- [x] Test custom filename
- [x] Test both `--metafile` and `--metafile-md` together
- [x] Test summary metrics
- [x] Test module format info (ESM/CJS)
- [x] Test external imports
- [x] Test exports list
- [x] Test bundled modules table
- [x] Test CSS bundle reference
- [x] Test import kinds (static, dynamic, require)
- [x] Test commonly imported modules
- [x] Test largest files sorting (bloat analysis)
- [x] Test output/input ratio
- [x] Test grep-friendly raw data section
- [x] Test entry point markers
- [x] Test external import markers
- [x] Test node_modules markers

All 17 new tests pass.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

---------

Co-authored-by: Claude Bot <claude-bot@bun.sh>
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
**meta.json** (new file, +24 lines)
```json
{
  "inputs": {
    "../../tmp/test-entry.js": {
      "bytes": 21,
      "imports": [
      ],
      "format": "esm"
    }
  },
  "outputs": {
    "./test-entry.js": {
      "bytes": 49,
      "inputs": {
        "../../tmp/test-entry.js": {
          "bytesInOutput": 22
        }
      },
      "imports": [
      ],
      "exports": [],
      "entryPoint": "../../tmp/test-entry.js"
    }
  }
}
```
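This JSON is also exactly what the JS API hands back: per the backward-compatibility notes in the BundlerMetafile.cpp changes below, `result.metafile` is the parsed object itself (with `inputs`/`outputs` properties), not wrapped in `{ json: ... }`. A minimal sketch of reading it, with illustrative paths:

```ts
// Sketch, not part of the PR: iterate the metafile as the
// backward-compatible getter exposes it (parsed { inputs, outputs }).
const result = await Bun.build({
  entrypoints: ["./tmp/test-entry.js"], // illustrative entry point
  outdir: "dist",
  metafile: true,
});

for (const [path, input] of Object.entries(result.metafile.inputs)) {
  console.log(`${path}: ${input.bytes} bytes (${input.format})`);
}
```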
```diff
@@ -433,6 +433,7 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
                 .asset => {},
                 .bytecode => {},
                 .sourcemap => {},
+                .@"metafile-json", .@"metafile-markdown" => {},
             }
         },
     }
```
```diff
@@ -242,6 +242,10 @@ pub const JSBundler = struct {
         bytecode: bool = false,
         banner: OwnedString = OwnedString.initEmpty(bun.default_allocator),
         footer: OwnedString = OwnedString.initEmpty(bun.default_allocator),
+        /// Path to write JSON metafile (if specified via metafile object) - TEST: moved here
+        metafile_json_path: OwnedString = OwnedString.initEmpty(bun.default_allocator),
+        /// Path to write markdown metafile (if specified via metafile object) - TEST: moved here
+        metafile_markdown_path: OwnedString = OwnedString.initEmpty(bun.default_allocator),
         css_chunking: bool = false,
         drop: bun.StringSet = bun.StringSet.init(bun.default_allocator),
         features: bun.StringSet = bun.StringSet.init(bun.default_allocator),
@@ -254,6 +258,7 @@ pub const JSBundler = struct {
         /// In-memory files that can be used as entrypoints or imported.
         /// These files do not need to exist on disk.
         files: FileMap = .{},
+        /// Generate metafile (JSON module graph)
         metafile: bool = false,

         pub const CompileOptions = struct {
@@ -936,8 +941,30 @@ pub const JSBundler = struct {
             this.throw_on_error = flag;
         }

-        if (try config.getBooleanLoose(globalThis, "metafile")) |flag| {
-            this.metafile = flag;
+        // Parse metafile option: boolean | string | { json?: string, markdown?: string }
+        if (try config.getOwn(globalThis, "metafile")) |metafile_value| {
+            if (metafile_value.isBoolean()) {
+                this.metafile = metafile_value == .true;
+            } else if (metafile_value.isString()) {
+                // metafile: "path/to/meta.json" - shorthand for { json: "..." }
+                this.metafile = true;
+                const slice = try metafile_value.toSlice(globalThis, bun.default_allocator);
+                defer slice.deinit();
+                try this.metafile_json_path.appendSliceExact(slice.slice());
+            } else if (metafile_value.isObject()) {
+                // metafile: { json?: string, markdown?: string }
+                this.metafile = true;
+                if (try metafile_value.getOptional(globalThis, "json", ZigString.Slice)) |slice| {
+                    defer slice.deinit();
+                    try this.metafile_json_path.appendSliceExact(slice.slice());
+                }
+                if (try metafile_value.getOptional(globalThis, "markdown", ZigString.Slice)) |slice| {
+                    defer slice.deinit();
+                    try this.metafile_markdown_path.appendSliceExact(slice.slice());
+                }
+            } else if (!metafile_value.isUndefinedOrNull()) {
+                return globalThis.throwInvalidArguments("Expected metafile to be a boolean, string, or object with json/markdown paths", .{});
+            }
         }

         if (try CompileOptions.fromJS(
@@ -1070,6 +1097,8 @@ pub const JSBundler = struct {
         self.footer.deinit();
         self.tsconfig_override.deinit();
         self.files.deinitAndUnprotect();
+        self.metafile_json_path.deinit();
+        self.metafile_markdown_path.deinit();
     }
 };

@@ -1688,9 +1717,11 @@ pub const BuildArtifact = struct {
         @"entry-point",
         sourcemap,
         bytecode,
+        @"metafile-json",
+        @"metafile-markdown",

         pub fn isFileInStandaloneMode(this: OutputKind) bool {
-            return this != .sourcemap and this != .bytecode;
+            return this != .sourcemap and this != .bytecode and this != .@"metafile-json" and this != .@"metafile-markdown";
         }
     };

```
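The option parser above accepts three shapes for `metafile`. A hedged sketch of each from the JS side; file names are illustrative, and per the bundler changes below the paths are written relative to `outdir`:

```ts
// Boolean: generate the metafile and expose it as result.metafile.
await Bun.build({ entrypoints: ["./entry.js"], outdir: "dist", metafile: true });

// String shorthand, equivalent to { json: "meta.json" }.
await Bun.build({ entrypoints: ["./entry.js"], outdir: "dist", metafile: "meta.json" });

// Object form: write a JSON and/or markdown file as well.
await Bun.build({
  entrypoints: ["./entry.js"],
  outdir: "dist",
  metafile: { json: "meta.json", markdown: "meta.md" },
});
```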
```diff
@@ -1,10 +1,13 @@
 /**
- * Lazy getter for BuildOutput.metafile that parses JSON on first access.
+ * Lazy getter for BuildOutput.metafile that returns the parsed JSON directly.
  * Uses CustomValue so the parsed result replaces the getter.
+ *
+ * For backward compatibility, result.metafile returns the parsed JSON object directly
+ * (with inputs/outputs properties), not wrapped in { json: ... }.
  */

 #include "root.h"
-#include "BunBuiltinNames.h"
+#include "BunClientData.h"
 #include "ZigGlobalObject.h"

 #include <JavaScriptCore/CustomGetterSetter.h>
@@ -15,6 +18,7 @@ namespace Bun {

 using namespace JSC;

+// Lazy getter for metafile property - returns parsed JSON directly for backward compatibility
 JSC_DEFINE_CUSTOM_GETTER(bundlerMetafileLazyGetter, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName property))
 {
     auto& vm = JSC::getVM(globalObject);
@@ -26,9 +30,11 @@ JSC_DEFINE_CUSTOM_GETTER(bundlerMetafileLazyGetter, (JSGlobalObject * globalObje
     }

     // Get the raw JSON string from private property
-    const auto& privateName = Bun::builtinNames(vm).dataPrivateName();
+    const auto& privateName = WebCore::builtinNames(vm).metafileJsonPrivateName();
     JSValue metafileStringValue = thisObject->getDirect(vm, privateName);
-    ASSERT(metafileStringValue.isString());
+    if (!metafileStringValue || !metafileStringValue.isString()) {
+        return JSValue::encode(jsUndefined());
+    }

     auto str = metafileStringValue.toString(globalObject);
     RETURN_IF_EXCEPTION(scope, {});
@@ -49,17 +55,25 @@ JSC_DEFINE_CUSTOM_GETTER(bundlerMetafileLazyGetter, (JSGlobalObject * globalObje
 }

 // Helper to set up the lazy metafile on a BuildOutput object
-extern "C" SYSV_ABI void Bun__setupLazyMetafile(JSC::JSGlobalObject* globalObject, JSC::EncodedJSValue buildOutputEncoded, JSC::EncodedJSValue metafileStringEncoded)
+// For backward compatibility, metafile is the parsed JSON directly (not wrapped in { json: ... })
+extern "C" SYSV_ABI void Bun__setupLazyMetafile(
+    JSC::JSGlobalObject* globalObject,
+    JSC::EncodedJSValue buildOutputEncoded,
+    JSC::EncodedJSValue metafileJsonStringEncoded,
+    JSC::EncodedJSValue metafileMarkdownStringEncoded)
 {
     auto& vm = JSC::getVM(globalObject);
     JSObject* buildOutput = JSValue::decode(buildOutputEncoded).getObject();
     ASSERT(buildOutput);

-    // Store the raw JSON string in a private property
-    const auto& privateName = Bun::builtinNames(vm).dataPrivateName();
-    buildOutput->putDirect(vm, privateName, JSValue::decode(metafileStringEncoded), 0);
+    JSValue metafileJsonString = JSValue::decode(metafileJsonStringEncoded);
+    // metafileMarkdownString is currently unused for backward compatibility
+    // (we only set the JSON on result.metafile directly)
+    (void)metafileMarkdownStringEncoded;

-    // Set up the lazy getter
+    // Store raw JSON string in private property on buildOutput and set up lazy getter for "metafile"
+    // This returns the parsed JSON directly for backward compatibility with esbuild API
+    buildOutput->putDirect(vm, WebCore::builtinNames(vm).metafileJsonPrivateName(), metafileJsonString, 0);
     buildOutput->putDirectCustomAccessor(
         vm,
         Identifier::fromString(vm, "metafile"_s),
```
```diff
@@ -71,6 +71,10 @@ pub const LinkerContext = struct {
     source_maps: options.SourceMapOption = .none,
     target: options.Target = .browser,
     metafile: bool = false,
+    /// Path to write JSON metafile (for Bun.build API)
+    metafile_json_path: []const u8 = "",
+    /// Path to write markdown metafile (for Bun.build API)
+    metafile_markdown_path: []const u8 = "",

     mode: Mode = .bundle,

```
```diff
@@ -972,6 +972,8 @@ pub const BundleV2 = struct {
     this.linker.options.output_format = transpiler.options.output_format;
     this.linker.options.generate_bytecode_cache = transpiler.options.bytecode;
     this.linker.options.metafile = transpiler.options.metafile;
+    this.linker.options.metafile_json_path = transpiler.options.metafile_json_path;
+    this.linker.options.metafile_markdown_path = transpiler.options.metafile_markdown_path;

     this.linker.dev_server = transpiler.options.dev_server;

@@ -1593,12 +1595,13 @@ pub const BundleV2 = struct {
         return .{
             .output_files = std.array_list.Managed(options.OutputFile).init(alloc),
             .metafile = null,
+            .metafile_markdown = null,
         };
     }

     const output_files = try this.linker.generateChunksInParallel(chunks, false);

-    // Generate metafile if requested
+    // Generate metafile if requested (CLI writes files in build_command.zig)
     const metafile: ?[]const u8 = if (this.linker.options.metafile)
         LinkerContext.MetafileBuilder.generate(bun.default_allocator, &this.linker, chunks) catch |err| blk: {
             bun.Output.warn("Failed to generate metafile: {s}", .{@errorName(err)});
@@ -1607,9 +1610,11 @@ pub const BundleV2 = struct {
     else
         null;

+    // Markdown is generated later in build_command.zig for CLI
     return .{
         .output_files = output_files,
         .metafile = metafile,
+        .metafile_markdown = null,
     };
 }

@@ -1823,6 +1828,7 @@ pub const BundleV2 = struct {
 pub const BuildResult = struct {
     output_files: std.array_list.Managed(options.OutputFile),
     metafile: ?[]const u8 = null,
+    metafile_markdown: ?[]const u8 = null,

     pub fn deinit(this: *BuildResult) void {
         for (this.output_files.items) |*output_file| {
@@ -1835,6 +1841,11 @@ pub const BundleV2 = struct {
             bun.default_allocator.free(mf);
             this.metafile = null;
         }
+
+        if (this.metafile_markdown) |md| {
+            bun.default_allocator.free(md);
+            this.metafile_markdown = null;
+        }
     }
 };

@@ -1984,6 +1995,8 @@ pub const BundleV2 = struct {
     transpiler.options.footer = config.footer.slice();
     transpiler.options.react_fast_refresh = config.react_fast_refresh;
     transpiler.options.metafile = config.metafile;
+    transpiler.options.metafile_json_path = config.metafile_json_path.slice();
+    transpiler.options.metafile_markdown_path = config.metafile_markdown_path.slice();

     if (transpiler.options.compile) {
         // Emitting DCE annotations is nonsensical in --compile.
@@ -2295,13 +2308,20 @@ pub const BundleV2 = struct {
         },
     );

-    // Add metafile if it was generated (lazy parsing via getter)
+    // Add metafile if it was generated
+    // metafile: { json: <lazy parsed>, markdown?: string }
     if (build.metafile) |metafile| {
         const metafile_js_str = bun.String.createUTF8ForJS(globalThis, metafile) catch |err| {
             return promise.reject(globalThis, err);
         };
-        // Set up lazy getter that parses JSON on first access and memoizes
-        Bun__setupLazyMetafile(globalThis, build_output, metafile_js_str);
+        const metafile_md_str: jsc.JSValue = if (build.metafile_markdown) |md|
+            (bun.String.createUTF8ForJS(globalThis, md) catch |err| {
+                return promise.reject(globalThis, err);
+            })
+        else
+            .js_undefined;
+        // Set up metafile object with json (lazy) and markdown (if present)
+        Bun__setupLazyMetafile(globalThis, build_output, metafile_js_str, metafile_md_str);
     }

     const didHandleCallbacks = if (this.plugins) |plugin| runOnEndCallbacks(globalThis, plugin, promise, build_output, .js_undefined) catch |err| {
@@ -2738,7 +2758,7 @@ pub const BundleV2 = struct {
         return error.BuildFailed;
     }

-    const output_files = try this.linker.generateChunksInParallel(chunks, false);
+    var output_files = try this.linker.generateChunksInParallel(chunks, false);

     // Generate metafile if requested
     const metafile: ?[]const u8 = if (this.linker.options.metafile)
@@ -2749,12 +2769,96 @@ pub const BundleV2 = struct {
     else
         null;

+    // Generate markdown if metafile was generated and path specified
+    const metafile_markdown: ?[]const u8 = if (this.linker.options.metafile_markdown_path.len > 0 and metafile != null)
+        LinkerContext.MetafileBuilder.generateMarkdown(bun.default_allocator, metafile.?) catch |err| blk: {
+            bun.Output.warn("Failed to generate metafile markdown: {s}", .{@errorName(err)});
+            break :blk null;
+        }
+    else
+        null;
+
+    // Write metafile outputs to disk and add them as OutputFiles.
+    // Metafile paths are relative to outdir, like all other output files.
+    const outdir = this.linker.resolver.opts.output_dir;
+    if (this.linker.options.metafile_json_path.len > 0) {
+        if (metafile) |mf| {
+            try writeMetafileOutput(&output_files, outdir, this.linker.options.metafile_json_path, mf, .@"metafile-json");
+        }
+    }
+    if (this.linker.options.metafile_markdown_path.len > 0) {
+        if (metafile_markdown) |md| {
+            try writeMetafileOutput(&output_files, outdir, this.linker.options.metafile_markdown_path, md, .@"metafile-markdown");
+        }
+    }
+
     return .{
         .output_files = output_files,
         .metafile = metafile,
+        .metafile_markdown = metafile_markdown,
     };
 }

+/// Writes a metafile (JSON or markdown) to disk and appends it to the output_files list.
+/// Metafile paths are relative to outdir, like all other output files.
+fn writeMetafileOutput(
+    output_files: *std.array_list.Managed(options.OutputFile),
+    outdir: []const u8,
+    file_path: []const u8,
+    content: []const u8,
+    output_kind: jsc.API.BuildArtifact.OutputKind,
+) !void {
+    if (outdir.len > 0) {
+        // Open the output directory
+        var root_dir = bun.FD.cwd().stdDir().makeOpenPath(outdir, .{}) catch |err| {
+            bun.Output.warn("Failed to open output directory '{s}': {s}", .{ outdir, @errorName(err) });
+            return;
+        };
+        defer root_dir.close();
+
+        // Create parent directories if needed (relative to outdir)
+        if (std.fs.path.dirname(file_path)) |parent| {
+            if (parent.len > 0) {
+                root_dir.makePath(parent) catch {};
+            }
+        }
+
+        // Write to disk relative to outdir
+        var path_buf: bun.PathBuffer = undefined;
+        _ = jsc.Node.fs.NodeFS.writeFileWithPathBuffer(&path_buf, .{
+            .data = .{ .buffer = .{
+                .buffer = .{
+                    .ptr = @constCast(content.ptr),
+                    .len = @as(u32, @truncate(content.len)),
+                    .byte_len = @as(u32, @truncate(content.len)),
+                },
+            } },
+            .encoding = .buffer,
+            .mode = 0o644,
+            .dirfd = bun.FD.fromStdDir(root_dir),
+            .file = .{ .path = .{
+                .string = bun.PathString.init(file_path),
+            } },
+        }).unwrap() catch |err| {
+            bun.Output.warn("Failed to write metafile to '{s}': {s}", .{ file_path, @errorName(err) });
+        };
+    }
+
+    // Add as OutputFile so it appears in result.outputs
+    const is_json = output_kind == .@"metafile-json";
+    try output_files.append(options.OutputFile.init(.{
+        .loader = if (is_json) .json else .file,
+        .input_loader = if (is_json) .json else .file,
+        .input_path = bun.handleOom(bun.default_allocator.dupe(u8, if (is_json) "metafile.json" else "metafile.md")),
+        .output_path = bun.handleOom(bun.default_allocator.dupe(u8, file_path)),
+        .data = .{ .saved = content.len },
+        .output_kind = output_kind,
+        .is_executable = false,
+        .side = null,
+        .entry_point_index = null,
+    }));
+}
+
 fn shouldAddWatcherPlugin(bv2: *BundleV2, namespace: []const u8, path: []const u8) bool {
     return bun.strings.eqlComptime(namespace, "file") and
         std.fs.path.isAbsolute(path) and
@@ -4721,7 +4825,8 @@ const string = []const u8;

 // C++ binding for lazy metafile getter (defined in BundlerMetafile.cpp)
 // Uses jsc.conv (SYSV_ABI on Windows x64) for proper calling convention
-extern "C" fn Bun__setupLazyMetafile(globalThis: *jsc.JSGlobalObject, buildOutput: jsc.JSValue, metafileString: jsc.JSValue) callconv(jsc.conv) void;
+// Sets up metafile object with { json: <lazy parsed>, markdown?: string }
+extern "C" fn Bun__setupLazyMetafile(globalThis: *jsc.JSGlobalObject, buildOutput: jsc.JSValue, metafileJsonString: jsc.JSValue, metafileMarkdownString: jsc.JSValue) callconv(jsc.conv) void;

 const options = @import("../options.zig");
```
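Because `writeMetafileOutput` above appends the written files as ordinary `OutputFile`s, they should also surface in `result.outputs` with the new `OutputKind` values. A sketch, under the assumption that the Zig enum names `@"metafile-json"` and `@"metafile-markdown"` map to the JS artifact `kind` strings the same way the existing kinds do:

```ts
const result = await Bun.build({
  entrypoints: ["./entry.js"],
  outdir: "dist",
  metafile: { json: "meta.json", markdown: "meta.md" },
});

// Assumption: the new kinds surface as "metafile-json" / "metafile-markdown".
const metaArtifacts = result.outputs.filter((artifact) =>
  ["metafile-json", "metafile-markdown"].includes(artifact.kind),
);
console.log(metaArtifacts.map((artifact) => artifact.path));
```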
`@@ -344,6 +344,737 @@ fn writeJSONString(writer: anytype, str: []const u8) !void {`

This hunk is purely additive: after `writeJSONString`, it adds the `generateMarkdown` post-processor and a small path helper.

````zig
/// Generates a markdown visualization of the module graph from metafile JSON.
/// This is a post-processing step that parses the JSON and produces LLM-friendly output.
/// Designed to help diagnose bundle bloat, dependency chains, and entry point analysis.
/// The caller is responsible for freeing the returned slice.
pub fn generateMarkdown(allocator: std.mem.Allocator, metafile_json: []const u8) ![]const u8 {
    const parsed = std.json.parseFromSlice(std.json.Value, allocator, metafile_json, .{}) catch {
        return error.InvalidJSON;
    };
    defer parsed.deinit();

    const root = parsed.value;
    if (root != .object) return error.InvalidJSON;

    var md = std.array_list.Managed(u8).init(allocator);
    errdefer md.deinit();
    const writer = md.writer();

    // Get inputs and outputs
    const inputs = root.object.get("inputs") orelse return error.InvalidJSON;
    const outputs = root.object.get("outputs") orelse return error.InvalidJSON;

    if (inputs != .object or outputs != .object) return error.InvalidJSON;

    // Header
    try writer.writeAll("# Bundle Analysis Report\n\n");
    try writer.writeAll("This report helps identify bundle size issues, dependency bloat, and optimization opportunities.\n\n");

    // Table of Contents for easy navigation
    try writer.writeAll("## Table of Contents\n\n");
    try writer.writeAll("- [Quick Summary](#quick-summary)\n");
    try writer.writeAll("- [Largest Modules by Output Contribution](#largest-modules-by-output-contribution)\n");
    try writer.writeAll("- [Entry Point Analysis](#entry-point-analysis)\n");
    try writer.writeAll("- [Dependency Chains](#dependency-chains)\n");
    try writer.writeAll("- [Full Module Graph](#full-module-graph)\n");
    try writer.writeAll("- [Raw Data for Searching](#raw-data-for-searching)\n\n");
    try writer.writeAll("---\n\n");

    // ==================== SUMMARY ====================
    try writer.writeAll("## Quick Summary\n\n");

    var total_output_bytes: u64 = 0;
    var esm_count: u32 = 0;
    var cjs_count: u32 = 0;
    var json_count: u32 = 0;
    var external_count: u32 = 0;
    var node_modules_count: u32 = 0;
    var node_modules_bytes: u64 = 0;

    // Build a map of module path -> bytesInOutput (bytes contributed to output)
    // This aggregates from all outputs since a module may appear in multiple chunks
    var bytes_in_output = bun.StringHashMap(u64).init(allocator);
    defer bytes_in_output.deinit();

    // First pass through outputs to collect bytesInOutput for each module
    var output_iter_first = outputs.object.iterator();
    while (output_iter_first.next()) |out_entry| {
        const output = out_entry.value_ptr.*;
        if (output != .object) continue;

        if (output.object.get("inputs")) |output_inputs| {
            if (output_inputs == .object) {
                var oi_iter = output_inputs.object.iterator();
                while (oi_iter.next()) |oi_entry| {
                    const module_path = oi_entry.key_ptr.*;
                    const module_info = oi_entry.value_ptr.*;
                    if (module_info == .object) {
                        if (module_info.object.get("bytesInOutput")) |bio| {
                            if (bio == .integer) {
                                const bytes_val: u64 = @intCast(bio.integer);
                                const gop = try bytes_in_output.getOrPut(module_path);
                                if (gop.found_existing) {
                                    gop.value_ptr.* += bytes_val;
                                } else {
                                    gop.value_ptr.* = bytes_val;
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    // Build reverse dependency map: who imports each file?
    // Also collect input file data for sorting
    const InputFileInfo = struct {
        path: []const u8,
        bytes_in_output: u64,
        import_count: u32,
        is_node_modules: bool,
        format: []const u8,
    };

    var input_files: std.ArrayListUnmanaged(InputFileInfo) = .{};
    defer input_files.deinit(allocator);

    var imported_by = bun.StringHashMap(std.ArrayListUnmanaged([]const u8)).init(allocator);
    defer {
        var it = imported_by.valueIterator();
        while (it.next()) |list| {
            list.deinit(allocator);
        }
        imported_by.deinit();
    }

    // Second pass: collect all input file info and build reverse dependency map
    var input_iter = inputs.object.iterator();
    while (input_iter.next()) |entry| {
        const path = entry.key_ptr.*;
        const input = entry.value_ptr.*;
        if (input != .object) continue;

        const is_node_modules = std.mem.indexOf(u8, path, "node_modules") != null;
        const module_bytes = bytes_in_output.get(path) orelse 0;

        var info = InputFileInfo{
            .path = path,
            .bytes_in_output = module_bytes,
            .import_count = 0,
            .is_node_modules = is_node_modules,
            .format = "",
        };

        if (is_node_modules) {
            node_modules_bytes += module_bytes;
            node_modules_count += 1;
        }

        if (input.object.get("format")) |format| {
            if (format == .string) {
                info.format = format.string;
                if (std.mem.eql(u8, format.string, "esm")) {
                    esm_count += 1;
                } else if (std.mem.eql(u8, format.string, "cjs")) {
                    cjs_count += 1;
                } else if (std.mem.eql(u8, format.string, "json")) {
                    json_count += 1;
                }
            }
        }

        // Build reverse dependency map
        if (input.object.get("imports")) |imps| {
            if (imps == .array) {
                info.import_count = @intCast(imps.array.items.len);
                for (imps.array.items) |imp| {
                    if (imp == .object) {
                        if (imp.object.get("external")) |ext| {
                            if (ext == .bool and ext.bool) {
                                external_count += 1;
                                continue;
                            }
                        }
                        if (imp.object.get("path")) |imp_path| {
                            if (imp_path == .string) {
                                // Try to find the matching input key for this import
                                // The import path may be absolute while input keys are relative
                                // Or it may be a relative path like "../utils/logger.js"
                                const target = imp_path.string;

                                // First, try exact match
                                var matched_key: ?[]const u8 = null;
                                if (inputs.object.contains(target)) {
                                    matched_key = target;
                                } else {
                                    // Try matching by basename or suffix
                                    var key_iter = inputs.object.iterator();
                                    while (key_iter.next()) |key_entry| {
                                        const input_key = key_entry.key_ptr.*;
                                        // Check if target ends with the input key
                                        if (std.mem.endsWith(u8, target, input_key)) {
                                            // Make sure it's a path boundary (preceded by / or \ or start)
                                            if (target.len == input_key.len or
                                                (target.len > input_key.len and (target[target.len - input_key.len - 1] == '/' or target[target.len - input_key.len - 1] == '\\')))
                                            {
                                                matched_key = input_key;
                                                break;
                                            }
                                        }
                                        // Also check if input_key ends with target (for relative paths)
                                        // e.g., target="../utils/logger.js" might match "src/utils/logger.js"
                                        if (std.mem.indexOf(u8, target, "..") != null) {
                                            // This is a relative path, try matching just the filename parts
                                            const target_base = std.fs.path.basename(target);
                                            const key_base = std.fs.path.basename(input_key);
                                            if (std.mem.eql(u8, target_base, key_base)) {
                                                // Check if paths share common suffix
                                                const target_without_dots = stripParentRefs(target);
                                                if (std.mem.endsWith(u8, input_key, target_without_dots)) {
                                                    matched_key = input_key;
                                                    break;
                                                }
                                            }
                                        }
                                    }
                                }

                                if (matched_key) |key| {
                                    const gop = try imported_by.getOrPut(key);
                                    if (!gop.found_existing) {
                                        gop.value_ptr.* = .{};
                                    }
                                    try gop.value_ptr.append(allocator, path);
                                }
                            }
                        }
                    }
                }
            }
        }

        try input_files.append(allocator, info);
    }

    // Count outputs and entry points
    var entry_point_count: u32 = 0;
    var chunk_count: u32 = 0;
    var output_iter = outputs.object.iterator();
    while (output_iter.next()) |entry| {
        if (entry.value_ptr.* == .object) {
            if (entry.value_ptr.object.get("bytes")) |bytes| {
                if (bytes == .integer) {
                    total_output_bytes += @intCast(bytes.integer);
                }
            }
            if (entry.value_ptr.object.get("entryPoint")) |_| {
                entry_point_count += 1;
            } else {
                chunk_count += 1;
            }
        }
    }

    // Summary table
    try writer.writeAll("| Metric | Value |\n");
    try writer.writeAll("|--------|-------|\n");
    try writer.print("| Total output size | {f} |\n", .{bun.fmt.size(total_output_bytes, .{})});
    try writer.print("| Input modules | {d} |\n", .{inputs.object.count()});
    if (entry_point_count > 0) {
        try writer.print("| Entry points | {d} |\n", .{entry_point_count});
    }
    if (chunk_count > 0) {
        try writer.print("| Code-split chunks | {d} |\n", .{chunk_count});
    }
    if (node_modules_count > 0) {
        try writer.print("| node_modules contribution | {d} files ({f}) |\n", .{ node_modules_count, bun.fmt.size(node_modules_bytes, .{}) });
    }
    if (esm_count > 0) try writer.print("| ESM modules | {d} |\n", .{esm_count});
    if (cjs_count > 0) try writer.print("| CommonJS modules | {d} |\n", .{cjs_count});
    if (json_count > 0) try writer.print("| JSON files | {d} |\n", .{json_count});
    if (external_count > 0) try writer.print("| External imports | {d} |\n", .{external_count});

    // ==================== LARGEST MODULES (BLOAT ANALYSIS) ====================
    try writer.writeAll("\n## Largest Modules by Output Contribution\n\n");
    try writer.writeAll("Modules sorted by bytes contributed to the output bundle. Large modules may indicate bloat.\n\n");

    // Sort by bytes_in_output descending
    std.mem.sort(InputFileInfo, input_files.items, {}, struct {
        fn lessThan(_: void, a: InputFileInfo, b: InputFileInfo) bool {
            return a.bytes_in_output > b.bytes_in_output;
        }
    }.lessThan);

    try writer.writeAll("| Output Bytes | % of Total | Module | Format |\n");
    try writer.writeAll("|--------------|------------|--------|--------|\n");

    const max_to_show: usize = 20;
    for (input_files.items, 0..) |info, i| {
        if (i >= max_to_show) break;
        if (info.bytes_in_output == 0) break; // Skip modules with no output contribution
        const pct = if (total_output_bytes > 0)
            @as(f64, @floatFromInt(info.bytes_in_output)) / @as(f64, @floatFromInt(total_output_bytes)) * 100.0
        else
            0.0;
        try writer.print("| {f} | {d:.1}% | `{s}` | {s} |\n", .{
            bun.fmt.size(info.bytes_in_output, .{}),
            pct,
            info.path,
            if (info.format.len > 0) info.format else "-",
        });
    }

    // Count remaining modules with non-zero contribution
    var remaining_count: usize = 0;
    if (input_files.items.len > max_to_show) {
        for (input_files.items[max_to_show..]) |info| {
            if (info.bytes_in_output > 0) remaining_count += 1;
        }
    }
    if (remaining_count > 0) {
        try writer.print("\n*...and {d} more modules with output contribution*\n", .{remaining_count});
    }

    // ==================== ENTRY POINT ANALYSIS ====================
    try writer.writeAll("\n## Entry Point Analysis\n\n");
    try writer.writeAll("Each entry point and the total code it loads (including shared chunks).\n\n");

    var out_iter2 = outputs.object.iterator();
    while (out_iter2.next()) |entry| {
        const output_path = entry.key_ptr.*;
        const output = entry.value_ptr.*;
        if (output != .object) continue;

        const entry_point = output.object.get("entryPoint") orelse continue;
        if (entry_point != .string) continue;

        try writer.print("### Entry: `{s}`\n\n", .{entry_point.string});

        // Output file info
        try writer.print("**Output file**: `{s}`\n", .{output_path});

        if (output.object.get("bytes")) |bytes| {
            if (bytes == .integer) {
                try writer.print("**Bundle size**: {f}\n", .{bun.fmt.size(@as(u64, @intCast(bytes.integer)), .{})});
            }
        }

        // CSS bundle
        if (output.object.get("cssBundle")) |css_bundle| {
            if (css_bundle == .string) {
                try writer.print("**CSS bundle**: `{s}`\n", .{css_bundle.string});
            }
        }

        // Exports
        if (output.object.get("exports")) |exports| {
            if (exports == .array and exports.array.items.len > 0) {
                try writer.writeAll("**Exports**: ");
                var first = true;
                const max_exports: usize = 10;
                for (exports.array.items, 0..) |exp, i| {
                    if (i >= max_exports) {
                        try writer.print(" ...+{d} more", .{exports.array.items.len - max_exports});
                        break;
                    }
                    if (exp == .string) {
                        if (!first) try writer.writeAll(", ");
                        first = false;
                        try writer.print("`{s}`", .{exp.string});
                    }
                }
                try writer.writeAll("\n");
            }
        }

        // Chunk dependencies
        if (output.object.get("imports")) |chunk_imports| {
            if (chunk_imports == .array and chunk_imports.array.items.len > 0) {
                try writer.writeAll("\n**Loads these chunks** (code-splitting):\n");
                for (chunk_imports.array.items) |imp| {
                    if (imp == .object) {
                        const path = imp.object.get("path") orelse continue;
                        const kind = imp.object.get("kind") orelse continue;
                        if (path == .string and kind == .string) {
                            // Try to get chunk size
                            if (outputs.object.get(path.string)) |chunk| {
                                if (chunk == .object) {
                                    if (chunk.object.get("bytes")) |bytes| {
                                        if (bytes == .integer) {
                                            try writer.print("- `{s}` ({f}, {s})\n", .{
                                                path.string,
                                                bun.fmt.size(@as(u64, @intCast(bytes.integer)), .{}),
                                                kind.string,
                                            });
                                            continue;
                                        }
                                    }
                                }
                            }
                            try writer.print("- `{s}` ({s})\n", .{ path.string, kind.string });
                        }
                    }
                }
            }
        }

        // Modules bundled into this entry
        if (output.object.get("inputs")) |output_inputs| {
            if (output_inputs == .object and output_inputs.object.count() > 0) {
                try writer.writeAll("\n**Bundled modules** (sorted by contribution):\n\n");
                try writer.writeAll("| Bytes | Module |\n");
                try writer.writeAll("|-------|--------|\n");

                // Collect and sort by size
                const ModuleSize = struct { path: []const u8, bytes: u64 };
                var module_sizes: std.ArrayListUnmanaged(ModuleSize) = .{};
                defer module_sizes.deinit(allocator);

                var oi_iter = output_inputs.object.iterator();
                while (oi_iter.next()) |oi_entry| {
                    const module_path = oi_entry.key_ptr.*;
                    const module_info = oi_entry.value_ptr.*;
                    if (module_info == .object) {
                        if (module_info.object.get("bytesInOutput")) |bio| {
                            if (bio == .integer) {
                                try module_sizes.append(allocator, .{ .path = module_path, .bytes = @intCast(bio.integer) });
                            }
                        }
                    }
                }

                std.mem.sort(ModuleSize, module_sizes.items, {}, struct {
                    fn lessThan(_: void, a: ModuleSize, b: ModuleSize) bool {
                        return a.bytes > b.bytes;
                    }
                }.lessThan);

                const max_modules: usize = 15;
                for (module_sizes.items, 0..) |ms, i| {
                    if (i >= max_modules) break;
                    try writer.print("| {f} | `{s}` |\n", .{ bun.fmt.size(ms.bytes, .{}), ms.path });
                }
                if (module_sizes.items.len > max_modules) {
                    try writer.print("\n*...and {d} more modules*\n", .{module_sizes.items.len - max_modules});
                }
            }
        }

        try writer.writeAll("\n");
    }

    // ==================== DEPENDENCY CHAINS (WHY IS THIS INCLUDED?) ====================
    try writer.writeAll("## Dependency Chains\n\n");
    try writer.writeAll("For each module, shows what files import it. Use this to understand why a module is included.\n\n");

    // Show modules that are imported by many files (potential optimization targets)
    const ImportedByInfo = struct { path: []const u8, count: usize };
    var highly_imported: std.ArrayListUnmanaged(ImportedByInfo) = .{};
    defer highly_imported.deinit(allocator);

    var ib_iter = imported_by.iterator();
    while (ib_iter.next()) |entry| {
        try highly_imported.append(allocator, .{ .path = entry.key_ptr.*, .count = entry.value_ptr.items.len });
    }

    std.mem.sort(ImportedByInfo, highly_imported.items, {}, struct {
        fn lessThan(_: void, a: ImportedByInfo, b: ImportedByInfo) bool {
            return a.count > b.count;
        }
    }.lessThan);

    // Show most commonly imported modules
    if (highly_imported.items.len > 0) {
        try writer.writeAll("### Most Commonly Imported Modules\n\n");
        try writer.writeAll("Modules imported by many files. Extracting these to shared chunks may help.\n\n");
        try writer.writeAll("| Import Count | Module | Imported By |\n");
        try writer.writeAll("|--------------|--------|-------------|\n");

        const max_common: usize = 15;
        for (highly_imported.items, 0..) |hi, i| {
            if (i >= max_common) break;
            if (hi.count < 2) break; // Only show if imported by 2+ files

            try writer.print("| {d} | `{s}` | ", .{ hi.count, hi.path });

            // Show first few importers
            if (imported_by.get(hi.path)) |importers| {
                const max_importers: usize = 3;
                for (importers.items, 0..) |importer, j| {
                    if (j >= max_importers) {
                        try writer.print("+{d} more", .{importers.items.len - max_importers});
                        break;
                    }
                    if (j > 0) try writer.writeAll(", ");
                    try writer.print("`{s}`", .{importer});
                }
            }
            try writer.writeAll(" |\n");
        }
    }

    // ==================== FULL MODULE GRAPH ====================
    try writer.writeAll("\n## Full Module Graph\n\n");
    try writer.writeAll("Complete dependency information for each module.\n\n");

    // Sort inputs alphabetically for easier navigation
    const PathOnly = struct { path: []const u8 };
    var sorted_paths: std.ArrayListUnmanaged(PathOnly) = .{};
    defer sorted_paths.deinit(allocator);

    var path_iter = inputs.object.iterator();
    while (path_iter.next()) |entry| {
        try sorted_paths.append(allocator, .{ .path = entry.key_ptr.* });
    }

    std.mem.sort(PathOnly, sorted_paths.items, {}, struct {
        fn lessThan(_: void, a: PathOnly, b: PathOnly) bool {
            return std.mem.lessThan(u8, a.path, b.path);
        }
    }.lessThan);

    for (sorted_paths.items) |sp| {
        const input_path = sp.path;
        const input = inputs.object.get(input_path) orelse continue;
        if (input != .object) continue;

        try writer.print("### `{s}`\n\n", .{input_path});

        // Show bytes contributed to output
        if (bytes_in_output.get(input_path)) |contrib| {
            if (contrib > 0) {
                try writer.print("- **Output contribution**: {f}\n", .{bun.fmt.size(contrib, .{})});
            }
        }

        if (input.object.get("format")) |format| {
            if (format == .string) {
                try writer.print("- **Format**: {s}\n", .{format.string});
            }
        }

        // Who imports this file?
        if (imported_by.get(input_path)) |importers| {
            try writer.print("- **Imported by** ({d} files):", .{importers.items.len});
            if (importers.items.len <= 5) {
                for (importers.items) |importer| {
                    try writer.print(" `{s}`", .{importer});
                }
            } else {
                for (importers.items[0..5]) |importer| {
                    try writer.print(" `{s}`", .{importer});
                }
                try writer.print(" +{d} more", .{importers.items.len - 5});
            }
            try writer.writeAll("\n");
        } else {
            // This is likely an entry point
            try writer.writeAll("- **Imported by**: (entry point or orphan)\n");
        }

        // What does this file import?
        if (input.object.get("imports")) |imps| {
            if (imps == .array and imps.array.items.len > 0) {
                try writer.writeAll("- **Imports**:\n");
                for (imps.array.items) |imp| {
                    if (imp == .object) {
                        const path = imp.object.get("path") orelse continue;
                        const kind = imp.object.get("kind") orelse continue;
                        if (path != .string or kind != .string) continue;

                        const is_external = blk: {
                            if (imp.object.get("external")) |ext| {
                                if (ext == .bool) break :blk ext.bool;
                            }
                            break :blk false;
                        };

                        const original = blk: {
                            if (imp.object.get("original")) |orig| {
                                if (orig == .string) break :blk orig.string;
                            }
                            break :blk null;
                        };

                        // Get output contribution of imported file if available
                        const imported_contrib: ?u64 = if (!is_external) bytes_in_output.get(path.string) else null;

                        if (is_external) {
                            if (original) |orig| {
                                try writer.print("  - `{s}` ({s}, **external**, specifier: `{s}`)\n", .{ path.string, kind.string, orig });
                            } else {
                                try writer.print("  - `{s}` ({s}, **external**)\n", .{ path.string, kind.string });
                            }
                        } else if (imported_contrib) |contrib| {
                            if (contrib > 0) {
                                if (original) |orig| {
                                    try writer.print("  - `{s}` ({s}, contributes {f}, specifier: `{s}`)\n", .{ path.string, kind.string, bun.fmt.size(contrib, .{}), orig });
                                } else {
                                    try writer.print("  - `{s}` ({s}, contributes {f})\n", .{ path.string, kind.string, bun.fmt.size(contrib, .{}) });
                                }
                            } else {
                                if (original) |orig| {
                                    try writer.print("  - `{s}` ({s}, specifier: `{s}`)\n", .{ path.string, kind.string, orig });
                                } else {
                                    try writer.print("  - `{s}` ({s})\n", .{ path.string, kind.string });
                                }
                            }
                        } else {
                            if (original) |orig| {
                                try writer.print("  - `{s}` ({s}, specifier: `{s}`)\n", .{ path.string, kind.string, orig });
                            } else {
                                try writer.print("  - `{s}` ({s})\n", .{ path.string, kind.string });
                            }
                        }

                        // Show import attributes if present
                        if (imp.object.get("with")) |with| {
                            if (with == .object) {
                                if (with.object.get("type")) |type_val| {
                                    if (type_val == .string) {
                                        try writer.print("    - with type: `{s}`\n", .{type_val.string});
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        try writer.writeAll("\n");
    }

    // ==================== RAW DATA FOR SEARCHING ====================
    try writer.writeAll("## Raw Data for Searching\n\n");
    try writer.writeAll("This section contains raw, grep-friendly data. Use these patterns:\n");
    try writer.writeAll("- `[MODULE:` - Find all modules\n");
    try writer.writeAll("- `[OUTPUT_BYTES:` - Find output contribution for each module\n");
    try writer.writeAll("- `[IMPORT:` - Find all import relationships\n");
    try writer.writeAll("- `[IMPORTED_BY:` - Find reverse dependencies\n");
    try writer.writeAll("- `[ENTRY:` - Find entry points\n");
    try writer.writeAll("- `[EXTERNAL:` - Find external imports\n");
    try writer.writeAll("- `[NODE_MODULES:` - Find node_modules files\n\n");

    // All modules with output contribution
    try writer.writeAll("### All Modules\n\n");
    try writer.writeAll("```\n");
    for (input_files.items) |info| {
        try writer.print("[MODULE: {s}]\n", .{info.path});
        if (info.bytes_in_output > 0) {
            try writer.print("[OUTPUT_BYTES: {s} = {d} bytes]\n", .{ info.path, info.bytes_in_output });
        }
        if (info.format.len > 0) {
            try writer.print("[FORMAT: {s} = {s}]\n", .{ info.path, info.format });
        }
        if (info.is_node_modules) {
            try writer.print("[NODE_MODULES: {s}]\n", .{info.path});
        }
    }
    try writer.writeAll("```\n\n");

    // All import relationships
    try writer.writeAll("### All Imports\n\n");
    try writer.writeAll("```\n");
    var import_iter2 = inputs.object.iterator();
    while (import_iter2.next()) |entry| {
        const source_path = entry.key_ptr.*;
        const input2 = entry.value_ptr.*;
        if (input2 != .object) continue;

        if (input2.object.get("imports")) |imps| {
            if (imps == .array) {
                for (imps.array.items) |imp| {
                    if (imp == .object) {
                        const is_ext = blk: {
                            if (imp.object.get("external")) |ext| {
                                if (ext == .bool) break :blk ext.bool;
                            }
                            break :blk false;
                        };

                        if (imp.object.get("path")) |imp_path| {
                            if (imp_path == .string) {
                                if (is_ext) {
                                    try writer.print("[EXTERNAL: {s} imports {s}]\n", .{ source_path, imp_path.string });
                                } else {
                                    try writer.print("[IMPORT: {s} -> {s}]\n", .{ source_path, imp_path.string });
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    try writer.writeAll("```\n\n");

    // All reverse dependencies (imported by)
    try writer.writeAll("### Reverse Dependencies (Imported By)\n\n");
    try writer.writeAll("```\n");
    var ib_iter2 = imported_by.iterator();
    while (ib_iter2.next()) |entry| {
        const target = entry.key_ptr.*;
        for (entry.value_ptr.items) |importer| {
            try writer.print("[IMPORTED_BY: {s} <- {s}]\n", .{ target, importer });
        }
    }
    try writer.writeAll("```\n\n");

    // Entry points
    try writer.writeAll("### Entry Points\n\n");
    try writer.writeAll("```\n");
    var out_iter3 = outputs.object.iterator();
    while (out_iter3.next()) |entry| {
        const output_path2 = entry.key_ptr.*;
        const output2 = entry.value_ptr.*;
        if (output2 != .object) continue;

        if (output2.object.get("entryPoint")) |ep| {
            if (ep == .string) {
                var size: u64 = 0;
                if (output2.object.get("bytes")) |bytes| {
                    if (bytes == .integer) {
                        size = @intCast(bytes.integer);
                    }
                }
                try writer.print("[ENTRY: {s} -> {s} ({d} bytes)]\n", .{ ep.string, output_path2, size });
            }
        }
    }
    try writer.writeAll("```\n\n");

    // node_modules summary
    if (node_modules_count > 0) {
        try writer.writeAll("### node_modules Summary\n\n");
        try writer.writeAll("```\n");
        for (input_files.items) |info| {
            if (info.is_node_modules and info.bytes_in_output > 0) {
                try writer.print("[NODE_MODULES: {s} (contributes {d} bytes)]\n", .{ info.path, info.bytes_in_output });
            }
        }
        try writer.writeAll("```\n");
    }

    return md.toOwnedSlice();
}

/// Strips leading "../" sequences from a relative path.
/// e.g., "../utils/logger.js" -> "utils/logger.js"
fn stripParentRefs(path: []const u8) []const u8 {
    var result = path;
    while (result.len >= 3 and std.mem.startsWith(u8, result, "../")) {
        result = result[3..];
    }
    // Also handle ./ prefix
    while (result.len >= 2 and std.mem.startsWith(u8, result, "./")) {
        result = result[2..];
    }
    return result;
}
````
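Since the Raw Data section exists for mechanical searching, here is a small consumer sketch; it assumes the default `meta.md` produced by `bun build ... --metafile-md --outdir=dist`:

```ts
// Sketch: scan the grep-friendly markers emitted by generateMarkdown above.
const text = await Bun.file("dist/meta.md").text();

// e.g. [IMPORT: src/index.ts -> src/utils.ts]
const importEdges = [...text.matchAll(/\[IMPORT: (.+?) -> (.+?)\]/g)].map(
  ([, from, to]) => ({ from, to }),
);

// e.g. [OUTPUT_BYTES: src/index.ts = 1234 bytes]
const moduleSizes = [...text.matchAll(/\[OUTPUT_BYTES: (.+?) = (\d+) bytes\]/g)].map(
  ([, path, bytes]) => ({ path, bytes: Number(bytes) }),
);

console.log(importEdges.length, "import edges,", moduleSizes.length, "sized modules");
```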
```diff
@@ -433,6 +433,7 @@ pub const Command = struct {
     outdir: []const u8 = "",
     outfile: []const u8 = "",
     metafile: [:0]const u8 = "",
+    metafile_md: [:0]const u8 = "",
     root_dir: []const u8 = "",
     public_path: []const u8 = "",
     entry_naming: []const u8 = "[dir]/[name].[ext]",
```
```diff
@@ -171,6 +171,7 @@ pub const build_only_params = [_]ParamType{
     clap.parseParam("--outdir <STR> Default to \"dist\" if multiple files") catch unreachable,
     clap.parseParam("--outfile <STR> Write to a file") catch unreachable,
     clap.parseParam("--metafile <STR>? Write a JSON file with metadata about the build") catch unreachable,
+    clap.parseParam("--metafile-md <STR>? Write a markdown file with a visualization of the module graph (LLM-friendly)") catch unreachable,
     clap.parseParam("--sourcemap <STR>? Build with sourcemaps - 'linked', 'inline', 'external', or 'none'") catch unreachable,
     clap.parseParam("--banner <STR> Add a banner to the bundled output such as \"use client\"; for a bundle being used with RSCs") catch unreachable,
     clap.parseParam("--footer <STR> Add a footer to the bundled output such as // built with bun!") catch unreachable,
@@ -1271,6 +1272,14 @@ pub fn parse(allocator: std.mem.Allocator, ctx: Command.Context, comptime cmd: C
             "meta.json";
     }

+    if (args.option("--metafile-md")) |metafile_md| {
+        // If --metafile-md is passed without a value, default to "meta.md"
+        ctx.bundler_options.metafile_md = if (metafile_md.len > 0)
+            bun.handleOom(allocator.dupeZ(u8, metafile_md))
+        else
+            "meta.md";
+    }
+
     if (args.option("--root")) |root_dir| {
         if (root_dir.len > 0) {
             ctx.bundler_options.root_dir = root_dir;
```
@@ -85,7 +85,7 @@ pub const BuildCommand = struct {
|
|||||||
this_transpiler.options.bundler_feature_flags = Runtime.Features.initBundlerFeatureFlags(allocator, ctx.args.feature_flags);
|
this_transpiler.options.bundler_feature_flags = Runtime.Features.initBundlerFeatureFlags(allocator, ctx.args.feature_flags);
|
||||||
|
|
||||||
this_transpiler.options.css_chunking = ctx.bundler_options.css_chunking;
|
this_transpiler.options.css_chunking = ctx.bundler_options.css_chunking;
|
||||||
this_transpiler.options.metafile = ctx.bundler_options.metafile.len > 0;
|
this_transpiler.options.metafile = ctx.bundler_options.metafile.len > 0 or ctx.bundler_options.metafile_md.len > 0;
|
||||||
|
|
||||||
this_transpiler.options.output_dir = ctx.bundler_options.outdir;
|
this_transpiler.options.output_dir = ctx.bundler_options.outdir;
|
||||||
this_transpiler.options.output_format = ctx.bundler_options.output_format;
|
this_transpiler.options.output_format = ctx.bundler_options.output_format;
|
||||||
@@ -352,6 +352,35 @@ pub const BuildCommand = struct {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Write markdown metafile if requested
|
||||||
|
if (ctx.bundler_options.metafile_md.len > 0) {
|
||||||
|
const metafile_md = MetafileBuilder.generateMarkdown(allocator, metafile_json) catch |err| blk: {
|
||||||
|
Output.warn("Failed to generate markdown metafile: {s}", .{@errorName(err)});
|
||||||
|
break :blk null;
|
||||||
|
};
|
||||||
|
if (metafile_md) |md_content| {
|
||||||
|
defer allocator.free(md_content);
|
||||||
|
const file = switch (bun.sys.File.makeOpen(ctx.bundler_options.metafile_md, bun.O.WRONLY | bun.O.CREAT | bun.O.TRUNC, 0o664)) {
|
||||||
|
.result => |f| f,
|
||||||
|
.err => |err| {
|
||||||
|
Output.err(err, "could not open metafile-md {f}", .{bun.fmt.quote(ctx.bundler_options.metafile_md)});
|
||||||
|
exitOrWatch(1, ctx.debug.hot_reload == .watch);
|
||||||
|
unreachable;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
defer file.close();
|
||||||
|
|
||||||
|
switch (file.writeAll(md_content)) {
|
||||||
|
.result => {},
|
||||||
|
.err => |err| {
|
||||||
|
Output.err(err, "could not write metafile-md {f}", .{bun.fmt.quote(ctx.bundler_options.metafile_md)});
|
||||||
|
exitOrWatch(1, ctx.debug.hot_reload == .watch);
|
||||||
|
unreachable;
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
break :brk build_result.output_files.items;
|
break :brk build_result.output_files.items;
|
||||||
@@ -554,6 +583,7 @@ pub const BuildCommand = struct {
|
|||||||
.asset => Output.prettyFmt("<magenta>", true),
|
.asset => Output.prettyFmt("<magenta>", true),
|
||||||
.sourcemap => Output.prettyFmt("<d>", true),
|
.sourcemap => Output.prettyFmt("<d>", true),
|
||||||
.bytecode => Output.prettyFmt("<d>", true),
|
.bytecode => Output.prettyFmt("<d>", true),
|
||||||
|
.@"metafile-json", .@"metafile-markdown" => Output.prettyFmt("<green>", true),
|
||||||
});
|
});
|
||||||
|
|
||||||
try writer.writeAll(rel_path);
|
try writer.writeAll(rel_path);
|
||||||
@@ -584,6 +614,8 @@ pub const BuildCommand = struct {
|
|||||||
.asset => "asset",
|
.asset => "asset",
|
||||||
.sourcemap => "source map",
|
.sourcemap => "source map",
|
||||||
.bytecode => "bytecode",
|
.bytecode => "bytecode",
|
||||||
|
.@"metafile-json" => "metafile json",
|
||||||
|
.@"metafile-markdown" => "metafile markdown",
|
||||||
}});
|
}});
|
||||||
if (Output.enable_ansi_colors_stdout)
|
if (Output.enable_ansi_colors_stdout)
|
||||||
try writer.writeAll("\x1b[0m");
|
try writer.writeAll("\x1b[0m");
|
||||||
@@ -678,6 +710,7 @@ fn printSummary(bundled_end: i128, minify_duration: u64, minified: bool, input_c
|
|||||||
|
|
||||||
const string = []const u8;
|
const string = []const u8;
|
||||||
|
|
||||||
|
const MetafileBuilder = @import("../bundler/linker_context/MetafileBuilder.zig");
|
||||||
const fs = @import("../fs.zig");
|
const fs = @import("../fs.zig");
|
||||||
const options = @import("../options.zig");
|
const options = @import("../options.zig");
|
||||||
const resolve_path = @import("../resolver/resolve_path.zig");
|
const resolve_path = @import("../resolver/resolve_path.zig");
|
||||||
|
|||||||
@@ -166,6 +166,7 @@ using namespace JSC;
|
|||||||
macro(makeGetterTypeError) \
|
macro(makeGetterTypeError) \
|
||||||
macro(maxAge) \
|
macro(maxAge) \
|
||||||
macro(method) \
|
macro(method) \
|
||||||
|
macro(metafileJson) \
|
||||||
macro(mockedFunction) \
|
macro(mockedFunction) \
|
||||||
macro(mode) \
|
macro(mode) \
|
||||||
macro(mtimeMs) \
|
macro(mtimeMs) \
|
||||||
|
|||||||
@@ -1825,6 +1825,10 @@ pub const BundleOptions = struct {
|
|||||||
|
|
||||||
compile: bool = false,
|
compile: bool = false,
|
||||||
metafile: bool = false,
|
metafile: bool = false,
|
||||||
|
/// Path to write JSON metafile (for Bun.build API)
|
||||||
|
metafile_json_path: []const u8 = "",
|
||||||
|
/// Path to write markdown metafile (for Bun.build API)
|
||||||
|
metafile_markdown_path: []const u8 = "",
|
||||||
|
|
||||||
/// Set when bake.DevServer is bundling.
|
/// Set when bake.DevServer is bundling.
|
||||||
dev_server: ?*bun.bake.DevServer = null,
|
dev_server: ?*bun.bake.DevServer = null,
|
||||||
|
|||||||
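These two path fields back the object form of the JS API's `metafile` option, which the new test suite further down exercises. A minimal sketch of that surface (file names here are illustrative and, per the tests, resolved relative to `outdir`):

```ts
// Sketch: object form of the metafile option on Bun.build.
const result = await Bun.build({
  entrypoints: ["./entry.js"],
  outdir: "./dist",
  metafile: { json: "meta.json", markdown: "meta.md" },
});

// result.metafile still returns the parsed JSON directly (esbuild-compatible),
// cast here the same way the tests do.
const metafile = result.metafile as Metafile;
console.log(Object.keys(metafile.inputs));
```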
@@ -46,13 +46,12 @@ describe("bundler metafile", () => {
    expect(result.metafile).toBeDefined();
    expect(typeof result.metafile).toBe("object");

-   // Check inputs structure
-   expect(result.metafile.inputs).toBeDefined();
-   expect(typeof result.metafile.inputs).toBe("object");
-   // Check outputs structure
-   expect(result.metafile.outputs).toBeDefined();
-   expect(typeof result.metafile.outputs).toBe("object");
+   // Check metafile structure - metafile returns the JSON directly (backward compatible with esbuild)
+   const metafile = result.metafile as Metafile;
+   expect(metafile.inputs).toBeDefined();
+   expect(typeof metafile.inputs).toBe("object");
+   expect(metafile.outputs).toBeDefined();
+   expect(typeof metafile.outputs).toBe("object");
  });

  test("metafile inputs contain file metadata", async () => {
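The `Metafile`, `MetafileInput`, `MetafileImport`, and `MetafileOutput` types these tests cast to are not part of this diff. As a rough sketch of the shape the assertions rely on, inferred from the tests and the sample meta.json, so an approximation rather than a canonical definition:

```ts
// Approximate shapes only — inferred from the assertions in this test file.
interface MetafileImport {
  path: string;
  kind: string; // e.g. "import-statement", "dynamic-import", "require-call"
  external?: boolean;
}

interface MetafileInput {
  bytes: number;
  imports: MetafileImport[];
  format?: "esm" | "cjs";
}

interface MetafileOutput {
  bytes: number;
  inputs: Record<string, { bytesInOutput: number }>;
  imports: MetafileImport[];
  exports: string[];
  entryPoint?: string;
  cssBundle?: string;
}

interface Metafile {
  inputs: Record<string, MetafileInput>;
  outputs: Record<string, MetafileOutput>;
}
```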
@@ -69,7 +68,7 @@ describe("bundler metafile", () => {
    expect(result.success).toBe(true);
    expect(result.metafile).toBeDefined();

-   const inputs = result.metafile.inputs as Record<string, MetafileInput>;
+   const inputs = (result.metafile as Metafile).inputs as Record<string, MetafileInput>;
    const inputKeys = Object.keys(inputs);

    // Should have at least 2 input files

@@ -97,7 +96,7 @@ describe("bundler metafile", () => {
    expect(result.success).toBe(true);
    expect(result.metafile).toBeDefined();

-   const outputs = result.metafile.outputs as Record<string, MetafileOutput>;
+   const outputs = (result.metafile as Metafile).outputs as Record<string, MetafileOutput>;
    const outputKeys = Object.keys(outputs);

    // Should have at least 1 output

@@ -129,7 +128,7 @@ describe("bundler metafile", () => {
    expect(result.metafile).toBeDefined();

    // Find the entry file in inputs
-   const inputs = result.metafile.inputs as Record<string, MetafileInput>;
+   const inputs = (result.metafile as Metafile).inputs as Record<string, MetafileInput>;
    let entryInput: MetafileInput | null = null;
    for (const [path, input] of Object.entries(inputs)) {
      if (path.includes("index.js")) {

@@ -158,7 +157,7 @@ describe("bundler metafile", () => {
    expect(result.metafile).toBeDefined();

    // Find the entry file in inputs
-   const inputs = result.metafile.inputs as Record<string, MetafileInput>;
+   const inputs = (result.metafile as Metafile).inputs as Record<string, MetafileInput>;
    let entryImports: MetafileImport[] | null = null;
    for (const [path, input] of Object.entries(inputs)) {
      if (path.includes("entry.js")) {

@@ -205,7 +204,7 @@ describe("bundler metafile", () => {
    expect(result.success).toBe(true);
    expect(result.metafile).toBeDefined();

-   const outputs = result.metafile.outputs as Record<string, MetafileOutput>;
+   const outputs = (result.metafile as Metafile).outputs as Record<string, MetafileOutput>;
    const outputKeys = Object.keys(outputs);
    expect(outputKeys.length).toBeGreaterThanOrEqual(1);


@@ -228,7 +227,7 @@ describe("bundler metafile", () => {
    expect(result.success).toBe(true);
    expect(result.metafile).toBeDefined();

-   const outputs = result.metafile.outputs as Record<string, MetafileOutput>;
+   const outputs = (result.metafile as Metafile).outputs as Record<string, MetafileOutput>;
    const outputKeys = Object.keys(outputs);

    // At least one output should have entryPoint

@@ -256,7 +255,7 @@ describe("bundler metafile", () => {
    expect(result.success).toBe(true);
    expect(result.metafile).toBeDefined();

-   const inputs = result.metafile.inputs as Record<string, MetafileInput>;
+   const inputs = (result.metafile as Metafile).inputs as Record<string, MetafileInput>;
    // At least one input should have format
    let hasFormat = false;
    for (const key of Object.keys(inputs)) {

@@ -283,7 +282,7 @@ describe("bundler metafile", () => {
    expect(result.success).toBe(true);
    expect(result.metafile).toBeDefined();

-   const inputs = result.metafile.inputs as Record<string, MetafileInput>;
+   const inputs = (result.metafile as Metafile).inputs as Record<string, MetafileInput>;
    // Find the foo.js file which uses CommonJS exports
    let fooInput: MetafileInput | null = null;
    for (const [path, input] of Object.entries(inputs)) {

@@ -311,7 +310,7 @@ describe("bundler metafile", () => {
    expect(result.success).toBe(true);
    expect(result.metafile).toBeDefined();

-   const inputs = result.metafile.inputs as Record<string, MetafileInput>;
+   const inputs = (result.metafile as Metafile).inputs as Record<string, MetafileInput>;
    let foundExternal = false;

    for (const key of Object.keys(inputs)) {

@@ -343,7 +342,7 @@ describe("bundler metafile", () => {
    expect(result.success).toBe(true);
    expect(result.metafile).toBeDefined();

-   const outputs = result.metafile.outputs as Record<string, MetafileOutput>;
+   const outputs = (result.metafile as Metafile).outputs as Record<string, MetafileOutput>;
    const outputKeys = Object.keys(outputs);

    // With splitting, we should have more outputs (shared chunk)

@@ -365,7 +364,7 @@ describe("bundler metafile", () => {
    expect(result.metafile).toBeDefined();

    // Find the entry file in inputs
-   const inputs = result.metafile.inputs as Record<string, MetafileInput>;
+   const inputs = (result.metafile as Metafile).inputs as Record<string, MetafileInput>;
    let jsonImport: MetafileImport | null = null;
    for (const [path, input] of Object.entries(inputs)) {
      if (path.includes("entry.js")) {

@@ -399,7 +398,7 @@ describe("bundler metafile", () => {
    expect(result.metafile).toBeDefined();

    // Find the entry file in inputs
-   const inputs = result.metafile.inputs as Record<string, MetafileInput>;
+   const inputs = (result.metafile as Metafile).inputs as Record<string, MetafileInput>;
    let requireImport: MetafileImport | null = null;
    for (const [path, input] of Object.entries(inputs)) {
      if (path.includes("entry.js")) {

@@ -433,7 +432,7 @@ describe("bundler metafile", () => {
    expect(result.metafile).toBeDefined();

    // Find the entry file in inputs
-   const inputs = result.metafile.inputs as Record<string, MetafileInput>;
+   const inputs = (result.metafile as Metafile).inputs as Record<string, MetafileInput>;
    let dynamicImport: MetafileImport | null = null;
    for (const [path, input] of Object.entries(inputs)) {
      if (path.includes("entry.js")) {

@@ -454,7 +453,7 @@ describe("bundler metafile", () => {
    expect(dynamicImport!.path).toMatch(/^\.\/chunk-[a-z0-9]+\.js$/);

    // Verify the path corresponds to an actual output chunk
-   const outputs = result.metafile.outputs as Record<string, MetafileOutput>;
+   const outputs = (result.metafile as Metafile).outputs as Record<string, MetafileOutput>;
    const outputPaths = Object.keys(outputs);
    expect(outputPaths).toContain(dynamicImport!.path);
  });

@@ -473,7 +472,7 @@ describe("bundler metafile", () => {
    expect(result.success).toBe(true);
    expect(result.metafile).toBeDefined();

-   const outputs = result.metafile.outputs as Record<string, MetafileOutput>;
+   const outputs = (result.metafile as Metafile).outputs as Record<string, MetafileOutput>;

    // Find the JS output that should reference the CSS bundle
    let foundCssBundle = false;

@@ -503,7 +502,7 @@ describe("bundler metafile", () => {
    expect(result.success).toBe(true);
    expect(result.metafile).toBeDefined();

-   const inputs = result.metafile.inputs as Record<string, MetafileInput>;
+   const inputs = (result.metafile as Metafile).inputs as Record<string, MetafileInput>;
    const inputKeys = Object.keys(inputs);

    // Should have both files
@@ -525,3 +524,680 @@ describe("bundler metafile", () => {
    expect(bImportsA).toBe(true);
  });
});
+
+describe("Bun.build metafile option variants", () => {
+  test("metafile: string writes JSON to file path", async () => {
+    using dir = tempDir("metafile-string-path", {
+      "entry.js": `import { foo } from "./foo.js"; console.log(foo);`,
+      "foo.js": `export const foo = "hello";`,
+    });
+
+    const result = await Bun.build({
+      entrypoints: [`${dir}/entry.js`],
+      outdir: `${dir}/dist`,
+      metafile: "output-meta.json",
+    });
+
+    expect(result.success).toBe(true);
+
+    // Check JSON file was written (relative to outdir)
+    const jsonFile = Bun.file(`${dir}/dist/output-meta.json`);
+    expect(await jsonFile.exists()).toBe(true);
+
+    // Verify JSON content
+    const content = await jsonFile.text();
+    const parsed = JSON.parse(content);
+    expect(parsed.inputs).toBeDefined();
+    expect(parsed.outputs).toBeDefined();
+
+    // Also check result.metafile is available (backward compatible - returns JSON directly)
+    expect(result.metafile).toBeDefined();
+    const metafile = result.metafile as Metafile;
+    expect(typeof metafile).toBe("object");
+    expect(metafile.inputs).toBeDefined();
+    expect(metafile.outputs).toBeDefined();
+  });
+
+  test("metafile: { json: path } writes JSON to specified path", async () => {
+    using dir = tempDir("metafile-object-json", {
+      "main.js": `export const value = 42;`,
+    });
+
+    const result = await Bun.build({
+      entrypoints: [`${dir}/main.js`],
+      outdir: `${dir}/dist`,
+      metafile: { json: "custom-meta.json" },
+    });
+
+    expect(result.success).toBe(true);
+
+    // Check JSON file was written (relative to outdir)
+    const jsonFile = Bun.file(`${dir}/dist/custom-meta.json`);
+    expect(await jsonFile.exists()).toBe(true);
+
+    // Verify content
+    const parsed = JSON.parse(await jsonFile.text());
+    expect(parsed.inputs).toBeDefined();
+    expect(parsed.outputs).toBeDefined();
+  });
+
+  test("metafile: { markdown: path } writes markdown to specified path", async () => {
+    using dir = tempDir("metafile-object-md", {
+      "app.js": `import "./util.js"; console.log("app");`,
+      "util.js": `console.log("util");`,
+    });
+
+    const result = await Bun.build({
+      entrypoints: [`${dir}/app.js`],
+      outdir: `${dir}/dist`,
+      metafile: { markdown: "analysis.md" },
+    });
+
+    expect(result.success).toBe(true);
+
+    // Check markdown file was written (relative to outdir)
+    const mdFile = Bun.file(`${dir}/dist/analysis.md`);
+    expect(await mdFile.exists()).toBe(true);
+
+    // Verify markdown content
+    const content = await mdFile.text();
+    expect(content).toContain("# Bundle Analysis Report");
+    expect(content).toContain("app.js");
+
+    // Also check result.metafile is available (backward compatible - returns JSON directly)
+    expect(result.metafile).toBeDefined();
+    const metafile = result.metafile as Metafile;
+    expect(metafile.inputs).toBeDefined();
+  });
+
+  test("metafile: { json: path, markdown: path } writes both files", async () => {
+    using dir = tempDir("metafile-object-both", {
+      "index.js": `import { helper } from "./helper.js"; helper();`,
+      "helper.js": `export function helper() { return "help"; }`,
+    });
+
+    const result = await Bun.build({
+      entrypoints: [`${dir}/index.js`],
+      outdir: `${dir}/dist`,
+      metafile: {
+        json: "meta.json",
+        markdown: "meta.md",
+      },
+    });
+
+    expect(result.success).toBe(true);
+
+    // Check both files exist (relative to outdir)
+    const jsonFile = Bun.file(`${dir}/dist/meta.json`);
+    const mdFile = Bun.file(`${dir}/dist/meta.md`);
+    expect(await jsonFile.exists()).toBe(true);
+    expect(await mdFile.exists()).toBe(true);
+
+    // Verify JSON
+    const parsedJson = JSON.parse(await jsonFile.text());
+    expect(parsedJson.inputs).toBeDefined();
+
+    // Verify markdown
+    const mdContent = await mdFile.text();
+    expect(mdContent).toContain("# Bundle Analysis Report");
+
+    // result.metafile is available (backward compatible - returns JSON directly)
+    expect(result.metafile).toBeDefined();
+    const metafile = result.metafile as Metafile;
+    expect(metafile.inputs).toBeDefined();
+    expect(metafile.outputs).toBeDefined();
+  });
+
+  test("metafile is lazily parsed", async () => {
+    using dir = tempDir("metafile-lazy-json", {
+      "entry.js": `export const x = 1;`,
+    });
+
+    const result = await Bun.build({
+      entrypoints: [`${dir}/entry.js`],
+      metafile: true,
+    });
+
+    expect(result.success).toBe(true);
+    expect(result.metafile).toBeDefined();
+
+    // First access should parse the JSON (backward compatible - returns JSON directly)
+    const metafile1 = result.metafile as Metafile;
+    expect(metafile1).toBeDefined();
+    expect(typeof metafile1).toBe("object");
+    expect(metafile1.inputs).toBeDefined();
+
+    // Second access should return the same cached object
+    const metafile2 = result.metafile as Metafile;
+    expect(metafile1).toBe(metafile2); // Same reference (memoized)
+  });
+
+  test("metafile: true provides metafile object", async () => {
+    using dir = tempDir("metafile-true", {
+      "test.js": `console.log("test");`,
+    });
+
+    const result = await Bun.build({
+      entrypoints: [`${dir}/test.js`],
+      metafile: true,
+    });
+
+    expect(result.success).toBe(true);
+    expect(result.metafile).toBeDefined();
+    // Backward compatible - returns JSON directly
+    const metafile = result.metafile as Metafile;
+    expect(metafile.inputs).toBeDefined();
+    expect(metafile.outputs).toBeDefined();
+  });
+
+  test("metafile: { markdown: path } provides metafile object", async () => {
+    using dir = tempDir("metafile-md-has-json", {
+      "test.js": `export const a = 1;`,
+    });
+
+    const result = await Bun.build({
+      entrypoints: [`${dir}/test.js`],
+      outdir: `${dir}/dist`,
+      metafile: { markdown: "meta.md" },
+    });
+
+    expect(result.success).toBe(true);
+    expect(result.metafile).toBeDefined();
+    // Backward compatible - returns JSON directly
+    const metafile = result.metafile as Metafile;
+    expect(metafile.inputs).toBeDefined();
+    expect(metafile.outputs).toBeDefined();
+  });
+});
+
+// CLI tests for --metafile-md
+import { bunEnv, bunExe } from "harness";
+
+describe("bun build --metafile-md", () => {
+  test("generates markdown metafile with default name", async () => {
+    using dir = tempDir("metafile-md-test", {
+      "index.js": `import { foo } from "./foo.js"; console.log(foo);`,
+      "foo.js": `export const foo = "hello";`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "index.js", "--metafile-md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    // Check meta.md was created
+    const metaFile = Bun.file(`${dir}/meta.md`);
+    expect(await metaFile.exists()).toBe(true);
+
+    const content = await metaFile.text();
+
+    // Verify markdown structure
+    expect(content).toContain("# Bundle Analysis Report");
+    expect(content).toContain("## Quick Summary");
+    expect(content).toContain("## Entry Point Analysis");
+    expect(content).toContain("## Full Module Graph");
+
+    // Verify content includes our files
+    expect(content).toContain("index.js");
+    expect(content).toContain("foo.js");
+  });
+
+  test("generates markdown metafile with custom name", async () => {
+    using dir = tempDir("metafile-md-custom-name", {
+      "main.js": `export const value = 42;`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "main.js", "--metafile-md=build-graph.md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    // Check custom-named file was created
+    const metaFile = Bun.file(`${dir}/build-graph.md`);
+    expect(await metaFile.exists()).toBe(true);
+
+    const content = await metaFile.text();
+    expect(content).toContain("# Bundle Analysis Report");
+    expect(content).toContain("main.js");
+  });
+
+  test("generates both metafile and metafile-md when both specified", async () => {
+    using dir = tempDir("metafile-both", {
+      "app.js": `import "./util.js"; console.log("app");`,
+      "util.js": `console.log("util");`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "app.js", "--metafile=meta.json", "--metafile-md=meta.md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    // Check both files exist
+    const jsonFile = Bun.file(`${dir}/meta.json`);
+    const mdFile = Bun.file(`${dir}/meta.md`);
+
+    expect(await jsonFile.exists()).toBe(true);
+    expect(await mdFile.exists()).toBe(true);
+
+    // Verify JSON is valid
+    const jsonContent = await jsonFile.text();
+    const parsed = JSON.parse(jsonContent);
+    expect(parsed.inputs).toBeDefined();
+    expect(parsed.outputs).toBeDefined();
+
+    // Verify markdown structure
+    const mdContent = await mdFile.text();
+    expect(mdContent).toContain("# Bundle Analysis Report");
+  });
+
+  test("markdown includes summary metrics", async () => {
+    using dir = tempDir("metafile-md-metrics", {
+      "entry.js": `import { a } from "./a.js"; import { b } from "./b.js"; console.log(a, b);`,
+      "a.js": `export const a = 1;`,
+      "b.js": `export const b = 2;`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "entry.js", "--metafile-md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Verify summary table
+    expect(content).toContain("| Input modules |");
+    expect(content).toContain("| Entry points |");
+    expect(content).toContain("| Total output size |");
+    expect(content).toContain("| ESM modules |");
+  });
+
+  test("markdown includes module format information", async () => {
+    using dir = tempDir("metafile-md-format", {
+      "esm.js": `export const x = 1;`,
+      "cjs.js": `module.exports = { y: 2 };`,
+      "entry.js": `import { x } from "./esm.js"; const cjs = require("./cjs.js"); console.log(x, cjs);`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "entry.js", "--metafile-md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Should indicate both esm and cjs formats
+    expect(content).toContain("**Format**: esm");
+    expect(content).toContain("**Format**: cjs");
+  });
+
+  test("markdown includes external imports", async () => {
+    using dir = tempDir("metafile-md-external", {
+      "app.js": `import fs from "fs"; console.log(fs);`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "app.js", "--metafile-md", "--external=fs", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Check external is noted in summary
+    expect(content).toContain("External imports");
+
+    // Check external marker in imports list
+    expect(content).toContain("**external**");
+  });
+
+  test("markdown includes exports list", async () => {
+    using dir = tempDir("metafile-md-exports", {
+      "lib.js": `export const foo = 1; export const bar = 2; export default function main() {}`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "lib.js", "--metafile-md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Check exports are listed
+    expect(content).toContain("**Exports**:");
+    expect(content).toContain("`foo`");
+    expect(content).toContain("`bar`");
+    expect(content).toContain("`default`");
+  });
+
+  test("markdown includes bundled modules table", async () => {
+    using dir = tempDir("metafile-md-bundled", {
+      "index.js": `import { utils } from "./utils.js"; utils();`,
+      "utils.js": `export function utils() { return "utility"; }`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "index.js", "--metafile-md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Check bundled modules table
+    expect(content).toContain("**Bundled modules**");
+    expect(content).toContain("| Bytes | Module |");
+  });
+
+  test("markdown includes CSS bundle reference", async () => {
+    using dir = tempDir("metafile-md-css", {
+      "app.js": `import "./styles.css"; console.log("styled");`,
+      "styles.css": `.foo { color: red; }`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "app.js", "--metafile-md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Check CSS bundle reference
+    expect(content).toContain("**CSS bundle**:");
+    expect(content).toContain(".css");
+  });
+
+  test("markdown includes import kinds", async () => {
+    using dir = tempDir("metafile-md-import-kinds", {
+      "entry.js": `
+        import { static_import } from "./static.js";
+        const dynamic = import("./dynamic.js");
+        const required = require("./required.js");
+      `,
+      "static.js": `export const static_import = 1;`,
+      "dynamic.js": `export const dynamic_value = 2;`,
+      "required.js": `module.exports = { required_value: 3 };`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "entry.js", "--metafile-md", "--outdir=dist", "--splitting"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Check import kinds are shown
+    expect(content).toContain("import-statement");
+    expect(content).toContain("dynamic-import");
+    expect(content).toContain("require-call");
+  });
+
+  test("markdown shows commonly imported modules", async () => {
+    using dir = tempDir("metafile-md-common-imports", {
+      "a.js": `import { shared } from "./shared.js"; console.log("a", shared);`,
+      "b.js": `import { shared } from "./shared.js"; console.log("b", shared);`,
+      "c.js": `import { shared } from "./shared.js"; console.log("c", shared);`,
+      "shared.js": `export const shared = "common code";`,
+      "entry.js": `import "./a.js"; import "./b.js"; import "./c.js";`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "entry.js", "--metafile-md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Verify the Dependency Chains section exists
+    expect(content).toContain("## Dependency Chains");
+    expect(content).toContain("Most Commonly Imported Modules");
+
+    // shared.js should be listed as commonly imported (by 3 files)
+    expect(content).toContain("shared.js");
+
+    // Should show imported by a.js, b.js, c.js
+    expect(content).toContain("a.js");
+    expect(content).toContain("b.js");
+    expect(content).toContain("c.js");
+  });
+
+  test("markdown shows largest files for bloat analysis", async () => {
+    using dir = tempDir("metafile-md-bloat", {
+      "entry.js": `import "./small.js"; import "./large.js";`,
+      "small.js": `export const s = 1;`,
+      "large.js": `export const large = "${"x".repeat(500)}";`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "entry.js", "--metafile-md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Verify bloat analysis section
+    expect(content).toContain("## Largest Modules by Output Contribution");
+    expect(content).toContain("bytes contributed to the output bundle");
+    expect(content).toContain("% of Total");
+  });
+
+  test("markdown shows output contribution", async () => {
+    using dir = tempDir("metafile-md-contrib", {
+      "entry.js": `export const x = 1;`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "entry.js", "--metafile-md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Should show output contribution in Full Module Graph
+    expect(content).toContain("**Output contribution**:");
+    expect(content).toMatch(/\d+\.\d+%/); // Should have percentage in Largest Modules section
+  });
+
+  test("markdown includes grep-friendly raw data section", async () => {
+    using dir = tempDir("metafile-md-grep", {
+      "main.js": `import { helper } from "./helper.js"; console.log(helper);`,
+      "helper.js": `export const helper = "utility";`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "main.js", "--metafile-md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Should have table of contents
+    expect(content).toContain("## Table of Contents");
+    expect(content).toContain("[Quick Summary]");
+    expect(content).toContain("[Raw Data for Searching]");
+
+    // Should have raw data section
+    expect(content).toContain("## Raw Data for Searching");
+
+    // Should have grep-friendly markers
+    expect(content).toContain("[MODULE:");
+    expect(content).toContain("[OUTPUT_BYTES:");
+    expect(content).toContain("[IMPORT:");
+    expect(content).toContain("[IMPORTED_BY:");
+
+    // main.js imports helper.js should be searchable
+    expect(content).toMatch(/\[IMPORT: main\.js -> .*helper\.js\]/);
+
+    // helper.js is imported by main.js
+    expect(content).toMatch(/\[IMPORTED_BY: .*helper\.js <- main\.js\]/);
+  });
+
+  test("markdown includes entry point markers", async () => {
+    using dir = tempDir("metafile-md-entry-markers", {
+      "app.js": `console.log("app");`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "app.js", "--metafile-md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Should have entry point marker in raw data
+    expect(content).toContain("[ENTRY:");
+    // Entry format is: [ENTRY: source -> output (bytes)]
+    expect(content).toMatch(/\[ENTRY: app\.js -> .*app\.js/);
+  });
+
+  test("markdown includes external import markers", async () => {
+    using dir = tempDir("metafile-md-external-markers", {
+      "index.js": `import fs from "fs"; console.log(fs);`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "index.js", "--metafile-md", "--external=fs", "--outdir=dist", "--target=node"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Should have external marker in raw data
+    expect(content).toContain("[EXTERNAL:");
+    expect(content).toMatch(/\[EXTERNAL: index\.js imports fs\]/);
+  });
+
+  test("markdown includes node_modules markers", async () => {
+    using dir = tempDir("metafile-md-node-modules", {
+      "app.js": `import lodash from "./node_modules/lodash/index.js"; console.log(lodash);`,
+      "node_modules/lodash/index.js": `export default { version: "4.0.0" };`,
+    });
+
+    await using proc = Bun.spawn({
+      cmd: [bunExe(), "build", "app.js", "--metafile-md", "--outdir=dist"],
+      env: bunEnv,
+      cwd: String(dir),
+      stderr: "pipe",
+      stdout: "pipe",
+    });
+
+    const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
+
+    expect(exitCode).toBe(0);
+
+    const content = await Bun.file(`${dir}/meta.md`).text();
+
+    // Should have node_modules marker in raw data
+    expect(content).toContain("[NODE_MODULES:");
+    expect(content).toContain("node_modules/lodash");
+  });
+});