Generate summary

Jarred Sumner
2021-06-08 02:54:50 -07:00
parent aa554728f1
commit cdb9af36c1
10 changed files with 1200 additions and 934 deletions

.gitignore
View File

@@ -37,4 +37,4 @@ out.*
out
.parcel-cache
esbuilddir
*.jsbundle
*.jsb

.vscode/launch.json
View File

@@ -55,12 +55,12 @@
{
"type": "lldb",
"request": "launch",
"name": "Demo .jsbundle",
"name": "Demo .jsb",
"program": "${workspaceFolder}/build/debug/macos-x86_64/esdev",
"args": [
"./src/index.tsx",
"--resolve=dev",
"--jsbundle",
"--jsb",
"--public-url=http://localhost:9000/"
],
"cwd": "${workspaceFolder}/demos/simple-react",
@@ -69,9 +69,9 @@
{
"type": "lldb",
"request": "launch",
"name": "Demo Print .jsbundle",
"name": "Demo Print .jsb",
"program": "${workspaceFolder}/build/debug/macos-x86_64/esdev",
"args": ["./node_modules.jsbundle"],
"args": ["./node_modules.jsb"],
"cwd": "${workspaceFolder}/demos/simple-react",
"console": "internalConsole"
},

File diff suppressed because it is too large

View File

@@ -14,7 +14,8 @@ usingnamespace @import("ast/base.zig");
usingnamespace @import("defines.zig");
const panicky = @import("panic_handler.zig");
const Fs = @import("fs.zig");
const Api = @import("api/schema.zig").Api;
const schema = @import("api/schema.zig");
const Api = schema.Api;
const _resolver = @import("./resolver/resolver.zig");
const sync = @import("sync.zig");
const ThreadPool = sync.ThreadPool;
@@ -264,11 +265,11 @@ pub fn NewBundler(cache_files: bool) type {
};
}
pub fn generate(bundler: *ThisBundler, allocator: *std.mem.Allocator) !void {
pub fn generate(bundler: *ThisBundler, allocator: *std.mem.Allocator, destination: string) !Api.JavascriptBundleContainer {
var tmpdir: std.fs.Dir = bundler.fs.tmpdir();
const tmpname = try bundler.fs.tmpname(".jsbundle");
const tmpname = try bundler.fs.tmpname(".jsb");
var tmpfile = try tmpdir.createFile(tmpname, .{});
var tmpfile = try tmpdir.createFile(tmpname, .{ .read = isDebug });
var generator = GenerateNodeModuleBundle{
.module_list = std.ArrayList(Api.JavascriptBundledModule).init(allocator),
.package_list = std.ArrayList(Api.JavascriptBundledPackage).init(allocator),
@@ -284,7 +285,10 @@ pub fn NewBundler(cache_files: bool) type {
};
var this = &generator;
// Always inline the runtime into the bundle
try generator.appendBytes(initial_header ++ runtime.SourceContent ++ "\n\n");
try generator.appendBytes(&initial_header);
// If we try to be smart and rely on .written, the result turns out to be incorrect
const code_start_pos = try this.tmpfile.getPos();
try generator.appendBytes(runtime.SourceContent ++ "\n\n");
if (bundler.log.level == .verbose) {
bundler.resolver.debug_logs = try DebugLogs.init(allocator);
@@ -303,57 +307,77 @@ pub fn NewBundler(cache_files: bool) type {
// Ensure we never overflow
this.code_end_byte_offset = @truncate(
u32,
std.math.max(this.tmpfile_byte_offset, @truncate(u32, initial_header.len)) - initial_header.len,
// Doing this math ourselves does not reliably produce correct results
(try this.tmpfile.getPos()),
);
if (isDebug) {
Output.print(
"Wrote {d} bytes of code for {d} modules and {d} packages\n",
.{ this.code_end_byte_offset - code_start_byte_offset, this.module_list.items.len, this.package_list.items.len },
);
}
var javascript_bundle_container = std.mem.zeroes(Api.JavascriptBundleContainer);
std.sort.sort(Api.JavascriptBundledModule, this.module_list.items, this, GenerateNodeModuleBundle.sortJavascriptModuleByPath);
std.sort.sort(
Api.JavascriptBundledModule,
this.module_list.items,
this,
GenerateNodeModuleBundle.sortJavascriptModuleByPath,
);
var hasher = std.hash.Wyhash.init(0);
// We want to sort the packages as well as the files
// Sorting the modules already puts the packages in order,
// so we can just copy them in the loop below.
var sorted_package_list = try allocator.alloc(Api.JavascriptBundledPackage, this.package_list.items.len);
// At this point, the module_list is sorted.
if (this.module_list.items.len > 0) {
var package_id_i: u32 = 0;
var i: usize = 0;
// Assumption: packages are immutable
// The etag is the hash of each module's path in sorted order
// followed by the hash of package-name@version
// This will allow any unused files or package version changes
// to force re-updating the bundle
// Assumption: node_modules are immutable
// Assumption: module files are immutable
// (They're not. But, for our purposes that's okay)
// The etag is:
// - The hash of each module's path in sorted order
// - The hash of each module's code size in sorted order
// - hash(hash(package_name, package_version))
// If this doesn't prove strong enough, we will do a proper content hash
// But I want to avoid that overhead unless proven necessary.
// There's a good chance we don't even strictly need an etag here.
var bytes: [4]u8 = undefined;
while (i < this.module_list.items.len) {
var current_package_id = this.module_list.items[i].package_id;
this.module_list.items[i].package_id = package_id_i;
var offset = @truncate(u32, i);
hasher.update(this.metadataStringPointer(this.module_list.items[i].path));
i += 1;
while (i < this.module_list.items.len and this.module_list.items[i].package_id == current_package_id) : (i += 1) {
this.module_list.items[i].package_id = package_id_i;
// Hash the file path
hasher.update(this.metadataStringPointer(this.module_list.items[i].path));
break;
// Then the length of the code
std.mem.writeIntNative(u32, &bytes, this.module_list.items[i].code.length);
hasher.update(&bytes);
}
this.package_list.items[current_package_id].modules_offset = offset;
this.package_list.items[current_package_id].modules_length = @truncate(u32, i) - offset;
var bytes: [4]u8 = undefined;
// Hash the hash of the package name
// it's hash(hash(package_name, package_version))
std.mem.writeIntNative(u32, &bytes, this.package_list.items[current_package_id].hash);
hasher.update(&bytes);
sorted_package_list[package_id_i] = this.package_list.items[current_package_id];
package_id_i += 1;
}
}
var javascript_bundle = std.mem.zeroes(Api.JavascriptBundle);
javascript_bundle.modules = this.module_list.items;
javascript_bundle.packages = this.package_list.items;
javascript_bundle.packages = sorted_package_list;
javascript_bundle.manifest_string = this.header_string_buffer.list.items;
javascript_bundle.generated_at = @truncate(u32, @intCast(u64, std.time.milliTimestamp()));
var from_name = "node_modules.jsbundle".*;
javascript_bundle.import_from_name = &from_name;
javascript_bundle.import_from_name = destination;
var etag_bytes: [8]u8 = undefined;
std.mem.writeIntNative(u64, &etag_bytes, hasher.final());
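
For reference, a minimal sketch (not part of this commit) of the etag scheme the comments above describe, assuming the module paths are already sorted and using hypothetical names like computeEtag:

const std = @import("std");

// Simplified: hash each sorted module path and its code size, then each
// package's precomputed hash. The loop above does the same work per package
// while it also rewrites package ids.
fn computeEtag(paths: []const []const u8, code_sizes: []const u32, package_hashes: []const u32) u64 {
    var hasher = std.hash.Wyhash.init(0);
    var bytes: [4]u8 = undefined;
    for (paths) |path, i| {
        hasher.update(path);
        std.mem.writeIntNative(u32, &bytes, code_sizes[i]);
        hasher.update(&bytes);
    }
    for (package_hashes) |package_hash| {
        std.mem.writeIntNative(u32, &bytes, package_hash);
        hasher.update(&bytes);
    }
    return hasher.final();
}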
@@ -363,30 +387,55 @@ pub fn NewBundler(cache_files: bool) type {
javascript_bundle_container.bundle = javascript_bundle;
javascript_bundle_container.code_length = this.code_end_byte_offset;
var tmpwriter = this.tmpfile.writer();
try javascript_bundle_container.encode(tmpwriter);
try this.tmpfile.seekTo(magic_bytes.len);
var start_pos = try this.tmpfile.getPos();
var tmpwriter = std.io.bufferedWriter(this.tmpfile.writer());
const SchemaWriter = schema.Writer(@TypeOf(tmpwriter.writer()));
var schema_file_writer = SchemaWriter.init(tmpwriter.writer());
try javascript_bundle_container.encode(&schema_file_writer);
try tmpwriter.flush();
// sanity check
if (isDebug) {
try this.tmpfile.seekTo(start_pos);
var contents = try allocator.alloc(u8, (try this.tmpfile.getEndPos()) - start_pos);
var read_bytes = try this.tmpfile.read(contents);
var buf = contents[0..read_bytes];
var reader = schema.Reader.init(buf, allocator);
var decoder = try Api.JavascriptBundleContainer.decode(
&reader,
);
std.debug.assert(decoder.code_length.? == javascript_bundle_container.code_length.?);
}
var code_length_bytes: [4]u8 = undefined;
std.mem.writeIntNative(u32, &code_length_bytes, this.code_end_byte_offset);
try this.tmpfile.writeAll(&code_length_bytes);
_ = try std.os.pwrite(this.tmpfile.handle, &code_length_bytes, magic_bytes.len);
const top_dir = try std.fs.openDirAbsolute(this.bundler.fs.top_level_dir, .{});
try std.os.renameat(tmpdir.fd, tmpname, top_dir.fd, "node_modules.jsbundle");
_ = C.fchmod(
this.tmpfile.handle,
// chmod 777
0000010 | 0000100 | 0000001 | 0001000 | 0000040 | 0000004 | 0000002 | 0000400 | 0000200 | 0000020,
);
try std.os.renameat(tmpdir.fd, tmpname, top_dir.fd, destination);
// Print any errors at the end
try this.log.print(Output.errorWriter());
if (isDebug) {
Output.println("Saved node_modules.jsbundle", .{});
}
return javascript_bundle_container;
}
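
The tail of generate above follows a write-to-tmpfile, patch-the-header, rename-into-place pattern. A condensed sketch (hypothetical helper, using the same std.os calls as the hunk):

const std = @import("std");

const magic_bytes = "#!/usr/bin/env speedy\n\n";

// Patch the 4-byte code length in at a fixed offset right after the magic
// bytes, then rename the finished tmpfile over the destination.
fn finishBundle(tmpfile: std.fs.File, tmpdir: std.fs.Dir, tmpname: []const u8, top_dir: std.fs.Dir, destination: []const u8, code_end_byte_offset: u32) !void {
    var code_length_bytes: [4]u8 = undefined;
    std.mem.writeIntNative(u32, &code_length_bytes, code_end_byte_offset);
    // pwrite does not move the file offset, so the schema metadata already
    // encoded at the end of the file is left untouched.
    _ = try std.os.pwrite(tmpfile.handle, &code_length_bytes, magic_bytes.len);
    try std.os.renameat(tmpdir.fd, tmpname, top_dir.fd, destination);
}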
pub fn metadataStringPointer(this: *GenerateNodeModuleBundle, ptr: Api.StringPointer) string {
return this.header_string_buffer.list.items[ptr.offset .. ptr.offset + ptr.length];
}
// Since we trim the prefixes, we must also compare the package name
pub fn sortJavascriptModuleByPath(ctx: *GenerateNodeModuleBundle, a: Api.JavascriptBundledModule, b: Api.JavascriptBundledModule) bool {
return std.mem.order(u8, ctx.metadataStringPointer(a.path), ctx.metadataStringPointer(b.path)) == .lt;
return switch (std.mem.order(u8, ctx.metadataStringPointer(ctx.package_list.items[a.package_id].name), ctx.metadataStringPointer(ctx.package_list.items[b.package_id].name))) {
.eq => std.mem.order(u8, ctx.metadataStringPointer(a.path), ctx.metadataStringPointer(b.path)) == .lt,
.lt => true,
else => false,
};
}
// pub fn sortJavascriptPackageByName(ctx: *GenerateNodeModuleBundle, a: Api.JavascriptBundledPackage, b: Api.JavascriptBundledPackage) bool {
@@ -399,11 +448,12 @@ pub fn NewBundler(cache_files: bool) type {
}
fn processImportRecord(this: *GenerateNodeModuleBundle, import_record: ImportRecord) !void {}
const node_module_root_string = "node_modules" ++ std.fs.path.sep_str;
threadlocal var package_key_buf: [512]u8 = undefined;
fn processFile(this: *GenerateNodeModuleBundle, _resolve: _resolver.Result) !void {
var resolve = _resolve;
if (resolve.is_external) return;
const node_module_root_string = comptime "node_modules" ++ std.fs.path.sep_str;
resolve.is_from_node_modules = strings.contains(resolve.path_pair.primary.text, node_module_root_string);
const loader = this.bundler.options.loaders.get(resolve.path_pair.primary.name.ext) orelse .file;
var bundler = this.bundler;
@@ -523,11 +573,22 @@ pub fn NewBundler(cache_files: bool) type {
},
);
}
const node_module_root = strings.indexOf(resolve.path_pair.primary.text, node_module_root_string) orelse unreachable;
// trim node_modules/${package.name}/ from the string to save space
// This reduces metadata size by about 30% for a large-ish file
// A future optimization here could be to reuse the string from the original path
var node_module_root = strings.indexOf(resolve.path_pair.primary.text, node_module_root_string) orelse unreachable;
// omit package name
node_module_root += package.name.len;
// omit node_modules
node_module_root += node_module_root_string.len;
// omit trailing separator
node_module_root += 1;
try this.module_list.append(
Api.JavascriptBundledModule{
.path = try this.appendHeaderString(resolve.path_pair.primary.text[node_module_root + node_module_root_string.len ..]),
.path = try this.appendHeaderString(
resolve.path_pair.primary.text[node_module_root..],
),
.package_id = package_get_or_put_entry.value_ptr.*,
.code = Api.StringPointer{
.length = @truncate(u32, code_length),
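
A toy illustration (hypothetical path, not part of this commit) of the prefix trimming above: everything up to and including node_modules/<package name>/ is dropped before the path is stored in the metadata.

const std = @import("std");

pub fn main() void {
    const node_module_root_string = "node_modules" ++ std.fs.path.sep_str;
    const full_path = "/project/node_modules/react/cjs/react.development.js";
    const package_name = "react";

    var root = std.mem.indexOf(u8, full_path, node_module_root_string).?;
    root += node_module_root_string.len; // drop "node_modules/"
    root += package_name.len; // drop the package name
    root += 1; // drop the trailing separator

    // Only "cjs/react.development.js" ends up in the metadata string buffer.
    std.debug.assert(std.mem.eql(u8, full_path[root..], "cjs/react.development.js"));
}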

View File

@@ -132,7 +132,7 @@ pub const Cli = struct {
clap.parseParam("--platform <STR> \"browser\" or \"node\". Defaults to \"browser\"") catch unreachable,
clap.parseParam("--main-fields <STR>... Main fields to lookup in package.json. Defaults to --platform dependent") catch unreachable,
clap.parseParam("--scan Instead of bundling or transpiling, print a list of every file imported by an entry point, recursively") catch unreachable,
clap.parseParam("--jsbundle Generate a new node_modules.jsbundle file from the current node_modules folder and entry point(s)") catch unreachable,
clap.parseParam("--jsb Generate a new node_modules.jsb file from node_modules and entry point(s)") catch unreachable,
clap.parseParam("<POS>... Entry points to use") catch unreachable,
};
@@ -279,7 +279,7 @@ pub const Cli = struct {
.main_fields = args.options("--main-fields"),
.platform = platform,
.only_scan_dependencies = if (args.flag("--scan")) Api.ScanDependencyMode.all else Api.ScanDependencyMode._none,
.generate_node_module_bundle = if (args.flag("--jsbundle")) true else false,
.generate_node_module_bundle = if (args.flag("--jsb")) true else false,
};
}
};
@@ -306,10 +306,26 @@ pub const Cli = struct {
MainPanicHandler.Singleton = &panicker;
var args = try Arguments.parse(alloc.static, stdout, stderr);
if ((args.entry_points.len == 1 and args.entry_points[0].len > ".jsbundle".len and args.entry_points[0][args.entry_points[0].len - ".jsbundle".len] == '.' and strings.eqlComptime(args.entry_points[0][args.entry_points[0].len - "jsbundle".len ..], "jsbundle"))) {
if ((args.entry_points.len == 1 and args.entry_points[0].len > ".jsb".len and args.entry_points[0][args.entry_points[0].len - ".jsb".len] == '.' and strings.eqlComptime(args.entry_points[0][args.entry_points[0].len - "jsb".len ..], "jsb"))) {
var out_buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined;
var input = try std.fs.openFileAbsolute(try std.os.realpath(args.entry_points[0], &out_buffer), .{ .read = true });
try NodeModuleBundle.printBundle(std.fs.File, input, @TypeOf(stdout), stdout);
const params = comptime [_]clap.Param(clap.Help){
clap.parseParam("--summary Print a summary") catch unreachable,
clap.parseParam("<POS>... ") catch unreachable,
};
var jsBundleArgs = clap.parse(clap.Help, &params, .{}) catch |err| {
try NodeModuleBundle.printBundle(std.fs.File, input, @TypeOf(stdout), stdout);
return;
};
if (jsBundleArgs.flag("--summary")) {
try NodeModuleBundle.printSummaryFromDisk(std.fs.File, input, @TypeOf(stdout), stdout, allocator);
} else {
try NodeModuleBundle.printBundle(std.fs.File, input, @TypeOf(stdout), stdout);
}
return;
}
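
The long suffix condition above reads as "exactly one entry point, and it names a .jsb file". A rough equivalent using std.mem.endsWith (a sketch, not the code in this commit):

const std = @import("std");

fn looksLikeBundle(entry_points: []const []const u8) bool {
    return entry_points.len == 1 and
        entry_points[0].len > ".jsb".len and
        std.mem.endsWith(u8, entry_points[0], ".jsb");
}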
@@ -326,7 +342,15 @@ pub const Cli = struct {
if ((args.generate_node_module_bundle orelse false)) {
var this_bundler = try bundler.ServeBundler.init(allocator, &log, args);
this_bundler.configureLinker();
try bundler.ServeBundler.GenerateNodeModuleBundle.generate(&this_bundler, allocator);
var filepath = "node_modules.jsb";
var node_modules = try bundler.ServeBundler.GenerateNodeModuleBundle.generate(&this_bundler, allocator, filepath);
var elapsed = @divTrunc(std.time.nanoTimestamp() - start_time, @as(i128, std.time.ns_per_ms));
var bundle = NodeModuleBundle.init(node_modules, allocator);
bundle.printSummary();
const indent = comptime " ";
Output.prettyln(indent ++ "<d>{d:6}ms elapsed", .{@intCast(u32, elapsed)});
Output.prettyln(indent ++ "<r>Saved to ./{s}", .{filepath});
return;
}

View File

@@ -6,3 +6,10 @@ pub extern "c" fn clonefileat(c_int, [*c]const u8, c_int, [*c]const u8, uint32_t
pub extern "c" fn fclonefileat(c_int, c_int, [*c]const u8, uint32_t: c_int) c_int;
// int clonefile(const char * src, const char * dst, int flags);
pub extern "c" fn clonefile([*c]const u8, [*c]const u8, uint32_t: c_int) c_int;
pub extern "c" fn chmod([*c]const u8, mode_t) c_int;
pub extern "c" fn fchmod(c_int, mode_t) c_int;
pub extern "c" fn umask(mode_t) mode_t;
pub extern "c" fn fchmodat(c_int, [*c]const u8, mode_t, c_int) c_int;
const mode_t = u16;

View File

@@ -87,7 +87,7 @@ pub fn StreamingClap(comptime Id: type, comptime ArgIterator: type) type {
return Arg(Id){ .param = param, .value = value };
}
return parser.err(arg, .{ .long = name }, error.InvalidArgument);
return null;
},
.short => return try parser.chainging(.{
.arg = arg,

View File

@@ -70,6 +70,11 @@ pub const Output = struct {
// return @TypeOf(std.io.bufferedWriter(stdout.writer()));
}
};
const BufferedStream = std.io.BufferedWriter(4096, @typeInfo(@TypeOf(Source.StreamType.writer)).Fn.return_type.?);
buffered_stream: BufferedStream,
buffered_error_stream: BufferedStream,
stream: StreamType,
error_stream: StreamType,
out_buffer: []u8 = &([_]u8{}),
@@ -79,13 +84,28 @@ pub const Output = struct {
stream: StreamType,
err: StreamType,
) Source {
return Source{ .stream = stream, .error_stream = err };
return Source{
.stream = stream,
.error_stream = err,
.buffered_stream = BufferedStream{ .unbuffered_writer = stream.writer() },
.buffered_error_stream = BufferedStream{ .unbuffered_writer = err.writer() },
};
}
pub fn set(_source: *Source) void {
source = _source;
}
};
pub var enable_ansi_colors = isNative;
pub var enable_buffering = true;
pub fn enableBuffering() void {
enable_buffering = true;
}
pub fn disableBuffering() void {
enable_buffering = false;
}
pub fn errorWriter() @typeInfo(@TypeOf(Source.StreamType.writer)).Fn.return_type.? {
return source.error_stream.writer();
@@ -97,6 +117,8 @@ pub const Output = struct {
pub fn flush() void {
if (isNative) {
source.buffered_stream.flush() catch {};
source.buffered_error_stream.flush() catch {};
// source.stream.flush() catch {};
// source.error_stream.flush() catch {};
}
@@ -140,7 +162,142 @@ pub const Output = struct {
const root = @import("root");
root.console_log(root.Uint8Array.fromSlice(source.out_buffer[0..source.stream.pos]));
} else {
std.fmt.format(source.stream.writer(), fmt, args) catch unreachable;
if (enable_buffering) {
std.fmt.format(source.buffered_stream.writer(), fmt, args) catch unreachable;
} else {
std.fmt.format(writer(), fmt, args) catch unreachable;
}
}
}
// Valid colors:
// <black>
// <blue>
// <cyan>
// <green>
// <magenta>
// <red>
// <white>
// <yellow>
// <b> - bold
// <d> - dim
// </r> - reset
// <r> - reset
fn _pretty(comptime fmt: string, args: anytype, comptime printer: anytype, comptime is_enabled: bool) void {
comptime var new_fmt: [fmt.len * 4]u8 = undefined;
comptime var new_fmt_i: usize = 0;
comptime const ED = "\x1b[";
@setEvalBranchQuota(9999);
comptime var i: usize = 0;
comptime while (i < fmt.len) {
const c = fmt[i];
switch (c) {
'\\' => {
i += 1;
if (fmt.len < i) {
switch (fmt[i]) {
'<', '>' => {
i += 1;
},
else => {
new_fmt[new_fmt_i] = '\\';
new_fmt_i += 1;
new_fmt[new_fmt_i] = fmt[i];
new_fmt_i += 1;
},
}
}
},
'>' => {
i += 1;
},
'{' => {
while (fmt.len > i and fmt[i] != '}') {
new_fmt[new_fmt_i] = fmt[i];
new_fmt_i += 1;
i += 1;
}
},
'<' => {
i += 1;
var is_reset = fmt[i] == '/';
if (is_reset) i += 1;
var start: usize = i;
while (i < fmt.len and fmt[i] != '>') {
i += 1;
}
const color_name = fmt[start..i];
const color_str = color_picker: {
if (std.mem.eql(u8, color_name, "black")) {
break :color_picker ED ++ "30m";
} else if (std.mem.eql(u8, color_name, "blue")) {
break :color_picker ED ++ "34m";
} else if (std.mem.eql(u8, color_name, "b")) {
break :color_picker ED ++ "1m";
} else if (std.mem.eql(u8, color_name, "d")) {
break :color_picker ED ++ "2m";
} else if (std.mem.eql(u8, color_name, "cyan")) {
break :color_picker ED ++ "36m";
} else if (std.mem.eql(u8, color_name, "green")) {
break :color_picker ED ++ "32m";
} else if (std.mem.eql(u8, color_name, "magenta")) {
break :color_picker ED ++ "35m";
} else if (std.mem.eql(u8, color_name, "red")) {
break :color_picker ED ++ "31m";
} else if (std.mem.eql(u8, color_name, "white")) {
break :color_picker ED ++ "37m";
} else if (std.mem.eql(u8, color_name, "yellow")) {
break :color_picker ED ++ "33m";
} else if (std.mem.eql(u8, color_name, "r")) {
is_reset = true;
break :color_picker "";
} else {
@compileError("Invalid color name passed:" ++ color_name);
}
};
var orig = new_fmt_i;
if (is_enabled) {
if (!is_reset) {
orig = new_fmt_i;
new_fmt_i += color_str.len;
std.mem.copy(u8, new_fmt[orig..new_fmt_i], color_str);
}
if (is_reset) {
const reset_sequence = "\x1b[0m";
orig = new_fmt_i;
new_fmt_i += reset_sequence.len;
std.mem.copy(u8, new_fmt[orig..new_fmt_i], reset_sequence);
}
}
},
else => {
new_fmt[new_fmt_i] = fmt[i];
new_fmt_i += 1;
i += 1;
},
}
};
printer(new_fmt[0..new_fmt_i], args);
}
pub fn pretty(comptime fmt: string, args: anytype) void {
if (enable_ansi_colors) {
_pretty(fmt, args, print, true);
} else {
_pretty(fmt, args, print, false);
}
}
pub fn prettyln(comptime fmt: string, args: anytype) void {
if (enable_ansi_colors) {
_pretty(fmt, args, println, true);
} else {
_pretty(fmt, args, println, false);
}
}
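
A small usage sketch of the color-tag formatter (hypothetical call site, mirroring the calls added in the CLI hunk earlier in this commit). When enable_ansi_colors is false, the same tags are still parsed at comptime but no escape codes are emitted:

const elapsed: u32 = 42; // placeholder value
const filepath = "node_modules.jsb";
Output.prettyln("  <d>{d:6}ms elapsed<r>", .{elapsed});
Output.prettyln("  Saved to <b>./{s}<r>", .{filepath});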

View File

@@ -38,6 +38,7 @@ pub fn main() anyerror!void {
// defer stdout.flush() catch {};
// defer stderr.flush() catch {};
Output.Source.set(&output_source);
Output.enable_ansi_colors = stderr.isTty();
defer Output.flush();
try cli.Cli.start(std.heap.c_allocator, stdout, stderr, MainPanicHandler);
}

View File

@@ -1,16 +1,27 @@
const Api = @import("./api/schema.zig").Api;
const schema = @import("./api/schema.zig");
const Api = schema.Api;
const std = @import("std");
usingnamespace @import("global.zig");
pub const NodeModuleBundle = struct {
container: *Api.JavascriptBundleContainer,
bundle: *Api.JavascriptBundle,
container: Api.JavascriptBundleContainer,
bundle: Api.JavascriptBundle,
allocator: *std.mem.Allocator,
bytes_ptr: []u8 = undefined,
bytes: []u8 = undefined,
fd: FileDescriptorType = 0,
pub const magic_bytes = "#!/usr/bin/env speedy\n\n";
threadlocal var jsbundle_prefix: [magic_bytes.len + 5]u8 = undefined;
pub fn init(container: Api.JavascriptBundleContainer, allocator: *std.mem.Allocator) NodeModuleBundle {
return NodeModuleBundle{
.container = container,
.bundle = container.bundle.?,
.allocator = allocator,
};
}
pub fn getCodeEndPosition(stream: anytype, comptime needs_seek: bool) !u32 {
if (needs_seek) try stream.seekTo(0);
@@ -23,18 +34,117 @@ pub const NodeModuleBundle = struct {
}
pub fn loadBundle(allocator: *std.mem.Allocator, stream: anytype) !NodeModuleBundle {
const end = try getCodeEndPosition(stream);
try stream.seekTo(end + 1);
var reader = stream.reader();
var container = try Api.JavascriptBundleContainer.decode(allocator, reader);
const end = try getCodeEndPosition(stream, false);
try stream.seekTo(end);
const file_end = try stream.getEndPos();
var file_bytes = try allocator.alloc(u8, file_end - end);
var read_count = try stream.read(file_bytes);
var read_bytes = file_bytes[0..read_count];
var reader = schema.Reader.init(read_bytes, allocator);
var container = try Api.JavascriptBundleContainer.decode(&reader);
return NodeModuleBundle{
.allocator = allocator,
.container = container,
.bundle = container.bundle,
.fd = if (std.meta.trait.hasField("handle")(stream)) stream.handle else 0,
.bundle = container.bundle.?,
.fd = stream.handle,
.bytes = read_bytes,
.bytes_ptr = file_bytes,
};
}
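
Taken together with the bundler's save path earlier in this commit, loadBundle implies this on-disk layout (an assumption drawn from the diff, not a documented format): the magic bytes, then a u32 giving the offset where the code ends and the schema-encoded metadata begins, then the concatenated code, then the metadata. A hedged sketch of reading that length field, as it might look inside this module:

fn readCodeEndPosition(file: std.fs.File) !u32 {
    // The u32 written via pwrite right after the magic bytes records where the
    // bundled code ends and the Api.JavascriptBundleContainer metadata begins.
    var buf: [4]u8 = undefined;
    const read_len = try file.preadAll(&buf, magic_bytes.len);
    if (read_len != buf.len) return error.UnexpectedEndOfFile;
    return std.mem.readIntNative(u32, &buf);
}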
pub fn str(bundle: *const NodeModuleBundle, pointer: Api.StringPointer) string {
return bundle.bundle.manifest_string[pointer.offset .. pointer.offset + pointer.length];
}
pub fn getPackageSize(this: *const NodeModuleBundle, pkg: Api.JavascriptBundledPackage) usize {
const modules = this.bundle.modules[pkg.modules_offset .. pkg.modules_offset + pkg.modules_length];
var size: usize = 0;
for (modules) |module| {
size += module.code.length;
}
return size;
}
pub fn isPackageBigger(
this: *const NodeModuleBundle,
a: Api.JavascriptBundledPackage,
b: Api.JavascriptBundledPackage,
) bool {
return this.getPackageSize(a) < this.getPackageSize(b);
}
pub fn printSummary(this: *const NodeModuleBundle) void {
const last = this.bundle.packages.len - 1;
const indent = comptime " ";
for (this.bundle.packages) |pkg, i| {
const modules = this.bundle.modules[pkg.modules_offset .. pkg.modules_offset + pkg.modules_length];
Output.prettyln(
"<r><blue><b>{s}</r> v{s}",
.{ this.str(pkg.name), this.str(pkg.version) },
);
for (modules) |module| {
const size_level = switch (module.code.length) {
0...5_000 => SizeLevel.good,
5_001...74_999 => SizeLevel.neutral,
else => SizeLevel.bad,
};
Output.print(indent, .{});
prettySize(module.code.length, size_level, ">");
Output.prettyln(
indent ++ "<d>{s}</r>" ++ std.fs.path.sep_str ++ "{s}\n",
.{
this.str(pkg.name),
this.str(module.path),
},
);
}
Output.print("\n", .{});
}
const source_code_size = this.container.code_length.? - @intCast(u32, jsbundle_prefix.len);
Output.pretty("<b>", .{});
prettySize(source_code_size, .neutral, ">");
Output.prettyln("<b> JavaScript<r>", .{});
Output.prettyln(indent ++ "<b>{d:6} modules", .{this.bundle.modules.len});
Output.prettyln(indent ++ "<b>{d:6} packages", .{this.bundle.packages.len});
}
pub fn printSummaryFromDisk(
comptime StreamType: type,
input: StreamType,
comptime DestinationStreamType: type,
output: DestinationStreamType,
allocator: *std.mem.Allocator,
) !void {
const this = try loadBundle(allocator, input);
this.printSummary();
}
const SizeLevel = enum { good, neutral, bad };
fn prettySize(size: u32, level: SizeLevel, comptime align_char: []const u8) void {
switch (size) {
0...1024 * 1024 => {
switch (level) {
.bad => Output.pretty("<red>{d: " ++ align_char ++ "6.2} KB</r>", .{@intToFloat(f64, size) / 1024.0}),
.neutral => Output.pretty("{d: " ++ align_char ++ "6.2} KB</r>", .{@intToFloat(f64, size) / 1024.0}),
.good => Output.pretty("<green>{d: " ++ align_char ++ "6.2} KB</r>", .{@intToFloat(f64, size) / 1024.0}),
}
},
else => {
switch (level) {
.bad => Output.pretty("<red>{d: " ++ align_char ++ "6.2} MB</r>", .{@intToFloat(f64, size) / (1024 * 1024.0)}),
.neutral => Output.pretty("{d: " ++ align_char ++ "6.2} MB</r>", .{@intToFloat(f64, size) / (1024 * 1024.0)}),
.good => Output.pretty("<green>{d: " ++ align_char ++ "6.2} MB</r>", .{@intToFloat(f64, size) / (1024 * 1024.0)}),
}
},
}
}
pub fn printBundle(
comptime StreamType: type,
input: StreamType,
@@ -45,20 +155,18 @@ pub const NodeModuleBundle = struct {
pub fn run(in: StreamType, out: DestinationStreamType, end_at: u32) !void {
var buf: [4096]u8 = undefined;
var remain = @intCast(i64, end_at);
var read_amount: i64 = @intCast(i64, in.read(&buf) catch 0);
var read_amount: i64 = 99999;
while (remain > 0 and read_amount > 0) {
remain -= @intCast(i64, try out.write(buf[0..@intCast(usize, std.math.min(read_amount, remain))]));
read_amount = @intCast(i64, in.read(&buf) catch 0);
remain -= @intCast(i64, try out.write(buf[0..@intCast(usize, std.math.min(read_amount, remain))]));
}
_ = try out.write(buf[0..@intCast(usize, remain + 1)]);
}
};
if (isMac) {
// darwin only allows toggling read-ahead on/off, not a specific amount
_ = std.os.fcntl(input.handle, std.os.F_RDAHEAD, 1) catch 0;
}
const end = try getCodeEndPosition(input, false);
const end = (try getCodeEndPosition(input, false)) - @intCast(u32, jsbundle_prefix.len);
try BufferStreamContext.run(
input,