Mirror of https://github.com/oven-sh/bun (synced 2026-02-09 10:28:47 +00:00)
[bun install] Add metadata hash
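This commit stores a SHA-512/256 "meta hash" of the lockfile's package list inside bun.lockb and exposes it on the command line. The new surface, as exercised by the diff below (all names are taken from the diff itself):

bun pm hash              # load bun.lockb, recompute the meta hash, and print it
bun pm hash-print        # print the meta hash already stored in bun.lockb
bun pm hash-string       # print the alphabetized name@version text that gets hashed
bun ./bun.lockb --hash   # print the meta hash of a lockfile given as an entry point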
@@ -16,7 +16,7 @@ git init && git add . && git commit -am "Initial commit"
$BUN_BIN install

ORIG_LOCKFILE=$($BUN_BIN bun.lockb)
ORIG_LOCKFILE="$($BUN_BIN pm hash-string)"

[[ -z $(git status --untracked-files=no --porcelain) ]] || {
  echo "ERR: Expected empty git status, got '$(git status --untracked-files=no --porcelain)'"

@@ -25,7 +25,7 @@ ORIG_LOCKFILE=$($BUN_BIN bun.lockb)
$BUN_BIN add react

NEW_LOCKFILE=$($BUN_BIN bun.lockb)
NEW_LOCKFILE="$($BUN_BIN pm hash-string)"

diff <(echo "$ORIG_LOCKFILE") <(echo "$NEW_LOCKFILE") || {
  echo "ERR: Expected lockfile to be unchanged, got '$NEW_LOCKFILE'"
@@ -37,15 +37,19 @@ diff <(echo "$ORIG_LOCKFILE") <(echo "$NEW_LOCKFILE") || {
  exit 1
}

ORIG_HASH=$($BUN_BIN bun.lockb --hash)

$BUN_BIN remove react
$BUN_BIN add react

NEW_LOCKFILE=$($BUN_BIN bun.lockb)
NEW_HASH=$($BUN_BIN bun.lockb --hash)

diff <(echo "$ORIG_LOCKFILE") <(echo "$NEW_LOCKFILE") || {
  echo "ERR: Expected lockfile to be unchanged, got '$NEW_LOCKFILE'"
if [ "$ORIG_HASH" != "$NEW_HASH" ]; then
  echo "ERR: Expected hash to be unchanged, got '$NEW_HASH'"
  exit 1
}
fi

echo '{ "dependencies": { "react": "17.0.2", "react-dom": "17.0.2" } }' >package.json
@@ -57,5 +61,12 @@ $BUN_BIN run ./index.js
echo "var {version} = JSON.parse(require(\"fs\").readFileSync('./node_modules/react/package.json', 'utf8')); if (version !== '17.0.2') {throw new Error('Unexpected react version');}; " >index.js
$BUN_BIN run ./index.js

realpath -e node_modules/react-dom
realpath -e node_modules/react

# This is just making sure that the JS was executed
realpath -e node_modules/react-dom >/dev/null || {
  echo "ERR: Expected react-dom to be installed"
  exit 1
}
realpath -e node_modules/react >/dev/null || {
  echo "ERR: Expected react to be installed"
  exit 1
}
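
The test above keys its assertions on the printed hash rather than on the raw binary lockfile. The same approach can be used outside the test suite, for example as a CI guard against lockfile drift; a minimal sketch (this script is illustrative, not part of the commit):

before="$(bun pm hash)"
bun install
after="$(bun pm hash)"
if [ "$before" != "$after" ]; then
  echo "ERR: bun.lockb changed; run 'bun install' locally and commit the result" >&2
  exit 1
fi
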
@@ -1,6 +1,7 @@
{
  "dependencies": {
    "peechy": "0.4.24"
    "peechy": "0.4.24",
    "react": "latest"
  },
  "scripts": {
    "build-runtime": "esbuild --target=esnext --bundle src/runtime/index.ts --format=iife --platform=browser --global-name=BUN_RUNTIME > src/runtime.out.js; cat src/runtime.footer.js >> src/runtime.out.js",
src/cli.zig

@@ -686,6 +686,8 @@ const AddCompletions = @import("./cli/add_completions.zig");
pub const PrintBundleCommand = struct {
    pub fn exec(ctx: Command.Context) !void {
        @setCold(true);

        const entry_point = ctx.args.entry_points[0];
        var out_buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined;
        var stdout = std.io.getStdOut();
@@ -1053,12 +1055,20 @@ pub const Command = struct {
}

if (strings.eqlComptime(extension, ".lockb")) {
    for (std.os.argv) |arg| {
        if (strings.eqlComptime(std.mem.span(arg), "--hash")) {
            try PackageManagerCommand.printHash(ctx, ctx.args.entry_points[0]);
            return;
        }
    }

    try Install.Lockfile.Printer.print(
        ctx.allocator,
        ctx.log,
        ctx.args.entry_points[0],
        .yarn,
    );

    return;
}
}
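
So when the entry point passed to bun has a .lockb extension, the lockfile is printed in yarn-v1 format, and the new --hash flag short-circuits that to print only the meta hash. Usage, assuming the printer writes to stdout as the code above suggests:

bun ./bun.lockb          # yarn-lockfile-v1 style rendering of the binary lockfile
bun ./bun.lockb --hash   # just the lockfile's meta hash
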
@@ -247,6 +247,8 @@ const BUN_CREATE_DIR = ".bun-create";
var home_dir_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
pub const CreateCommand = struct {
    pub fn exec(ctx: Command.Context, _: []const []const u8) !void {
        @setCold(true);

        Global.configureAllocator(.{ .long_running = false });
        try NetworkThread.init();
@@ -10,6 +10,34 @@ const Path = @import("../resolver/resolve_path.zig");
pub const PackageManagerCommand = struct {
    pub fn printHelp(_: std.mem.Allocator) void {}

    pub fn printHash(ctx: Command.Context, lockfile_: []const u8) !void {
        @setCold(true);
        var lockfile_buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined;
        @memcpy(&lockfile_buffer, lockfile_.ptr, lockfile_.len);
        lockfile_buffer[lockfile_.len] = 0;
        var lockfile = lockfile_buffer[0..lockfile_.len :0];
        var pm = try PackageManager.init(ctx, null, &PackageManager.install_params);

        const load_lockfile = pm.lockfile.loadFromDisk(ctx.allocator, ctx.log, lockfile);
        if (load_lockfile == .not_found) {
            if (pm.options.log_level != .silent)
                Output.prettyError("Lockfile not found", .{});
            Global.exit(1);
        }

        if (load_lockfile == .err) {
            if (pm.options.log_level != .silent)
                Output.prettyError("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)});
            Global.exit(1);
        }

        Output.flush();
        Output.disableBuffering();
        try Output.writer().print("{}", .{load_lockfile.ok.fmtMetaHash()});
        Output.enableBuffering();
        Global.exit(0);
    }

    pub fn exec(ctx: Command.Context) !void {
        var args = try std.process.argsAlloc(ctx.allocator);
        args = args[1..];
@@ -57,6 +85,62 @@ pub const PackageManagerCommand = struct {
            Output.flush();
            return;
        } else if (strings.eqlComptime(first, "hash")) {
            const load_lockfile = pm.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb");
            if (load_lockfile == .not_found) {
                if (pm.options.log_level != .silent)
                    Output.prettyError("Lockfile not found", .{});
                Global.exit(1);
            }

            if (load_lockfile == .err) {
                if (pm.options.log_level != .silent)
                    Output.prettyError("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)});
                Global.exit(1);
            }

            _ = try pm.lockfile.hasMetaHashChanged(false);

            Output.flush();
            Output.disableBuffering();
            try Output.writer().print("{}", .{load_lockfile.ok.fmtMetaHash()});
            Output.enableBuffering();
            Global.exit(0);
        } else if (strings.eqlComptime(first, "hash-print")) {
            const load_lockfile = pm.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb");
            if (load_lockfile == .not_found) {
                if (pm.options.log_level != .silent)
                    Output.prettyError("Lockfile not found", .{});
                Global.exit(1);
            }

            if (load_lockfile == .err) {
                if (pm.options.log_level != .silent)
                    Output.prettyError("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)});
                Global.exit(1);
            }

            Output.flush();
            Output.disableBuffering();
            try Output.writer().print("{}", .{load_lockfile.ok.fmtMetaHash()});
            Output.enableBuffering();
            Global.exit(0);
        } else if (strings.eqlComptime(first, "hash-string")) {
            const load_lockfile = pm.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb");
            if (load_lockfile == .not_found) {
                if (pm.options.log_level != .silent)
                    Output.prettyError("Lockfile not found", .{});
                Global.exit(1);
            }

            if (load_lockfile == .err) {
                if (pm.options.log_level != .silent)
                    Output.prettyError("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)});
                Global.exit(1);
            }

            _ = try pm.lockfile.hasMetaHashChanged(true);
            Global.exit(0);
        }
    }
};
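
The three subcommands added above differ only in what they print: "hash" recomputes the meta hash from the loaded package list before printing it, "hash-print" prints the hash as stored in bun.lockb without recomputing, and "hash-string" dumps the exact text that gets hashed. A quick comparison (variable names here are illustrative):

stored="$(bun pm hash-print)"   # hash persisted in bun.lockb
fresh="$(bun pm hash)"          # hash recomputed from the package list
[ "$stored" = "$fresh" ] || echo "stored meta hash is stale"
bun pm hash-string              # inspect the name@version text behind the hash
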
@@ -340,6 +340,8 @@ pub const UpgradeCommand = struct {
const exe_subpath = Version.folder_name ++ std.fs.path.sep_str ++ "bun";

pub fn exec(ctx: Command.Context) !void {
    @setCold(true);

    _exec(ctx) catch |err| {
        Output.prettyErrorln("<r>bun upgrade failed with error: <red><b>{s}<r>\n\n<cyan>Please upgrade manually<r>:\n <b>curl https://bun.sh/install | bash<r>\n\n", .{@errorName(err)});
        Output.flush();
@@ -2925,6 +2925,7 @@ pub const PackageManager = struct {
    load_lockfile: bool = true,
    install_packages: bool = true,
    save_yarn_lock: bool = false,
    print_meta_hash_string: bool = false,
};

pub const Enable = struct {
@@ -4817,7 +4818,7 @@ pub const PackageManager = struct {
NetworkThread.global.pool.sleep_on_idle_network_thread = true;

const needs_clean_lockfile = had_any_diffs or needs_new_lockfile or manager.package_json_updates.len > 0;

var did_meta_hash_change = needs_clean_lockfile;
if (needs_clean_lockfile) {
    manager.lockfile = try manager.lockfile.clean(manager.package_json_updates);
}

@@ -4833,6 +4834,12 @@ pub const PackageManager = struct {
    manager.lockfile.verifyResolutions(manager.options.local_package_features, manager.options.remote_package_features, log_level);
}

if (needs_clean_lockfile or manager.options.enable.force_save_lockfile) {
    did_meta_hash_change = try manager.lockfile.hasMetaHashChanged(
        PackageManager.verbose_install or manager.options.do.print_meta_hash_string,
    );
}

if (manager.options.global) {
    try manager.setupGlobalDir(&ctx);
}
@@ -4843,7 +4850,7 @@ pub const PackageManager = struct {
// 2. There is a determinism issue in the file where alignment bytes might be garbage data
// This is a bug that needs to be fixed, however we can work around it for now
// by avoiding saving the lockfile
if (manager.options.do.save_lockfile and (needs_clean_lockfile or
if (manager.options.do.save_lockfile and (did_meta_hash_change or
    manager.lockfile.isEmpty() or
    manager.options.enable.force_save_lockfile))
{

@@ -4930,6 +4937,12 @@ pub const PackageManager = struct {
    try Lockfile.Printer.Tree.print(&printer, Output.WriterType, Output.writer(), false);
}

if (!did_meta_hash_change) {
    manager.summary.remove = 0;
    manager.summary.add = 0;
    manager.summary.update = 0;
}

var printed_timestamp = false;
if (install_summary.success > 0) {
    // it's confusing when it shows 3 packages and says it installed 1
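
Together these changes make the meta hash the trigger for persisting the lockfile: bun.lockb is only rewritten, and add/remove/update counts only reported, when the recomputed hash differs from the stored one. One way to observe this in a project with bun.lockb checked in (a sketch, not part of the commit):

bun install                        # meta hash unchanged, so bun.lockb is left untouched
git status --porcelain bun.lockb   # expect no output
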
@@ -80,6 +80,9 @@ const JSAst = @import("../js_ast.zig");
const Origin = @import("./install.zig").Origin;
const PackageIDMultiple = @import("./install.zig").PackageIDMultiple;

pub const MetaHash = [std.crypto.hash.sha2.Sha512256.digest_length]u8;
const zero_hash = std.mem.zeroes(MetaHash);

pub const ExternalStringBuilder = StructBuilder.Builder(ExternalString);
pub const SmallExternalStringList = ExternalSlice(String);

@@ -91,6 +94,8 @@ format: FormatVersion = .v1,
/// Eventually, this will be a relative path to a parent lockfile
workspace_path: string = "",

meta_hash: MetaHash = zero_hash,

packages: Lockfile.Package.List = Lockfile.Package.List{},
buffers: Buffers = Buffers{},
@@ -649,6 +654,31 @@ pub fn clean(old: *Lockfile, updates: []PackageManager.UpdateRequest) !*Lockfile
    return new;
}

pub const MetaHashFormatter = struct {
    meta_hash: *const MetaHash,

    pub fn format(this: MetaHashFormatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
        var remain: []const u8 = this.meta_hash[0..];

        try std.fmt.format(
            writer,
            "{}-{}-{}-{}",
            .{
                std.fmt.fmtSliceHexUpper(remain[0..8]),
                std.fmt.fmtSliceHexLower(remain[8..16]),
                std.fmt.fmtSliceHexUpper(remain[16..24]),
                std.fmt.fmtSliceHexLower(remain[24..32]),
            },
        );
    }
};

pub fn fmtMetaHash(this: *const Lockfile) MetaHashFormatter {
    return .{
        .meta_hash = &this.meta_hash,
    };
}
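
The printed form of the hash is therefore the 32-byte digest split into four 8-byte groups, hex-encoded with alternating upper and lower case and joined by dashes. Shape only; the value below is made up, not a real digest:

bun pm hash-print
# 0123456789ABCDEF-0123456789abcdef-0123456789ABCDEF-0123456789abcdef
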
pub const TreeFiller = std.fifo.LinearFifo([2]PackageID, .Dynamic);

const Cloner = struct {

@@ -821,6 +851,8 @@ pub const Printer = struct {
    lockfile_path_: string,
    format: Format,
) !void {
    @setCold(true);

    var lockfile_path: stringZ = "";

    if (!std.fs.path.isAbsolute(lockfile_path_)) {
@@ -1102,8 +1134,11 @@ pub const Printer = struct {
try writer.writeAll(
    \\# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
    \\# yarn lockfile v1
    \\
    \\
    \\# bun ./bun.lockb --hash:
);
try writer.print(
    "{}\n\n",
    .{this.lockfile.fmtMetaHash()},
);

try Yarn.packages(this, Writer, writer);
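
With this change, the yarn-v1 rendering of the lockfile carries the meta hash in its header comment, so the value is visible in plain text even though bun.lockb itself is binary. Assuming the printer writes to stdout, as the cli.zig hunk above suggests:

bun ./bun.lockb > yarn.lock        # yarn-v1 rendering of the binary lockfile
grep -A1 -- '--hash:' yarn.lock    # the new header comment, with the hash on the next line
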
@@ -2813,6 +2848,9 @@ pub const Serializer = struct {
var writer = stream.writer();
try writer.writeAll(header_bytes);
try writer.writeIntLittle(u32, @enumToInt(this.format));

try writer.writeAll(&this.meta_hash);

const pos = try stream.getPos();
try writer.writeIntLittle(u64, 0);

@@ -2848,8 +2886,12 @@ pub const Serializer = struct {
if (format != @enumToInt(Lockfile.FormatVersion.current)) {
    return error.@"Outdated lockfile version";
}

lockfile.format = Lockfile.FormatVersion.current;
lockfile.allocator = allocator;

_ = try reader.readAll(&lockfile.meta_hash);

const total_buffer_size = try reader.readIntLittle(u64);
if (total_buffer_size > stream.buffer.len) {
    return error.@"Lockfile is missing data";

@@ -2864,6 +2906,7 @@ pub const Serializer = struct {
if ((try stream.reader().readIntLittle(u64)) != 0) {
    return error.@"Lockfile is malformed (expected 0 at the end)";
}

std.debug.assert(stream.pos == total_buffer_size);

load_workspace: {

@@ -2892,3 +2935,76 @@ pub const Serializer = struct {
// const end = try reader.readIntLittle(u64);
}
};
pub fn hasMetaHashChanged(this: *Lockfile, print_name_version_string: bool) !bool {
    const previous_meta_hash = this.meta_hash;
    this.meta_hash = try this.generateMetaHash(print_name_version_string);
    return !strings.eqlLong(&previous_meta_hash, &this.meta_hash, false);
}

pub fn generateMetaHash(this: *Lockfile, print_name_version_string: bool) !MetaHash {
    if (this.packages.len <= 1)
        return zero_hash;

    var string_builder = GlobalStringBuilder{};
    defer string_builder.deinit(this.allocator);
    const names: []const String = this.packages.items(.name);
    const resolutions: []const Resolution = this.packages.items(.resolution);
    const bytes = this.buffers.string_bytes.items;
    var alphabetized_names = try this.allocator.alloc(PackageID, this.packages.len -| 1);
    defer this.allocator.free(alphabetized_names);

    const hash_prefix = "\n-- BEGIN SHA512/256(`${alphabetize(name)}@${order(version)}`) --\n";
    const hash_suffix = "-- END HASH--\n";
    string_builder.cap += hash_prefix.len + hash_suffix.len;
    {
        var i: usize = 1;

        while (i + 16 < this.packages.len) : (i += 16) {
            comptime var j: usize = 0;
            inline while (j < 16) : (j += 1) {
                alphabetized_names[(i + j) - 1] = @truncate(PackageID, (i + j));
                string_builder.fmtCount("{s}@{}\n", .{ names[i + j].slice(bytes), resolutions[i + j].fmt(bytes) });
            }
        }

        while (i < this.packages.len) : (i += 1) {
            alphabetized_names[i - 1] = @truncate(PackageID, i);
            string_builder.fmtCount("{s}@{}\n", .{ names[i].slice(bytes), resolutions[i].fmt(bytes) });
        }
    }

    std.sort.sort(
        PackageID,
        alphabetized_names,
        Lockfile.Package.Alphabetizer{
            .names = names,
            .buf = bytes,
            .resolutions = resolutions,
        },
        Lockfile.Package.Alphabetizer.isAlphabetical,
    );

    string_builder.allocate(this.allocator) catch unreachable;
    string_builder.ptr.?[0..hash_prefix.len].* = hash_prefix.*;
    string_builder.len += hash_prefix.len;

    for (alphabetized_names) |i| {
        _ = string_builder.fmt("{s}@{}\n", .{ names[i].slice(bytes), resolutions[i].fmt(bytes) });
    }

    string_builder.ptr.?[string_builder.len..string_builder.cap][0..hash_suffix.len].* = hash_suffix.*;
    string_builder.len += hash_suffix.len;

    const alphabetized_name_version_string = string_builder.ptr.?[0..string_builder.len];
    if (print_name_version_string) {
        Output.flush();
        Output.disableBuffering();
        Output.writer().writeAll(alphabetized_name_version_string) catch unreachable;
        Output.enableBuffering();
    }

    var digest = zero_hash;
    std.crypto.hash.sha2.Sha512256.hash(alphabetized_name_version_string, &digest, .{});

    return digest;
}
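
Since bun pm hash-string prints exactly the alphabetized_name_version_string that is fed to SHA-512/256 here, the digest should be reproducible outside bun. A sketch, assuming an OpenSSL build that supports the sha512-256 digest and that nothing else is written to stdout:

bun pm hash-string > hash-input.txt
openssl dgst -sha512-256 hash-input.txt
# same bytes as bun pm hash, just printed as plain lowercase hex without the dash grouping
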
@@ -2,6 +2,7 @@ const string = @import("string_types.zig").string;
const Allocator = @import("std").mem.Allocator;
const assert = @import("std").debug.assert;
const copy = @import("std").mem.copy;
const Env = @import("./env.zig");

const StringBuilder = @This();

@@ -19,28 +20,46 @@ pub fn allocate(this: *StringBuilder, allocator: Allocator) !void {
    this.len = 0;
}

pub fn deinit(this: *StringBuilder, allocator: Allocator) void {
    if (this.ptr == null or this.cap == 0) return;
    allocator.free(this.ptr.?[0..this.cap]);
}

pub fn append(this: *StringBuilder, slice: string) string {
    assert(this.len <= this.cap); // didn't count everything
    assert(this.ptr != null); // must call allocate first
    if (Env.allow_assert) {
        assert(this.len <= this.cap); // didn't count everything
        assert(this.ptr != null); // must call allocate first
    }

    copy(u8, this.ptr.?[this.len..this.cap], slice);
    const result = this.ptr.?[this.len..this.cap][0..slice.len];
    this.len += slice.len;

    assert(this.len <= this.cap);
    if (Env.allow_assert) {
        assert(this.len <= this.cap);
    }

    return result;
}

const std = @import("std");
pub fn fmt(this: *StringBuilder, comptime str: string, args: anytype) string {
    assert(this.len <= this.cap); // didn't count everything
    assert(this.ptr != null); // must call allocate first
    if (Env.allow_assert) {
        assert(this.len <= this.cap); // didn't count everything
        assert(this.ptr != null); // must call allocate first
    }

    var buf = this.ptr.?[this.len..this.cap];
    const out = std.fmt.bufPrint(buf, str, args) catch unreachable;
    this.len += out.len;

    assert(this.len <= this.cap);
    if (Env.allow_assert) {
        assert(this.len <= this.cap);
    }

    return out;
}

pub fn fmtCount(this: *StringBuilder, comptime str: string, args: anytype) void {
    this.cap += std.fmt.count(str, args);
}