Fixes ENG-21287
Build times, measured with `bun run build && echo '//' >> src/main.zig && time bun run build`:
|Platform|0.14.1|0.15.2|Speedup|
|-|-|-|-|
|macos debug asan|126.90s|106.27s|1.19x|
|macos debug noasan|60.62s|50.85s|1.19x|
|linux debug asan|292.77s|241.45s|1.21x|
|linux debug noasan|146.58s|130.94s|1.12x|
|linux debug use_llvm=false|n/a|78.27s|1.87x|
|windows debug asan|177.13s|142.55s|1.24x|
Runtime performance:
- `next build` memory usage may have gone up by about 5%; otherwise performance looks the same. Some code that uses writers may have gotten slower, in particular one counting writer and a few previously unbuffered writers that now go through a vtable (see the sketch after this list).
- File size reduced by 800 KB (from 100.2 MB to 99.4 MB)
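
The writer slowdowns come from the move to Zig 0.15's vtable-based writer interface. A minimal sketch of the buffering pattern that avoids most of that per-call overhead, assuming the new `std.Io.Writer` / `std.fs.File.Writer` APIs (illustrative only, not code from this PR):

```zig
const std = @import("std");

pub fn main() !void {
    // Give the File.Writer a buffer so hot loops write into memory and only
    // cross the vtable/syscall boundary when the buffer fills or is flushed.
    var buf: [4096]u8 = undefined;
    var file_writer = std.fs.File.stdout().writer(&buf);
    const out: *std.Io.Writer = &file_writer.interface;

    for (0..1000) |i| {
        try out.print("line {d}\n", .{i}); // buffered; no per-call write()
    }
    try out.flush(); // one flush at the end instead of a write per print
}
```
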
Improvements:
- `@export` hack is no longer needed for watch mode
- the native x86_64 backend for Linux builds faster. To use it, set `use_llvm` to false and `no_link_obj` to false, and set `ASAN_OPTIONS=detect_leaks=0`, otherwise it spams the output with tens of thousands of lines of debug-info errors. Debugging may require the zig lldb fork.
- `zig test-obj`, which we will be able to use for Zig unit tests
Still an issue:
- false 'dependency loop' errors remain in watch mode
- watch mode crashes observed
Follow-up:
- [ ] search `comptime Writer: type` and `comptime W: type` and remove those parameters (see the sketch after this list)
- [ ] remove format_mode in our zig fork
- [ ] remove deprecated.zig autoFormatLabelFallback
- [ ] remove deprecated.zig autoFormatLabel
- [ ] remove deprecated.BufferedWriter and BufferedReader
- [ ] remove override_no_export_cpp_apis as it is no longer needed
- [ ] css Parser(W) -> Parser, and remove all the comptime writer: type
params
- [ ] remove deprecated writer fully
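
For the `comptime Writer: type` cleanup items above, the shape of the change is roughly the following, assuming the non-generic `*std.Io.Writer` interface from Zig 0.15 (the function names are made up for illustration):

```zig
const std = @import("std");

// Before (0.14 era): generic over the writer type, so every distinct writer
// type instantiates another copy of the function.
fn writeGreetingGeneric(comptime Writer: type, writer: Writer) !void {
    try writer.writeAll("hello\n");
}

// After (0.15): a single non-generic function taking the interface pointer.
fn writeGreeting(writer: *std.Io.Writer) !void {
    try writer.writeAll("hello\n");
}

pub fn main() !void {
    var buf: [64]u8 = undefined;
    var stdout = std.fs.File.stdout().writer(&buf);
    try writeGreetingGeneric(*std.Io.Writer, &stdout.interface);
    try writeGreeting(&stdout.interface);
    try stdout.interface.flush();
}
```

The css `Parser(W)` item would presumably collapse the same way: the comptime writer type parameter becomes a runtime `*std.Io.Writer` argument or field.
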
Files that add lines:
```
649 src/deprecated.zig
167 scripts/pack-codegen-for-zig-team.ts
54 scripts/cleartrace-impl.js
46 scripts/cleartrace.ts
43 src/windows.zig
18 src/fs.zig
17 src/bun.js/ConsoleObject.zig
16 src/output.zig
12 src/bun.js/test/debug.zig
12 src/bun.js/node/node_fs.zig
8 src/env_loader.zig
7 src/css/printer.zig
7 src/cli/init_command.zig
7 src/bun.js/node.zig
6 src/string/escapeRegExp.zig
6 src/install/PnpmMatcher.zig
5 src/bun.js/webcore/Blob.zig
4 src/crash_handler.zig
4 src/bun.zig
3 src/install/lockfile/bun.lock.zig
3 src/cli/update_interactive_command.zig
3 src/cli/pack_command.zig
3 build.zig
2 src/Progress.zig
2 src/install/lockfile/lockfile_json_stringify_for_debugging.zig
2 src/css/small_list.zig
2 src/bun.js/webcore/prompt.zig
1 test/internal/ban-words.test.ts
1 test/internal/ban-limits.json
1 src/watcher/WatcherTrace.zig
1 src/transpiler.zig
1 src/shell/builtin/cp.zig
1 src/js_printer.zig
1 src/io/PipeReader.zig
1 src/install/bin.zig
1 src/css/selectors/selector.zig
1 src/cli/run_command.zig
1 src/bun.js/RuntimeTranspilerStore.zig
1 src/bun.js/bindings/JSRef.zig
1 src/bake/DevServer.zig
```
Files that remove lines:
```
-1 src/test/recover.zig
-1 src/sql/postgres/SocketMonitor.zig
-1 src/sql/mysql/MySQLRequestQueue.zig
-1 src/sourcemap/CodeCoverage.zig
-1 src/css/values/color_js.zig
-1 src/compile_target.zig
-1 src/bundler/linker_context/convertStmtsForChunk.zig
-1 src/bundler/bundle_v2.zig
-1 src/bun.js/webcore/blob/read_file.zig
-1 src/ast/base.zig
-2 src/sql/postgres/protocol/ArrayList.zig
-2 src/shell/builtin/mkdir.zig
-2 src/install/PackageManager/patchPackage.zig
-2 src/install/PackageManager/PackageManagerDirectories.zig
-2 src/fmt.zig
-2 src/css/declaration.zig
-2 src/css/css_parser.zig
-2 src/collections/baby_list.zig
-2 src/bun.js/bindings/ZigStackFrame.zig
-2 src/ast/E.zig
-3 src/StandaloneModuleGraph.zig
-3 src/deps/picohttp.zig
-3 src/deps/libuv.zig
-3 src/btjs.zig
-4 src/threading/Futex.zig
-4 src/shell/builtin/touch.zig
-4 src/meta.zig
-4 src/install/lockfile.zig
-4 src/css/selectors/parser.zig
-5 src/shell/interpreter.zig
-5 src/css/error.zig
-5 src/bun.js/web_worker.zig
-5 src/bun.js.zig
-6 src/cli/test_command.zig
-6 src/bun.js/VirtualMachine.zig
-6 src/bun.js/uuid.zig
-6 src/bun.js/bindings/JSValue.zig
-9 src/bun.js/test/pretty_format.zig
-9 src/bun.js/api/BunObject.zig
-14 src/install/install_binding.zig
-14 src/fd.zig
-14 src/bun.js/node/path.zig
-14 scripts/pack-codegen-for-zig-team.sh
-17 src/bun.js/test/diff_format.zig
```
Both lists were generated with `git diff --numstat origin/main...HEAD | awk '{ print ($1-$2)"\t"$3 }' | sort -rn`.
---------
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
Co-authored-by: Meghan Denny <meghan@bun.com>
Co-authored-by: taylor.fish <contact@taylor.fish>
`PatchTask` source (594 lines, 22 KiB, Zig):
```zig
pub const PackageID = bun.install.PackageID;
pub const DependencyID = bun.install.DependencyID;

pub const Lockfile = @import("./lockfile.zig");
pub const PatchedDep = Lockfile.PatchedDep;

pub const Resolution = @import("./resolution.zig").Resolution;

pub const PackageInstall = bun.install.PackageInstall;

pub const bun_hash_tag = bun.install.bun_hash_tag;
pub const max_hex_hash_len: comptime_int = brk: {
    var buf: [128]u8 = undefined;
    break :brk (std.fmt.bufPrint(buf[0..], "{x}", .{std.math.maxInt(u64)}) catch @panic("Buf wasn't big enough.")).len;
};
pub const max_buntag_hash_buf_len: comptime_int = max_hex_hash_len + bun_hash_tag.len + 1;
pub const BuntagHashBuf = [max_buntag_hash_buf_len]u8;

pub const PatchTask = struct {
    manager: *PackageManager,
    tempdir: std.fs.Dir,
    project_dir: []const u8,
    callback: union(enum) {
        calc_hash: CalcPatchHash,
        apply: ApplyPatch,
    },
    task: ThreadPool.Task = .{
        .callback = runFromThreadPool,
    },
    pre: bool = false,
    next: ?*PatchTask = null,

    const debug = bun.Output.scoped(.InstallPatch, .visible);

    const Maybe = bun.sys.Maybe;

    const CalcPatchHash = struct {
        patchfile_path: []const u8,
        name_and_version_hash: u64,

        state: ?EnqueueAfterState = null,

        result: ?u64 = null,

        logger: logger.Log,

        const EnqueueAfterState = struct {
            pkg_id: PackageID,
            dependency_id: DependencyID,
            url: string,
        };
    };

    const ApplyPatch = struct {
        pkg_id: PackageID,
        patch_hash: u64,
        name_and_version_hash: u64,

        patchfilepath: []const u8,
        pkgname: String,

        cache_dir: std.fs.Dir,
        cache_dir_subpath: stringZ,
        cache_dir_subpath_without_patch_hash: stringZ,

        /// this is non-null if this was called before a Task, for example extracting
        task_id: ?Task.Id = null,
        install_context: ?struct {
            dependency_id: DependencyID,
            tree_id: Lockfile.Tree.Id,
            path: std.array_list.Managed(u8),
        } = null,
        // dependency_id: ?struct = null,

        logger: logger.Log,
    };

    pub fn deinit(this: *PatchTask) void {
        switch (this.callback) {
            .apply => {
                this.manager.allocator.free(this.callback.apply.patchfilepath);
                this.manager.allocator.free(this.callback.apply.cache_dir_subpath);
                if (this.callback.apply.install_context) |ictx| ictx.path.deinit();
                this.callback.apply.logger.deinit();
            },
            .calc_hash => {
                // TODO: how to deinit `this.callback.calc_hash.network_task`
                if (this.callback.calc_hash.state) |state| this.manager.allocator.free(state.url);
                this.callback.calc_hash.logger.deinit();
                this.manager.allocator.free(this.callback.calc_hash.patchfile_path);
            },
        }
        bun.destroy(this);
    }

    pub fn runFromThreadPool(task: *ThreadPool.Task) void {
        var patch_task: *PatchTask = @fieldParentPtr("task", task);
        patch_task.runFromThreadPoolImpl();
    }

    pub fn runFromThreadPoolImpl(this: *PatchTask) void {
        debug("runFromThreadPoolImpl {s}", .{@tagName(this.callback)});
        defer {
            defer this.manager.wake();
            this.manager.patch_task_queue.push(this);
        }
        switch (this.callback) {
            .calc_hash => {
                this.callback.calc_hash.result = this.calcHash();
            },
            .apply => {
                bun.handleOom(this.apply());
            },
        }
    }

    pub fn runFromMainThread(
        this: *PatchTask,
        manager: *PackageManager,
        log_level: PackageManager.Options.LogLevel,
    ) !void {
        debug("runFromThreadMainThread {s}", .{@tagName(this.callback)});
        defer {
            if (this.pre) _ = manager.pending_pre_calc_hashes.fetchSub(1, .monotonic);
        }
        switch (this.callback) {
            .calc_hash => try this.runFromMainThreadCalcHash(manager, log_level),
            .apply => this.runFromMainThreadApply(manager),
        }
    }

    pub fn runFromMainThreadApply(this: *PatchTask, manager: *PackageManager) void {
        _ = manager; // autofix
        if (this.callback.apply.logger.errors > 0) {
            defer this.callback.apply.logger.deinit();
            Output.errGeneric("failed to apply patchfile ({s})", .{this.callback.apply.patchfilepath});
            this.callback.apply.logger.print(Output.errorWriter()) catch {};
        }
    }

    fn runFromMainThreadCalcHash(
        this: *PatchTask,
        manager: *PackageManager,
        log_level: PackageManager.Options.LogLevel,
    ) !void {
        // TODO only works for npm package
        // need to switch on version.tag and handle each case appropriately
        const calc_hash = &this.callback.calc_hash;
        const hash = calc_hash.result orelse {
            if (log_level != .silent) {
                if (calc_hash.logger.hasErrors()) {
                    calc_hash.logger.print(Output.errorWriter()) catch {};
                } else {
                    Output.errGeneric("Failed to calculate hash for patch <b>{s}<r>", .{this.callback.calc_hash.patchfile_path});
                }
            }
            Global.crash();
        };

        var gop = bun.handleOom(manager.lockfile.patched_dependencies.getOrPut(manager.allocator, calc_hash.name_and_version_hash));
        if (gop.found_existing) {
            gop.value_ptr.setPatchfileHash(hash);
        } else @panic("No entry for patched dependency, this is a bug in Bun.");

        if (calc_hash.state) |state| {
            const url = state.url;
            const pkg_id = state.pkg_id;
            const dep_id = state.dependency_id;

            const pkg = manager.lockfile.packages.get(pkg_id);

            var out_name_and_version_hash: ?u64 = null;
            var out_patchfile_hash: ?u64 = null;
            manager.setPreinstallState(pkg.meta.id, manager.lockfile, .unknown);
            switch (manager.determinePreinstallState(pkg, manager.lockfile, &out_name_and_version_hash, &out_patchfile_hash)) {
                .done => {
                    // patched pkg in folder path, should now be handled by PackageInstall.install()
                    debug("pkg: {s} done", .{pkg.name.slice(manager.lockfile.buffers.string_bytes.items)});
                },
                .extract => {
                    debug("pkg: {s} extract", .{pkg.name.slice(manager.lockfile.buffers.string_bytes.items)});

                    const task_id = Task.Id.forNPMPackage(manager.lockfile.str(&pkg.name), pkg.resolution.value.npm.version);
                    bun.debugAssert(!manager.network_dedupe_map.contains(task_id));

                    const network_task = try manager.generateNetworkTaskForTarball(
                        // TODO: not just npm package
                        task_id,
                        url,
                        manager.lockfile.buffers.dependencies.items[dep_id].behavior.isRequired(),
                        dep_id,
                        pkg,
                        this.callback.calc_hash.name_and_version_hash,
                        switch (pkg.resolution.tag) {
                            .npm => .allow_authorization,
                            else => .no_authorization,
                        },
                    ) orelse unreachable;
                    if (manager.getPreinstallState(pkg.meta.id) == .extract) {
                        manager.setPreinstallState(pkg.meta.id, manager.lockfile, .extracting);
                        manager.enqueueNetworkTask(network_task);
                    }
                },
                .apply_patch => {
                    debug("pkg: {s} apply patch", .{pkg.name.slice(manager.lockfile.buffers.string_bytes.items)});
                    const patch_task = PatchTask.newApplyPatchHash(
                        manager,
                        pkg.meta.id,
                        hash,
                        this.callback.calc_hash.name_and_version_hash,
                    );
                    if (manager.getPreinstallState(pkg.meta.id) == .apply_patch) {
                        manager.setPreinstallState(pkg.meta.id, manager.lockfile, .applying_patch);
                        manager.enqueuePatchTask(patch_task);
                    }
                },
                else => {},
            }
        }
    }

    // 1. Parse patch file
    // 2. Create temp dir to do all the modifications
    // 3. Copy un-patched pkg into temp dir
    // 4. Apply patches to pkg in temp dir
    // 5. Add bun tag for patch hash
    // 6. rename() newly patched pkg to cache
    pub fn apply(this: *PatchTask) bun.OOM!void {
        var log = &this.callback.apply.logger;
        debug("apply patch task", .{});
        bun.assert(this.callback == .apply);

        const patch: *const ApplyPatch = &this.callback.apply;
        const dir = this.project_dir;
        const patchfile_path = patch.patchfilepath;

        var absolute_patchfile_path_buf: bun.PathBuffer = undefined;
        // 1. Parse the patch file
        const absolute_patchfile_path = bun.path.joinZBuf(&absolute_patchfile_path_buf, &[_][]const u8{
            dir,
            patchfile_path,
        }, .auto);
        // TODO: can the patch file be anything other than utf-8?

        const patchfile_txt = switch (bun.sys.File.readFrom(
            bun.FD.cwd(),
            absolute_patchfile_path,
            this.manager.allocator,
        )) {
            .result => |txt| txt,
            .err => |e| {
                try log.addSysError(
                    this.manager.allocator,
                    e,
                    "failed to read patchfile",
                    .{},
                );
                return;
            },
        };
        defer this.manager.allocator.free(patchfile_txt);
        var patchfile = bun.patch.parsePatchFile(patchfile_txt) catch |e| {
            try log.addErrorFmtOpts(
                this.manager.allocator,
                "failed to parse patchfile: {s}",
                .{@errorName(e)},
                .{},
            );
            return;
        };
        defer patchfile.deinit(bun.default_allocator);

        // 2. Create temp dir to do all the modifications
        var tmpname_buf: [1024]u8 = undefined;
        const tempdir_name = bun.fs.FileSystem.tmpname("tmp", &tmpname_buf, bun.fastRandom()) catch |err| switch (err) {
            // max len is 1+16+1+8+3, well below 1024
            error.NoSpaceLeft => unreachable,
        };

        const system_tmpdir = this.tempdir;

        const pkg_name = this.callback.apply.pkgname;

        const dummy_node_modules: PackageManager.PackageInstaller.NodeModulesFolder = .{
            .path = std.array_list.Managed(u8).init(this.manager.allocator),
            .tree_id = 0,
        };

        const resolution_label, const resolution_tag = brk: {
            // TODO: fix this threadsafety issue.
            const resolution = &this.manager.lockfile.packages.items(.resolution)[patch.pkg_id];
            break :brk .{ bun.handleOom(std.fmt.allocPrint(bun.default_allocator, "{f}", .{resolution.fmt(this.manager.lockfile.buffers.string_bytes.items, .posix)})), resolution.tag };
        };
        defer this.manager.allocator.free(resolution_label);

        // 3. copy the unpatched files into temp dir
        var pkg_install: PackageInstall = .{
            .allocator = bun.default_allocator,
            .cache_dir = this.callback.apply.cache_dir,
            .cache_dir_subpath = this.callback.apply.cache_dir_subpath_without_patch_hash,
            .destination_dir_subpath = tempdir_name,
            .destination_dir_subpath_buf = tmpname_buf[0..],
            .patch = null,
            .progress = null,
            .package_name = pkg_name,
            .package_version = resolution_label,
            // dummy value
            .node_modules = &dummy_node_modules,
            .lockfile = this.manager.lockfile,
        };

        switch (pkg_install.install(true, system_tmpdir, .copyfile, resolution_tag)) {
            .success => {},
            .failure => |reason| {
                return try log.addErrorFmtOpts(
                    this.manager.allocator,
                    "{s} while executing step: {s}",
                    .{ @errorName(reason.err), reason.step.name() },
                    .{},
                );
            },
        }

        {
            const patch_pkg_dir = switch (bun.sys.openat(
                .fromStdDir(system_tmpdir),
                tempdir_name,
                bun.O.RDONLY | bun.O.DIRECTORY,
                0,
            )) {
                .result => |fd| fd,
                .err => |e| return try log.addSysError(
                    this.manager.allocator,
                    e,
                    "failed trying to open temporary dir to apply patch to package: {s}",
                    .{resolution_label},
                ),
            };
            defer patch_pkg_dir.close();

            // 4. apply patch
            if (patchfile.apply(this.manager.allocator, patch_pkg_dir)) |e| {
                return try log.addErrorFmtOpts(
                    this.manager.allocator,
                    "failed applying patch file: {f}",
                    .{e},
                    .{},
                );
            }

            // 5. Add bun tag
            const bun_tag_prefix = bun_hash_tag;
            var buntagbuf: BuntagHashBuf = undefined;
            @memcpy(buntagbuf[0..bun_tag_prefix.len], bun_tag_prefix);
            const hashlen = (std.fmt.bufPrint(buntagbuf[bun_tag_prefix.len..], "{x}", .{this.callback.apply.patch_hash}) catch unreachable).len;
            buntagbuf[bun_tag_prefix.len + hashlen] = 0;
            const buntagfd = switch (bun.sys.openat(
                patch_pkg_dir,
                buntagbuf[0 .. bun_tag_prefix.len + hashlen :0],
                bun.O.RDWR | bun.O.CREAT,
                0o666,
            )) {
                .result => |fd| fd,
                .err => |e| {
                    return try log.addErrorFmtOpts(
                        this.manager.allocator,
                        "failed adding bun tag: {f}",
                        .{e.withPath(buntagbuf[0 .. bun_tag_prefix.len + hashlen :0])},
                        .{},
                    );
                },
            };
            buntagfd.close();
        }

        // 6. rename to cache dir
        var path_in_tmpdir_buf: bun.PathBuffer = undefined;
        const path_in_tmpdir = bun.path.joinZBuf(
            &path_in_tmpdir_buf,
            &[_][]const u8{
                tempdir_name,
                // tempdir_name,
            },
            .auto,
        );

        if (bun.sys.renameatConcurrently(
            .fromStdDir(system_tmpdir),
            path_in_tmpdir,
            .fromStdDir(this.callback.apply.cache_dir),
            this.callback.apply.cache_dir_subpath,
            .{ .move_fallback = true },
        ).asErr()) |e| return try log.addErrorFmtOpts(
            this.manager.allocator,
            "renaming changes to cache dir: {f}",
            .{e.withPath(this.callback.apply.cache_dir_subpath)},
            .{},
        );
    }

    pub fn calcHash(this: *PatchTask) ?u64 {
        bun.assert(this.callback == .calc_hash);
        var log = &this.callback.calc_hash.logger;

        const dir = this.project_dir;
        const patchfile_path = this.callback.calc_hash.patchfile_path;

        var absolute_patchfile_path_buf: bun.PathBuffer = undefined;
        // parse the patch file
        const absolute_patchfile_path = bun.path.joinZBuf(
            &absolute_patchfile_path_buf,
            &[_][]const u8{
                dir,
                patchfile_path,
            },
            .auto,
        );

        const stat: bun.Stat = switch (bun.sys.stat(absolute_patchfile_path)) {
            .err => |e| {
                if (e.getErrno() == .NOENT) {
                    bun.handleOom(log.addErrorFmt(null, Loc.Empty, this.manager.allocator, "Couldn't find patch file: '{s}'\n\nTo create a new patch file run:\n\n <cyan>bun patch {s}<r>", .{
                        this.callback.calc_hash.patchfile_path,
                        this.manager.lockfile.patched_dependencies.get(this.callback.calc_hash.name_and_version_hash).?.path.slice(this.manager.lockfile.buffers.string_bytes.items),
                    }));
                    return null;
                }
                log.addWarningFmt(
                    null,
                    Loc.Empty,
                    this.manager.allocator,
                    "patchfile <b>{s}<r> is empty, please restore or delete it.",
                    .{absolute_patchfile_path},
                ) catch |err| bun.handleOom(err);
                return null;
            },
            .result => |s| s,
        };
        const size: u64 = @intCast(stat.size);
        if (size == 0) {
            log.addErrorFmt(
                null,
                Loc.Empty,
                this.manager.allocator,
                "patchfile <b>{s}<r> is empty, please restore or delete it.",
                .{absolute_patchfile_path},
            ) catch |err| bun.handleOom(err);
            return null;
        }

        const fd = switch (bun.sys.open(absolute_patchfile_path, bun.O.RDONLY, 0)) {
            .err => |e| {
                log.addErrorFmt(
                    null,
                    Loc.Empty,
                    this.manager.allocator,
                    "failed to open patch file: {f}",
                    .{e},
                ) catch |err| bun.handleOom(err);
                return null;
            },
            .result => |fd| fd,
        };
        defer fd.close();

        var hasher = bun.Wyhash11.init(0);

        // what's a good number for this? page size i guess
        const STACK_SIZE = 16384;

        var file = bun.sys.File{ .handle = fd };
        var stack: [STACK_SIZE]u8 = undefined;
        var read: usize = 0;
        while (read < size) {
            const slice = switch (file.readFillBuf(stack[0..])) {
                .result => |slice| slice,
                .err => |e| {
                    log.addErrorFmt(
                        null,
                        Loc.Empty,
                        this.manager.allocator,
                        "failed to read from patch file: {f} ({s})",
                        .{ e, absolute_patchfile_path },
                    ) catch |err| bun.handleOom(err);
                    return null;
                },
            };
            if (slice.len == 0) break;
            hasher.update(slice);
            read += slice.len;
        }

        return hasher.final();
    }

    pub fn notify(this: *PatchTask) void {
        defer this.manager.wake();
        this.manager.patch_task_queue.push(this);
    }

    pub fn schedule(this: *PatchTask, batch: *ThreadPool.Batch) void {
        batch.push(ThreadPool.Batch.from(&this.task));
    }

    pub fn newCalcPatchHash(
        manager: *PackageManager,
        name_and_version_hash: u64,
        state: ?CalcPatchHash.EnqueueAfterState,
    ) *PatchTask {
        const patchdep = manager.lockfile.patched_dependencies.get(name_and_version_hash) orelse @panic("This is a bug");
        const patchfile_path = bun.handleOom(manager.allocator.dupeZ(u8, patchdep.path.slice(manager.lockfile.buffers.string_bytes.items)));

        const pt = bun.new(PatchTask, .{
            .tempdir = manager.getTemporaryDirectory().handle,
            .callback = .{
                .calc_hash = .{
                    .state = state,
                    .patchfile_path = patchfile_path,
                    .name_and_version_hash = name_and_version_hash,
                    .logger = logger.Log.init(manager.allocator),
                },
            },
            .manager = manager,
            .project_dir = FileSystem.instance.top_level_dir,
        });

        return pt;
    }

    pub fn newApplyPatchHash(
        pkg_manager: *PackageManager,
        pkg_id: PackageID,
        patch_hash: u64,
        name_and_version_hash: u64,
    ) *PatchTask {
        const pkg_name = pkg_manager.lockfile.packages.items(.name)[pkg_id];

        const resolution = &pkg_manager.lockfile.packages.items(.resolution)[pkg_id];

        var folder_path_buf: bun.PathBuffer = undefined;
        const stuff = pkg_manager.computeCacheDirAndSubpath(
            pkg_name.slice(pkg_manager.lockfile.buffers.string_bytes.items),
            resolution,
            &folder_path_buf,
            patch_hash,
        );

        const patchfilepath = bun.handleOom(pkg_manager.allocator.dupe(u8, pkg_manager.lockfile.patched_dependencies.get(name_and_version_hash).?.path.slice(pkg_manager.lockfile.buffers.string_bytes.items)));

        const pt = bun.new(PatchTask, .{
            .tempdir = pkg_manager.getTemporaryDirectory().handle,
            .callback = .{
                .apply = .{
                    .pkg_id = pkg_id,
                    .patch_hash = patch_hash,
                    .name_and_version_hash = name_and_version_hash,
                    .cache_dir = stuff.cache_dir,
                    .patchfilepath = patchfilepath,
                    .pkgname = pkg_name,
                    .logger = logger.Log.init(pkg_manager.allocator),
                    // need to dupe this as it's calculated using
                    // `PackageManager.cached_package_folder_name_buf` which may be
                    // modified
                    .cache_dir_subpath = bun.handleOom(pkg_manager.allocator.dupeZ(u8, stuff.cache_dir_subpath)),
                    .cache_dir_subpath_without_patch_hash = bun.handleOom(pkg_manager.allocator.dupeZ(u8, stuff.cache_dir_subpath[0 .. std.mem.indexOf(u8, stuff.cache_dir_subpath, "_patch_hash=") orelse @panic("This is a bug in Bun.")])),
                },
            },
            .manager = pkg_manager,
            .project_dir = FileSystem.instance.top_level_dir,
        });

        return pt;
    }
};

const string = []const u8;
const stringZ = [:0]const u8;

const std = @import("std");

const Fs = @import("../fs.zig");
const FileSystem = Fs.FileSystem;

const bun = @import("bun");
const Global = bun.Global;
const Output = bun.Output;
const PackageManager = bun.PackageManager;
const ThreadPool = bun.ThreadPool;
const String = bun.Semver.String;
const Task = bun.install.Task;

const logger = bun.logger;
const Loc = logger.Loc;
```