Merge branch 'main' into jarred/process-change
@@ -83,6 +83,7 @@ const PendingResolution = @import("../resolver/resolver.zig").PendingResolution;
 const ThreadSafeFunction = JSC.napi.ThreadSafeFunction;
 const PackageManager = @import("../install/install.zig").PackageManager;
 const IPC = @import("ipc.zig");
+pub const GenericWatcher = @import("../watcher.zig");
 
 const ModuleLoader = JSC.ModuleLoader;
 const FetchFlags = JSC.FetchFlags;
@@ -430,22 +431,22 @@ pub const ImportWatcher = union(enum) {
 
     pub fn start(this: ImportWatcher) !void {
         switch (this) {
-            inline .hot => |watcher| try watcher.start(),
-            inline .watch => |watcher| try watcher.start(),
+            inline .hot => |w| try w.start(),
+            inline .watch => |w| try w.start(),
             else => {},
         }
     }
 
-    pub inline fn watchlist(this: ImportWatcher) Watcher.WatchListArray {
+    pub inline fn watchlist(this: ImportWatcher) GenericWatcher.WatchList {
         return switch (this) {
-            inline .hot, .watch => |wacher| wacher.watchlist,
+            inline .hot, .watch => |w| w.watchlist,
             else => .{},
         };
     }
 
-    pub inline fn indexOf(this: ImportWatcher, hash: Watcher.HashType) ?u32 {
+    pub inline fn indexOf(this: ImportWatcher, hash: GenericWatcher.HashType) ?u32 {
         return switch (this) {
-            inline .hot, .watch => |wacher| wacher.indexOf(hash),
+            inline .hot, .watch => |w| w.indexOf(hash),
             else => null,
         };
     }
@@ -454,7 +455,7 @@ pub const ImportWatcher = union(enum) {
         this: ImportWatcher,
         fd: StoredFileDescriptorType,
         file_path: string,
-        hash: Watcher.HashType,
+        hash: GenericWatcher.HashType,
         loader: options.Loader,
         dir_fd: StoredFileDescriptorType,
         package_json: ?*PackageJSON,
@@ -2146,7 +2147,7 @@ pub const VirtualMachine = struct {
     pub fn reloadEntryPoint(this: *VirtualMachine, entry_path: []const u8) !*JSInternalPromise {
         this.has_loaded = false;
         this.main = entry_path;
-        this.main_hash = bun.JSC.Watcher.getHash(entry_path);
+        this.main_hash = GenericWatcher.getHash(entry_path);
 
         try this.entry_point.generate(
             this.allocator,
@@ -2184,7 +2185,7 @@ pub const VirtualMachine = struct {
     pub fn reloadEntryPointForTestRunner(this: *VirtualMachine, entry_path: []const u8) !*JSInternalPromise {
         this.has_loaded = false;
         this.main = entry_path;
-        this.main_hash = bun.JSC.Watcher.getHash(entry_path);
+        this.main_hash = GenericWatcher.getHash(entry_path);
 
         this.eventLoop().ensureWaker();
 
@@ -3078,11 +3079,10 @@ extern fn BunDebugger__willHotReload() void;
 
 pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime reload_immediately: bool) type {
     return struct {
-        const watcher = @import("../watcher.zig");
-        pub const Watcher = watcher.NewWatcher(*@This());
+        pub const Watcher = GenericWatcher.NewWatcher(*@This());
         const Reloader = @This();
 
-        onAccept: std.ArrayHashMapUnmanaged(@This().Watcher.HashType, bun.BabyList(OnAcceptCallback), bun.ArrayIdentityContext, false) = .{},
+        onAccept: std.ArrayHashMapUnmanaged(GenericWatcher.HashType, bun.BabyList(OnAcceptCallback), bun.ArrayIdentityContext, false) = .{},
         ctx: *Ctx,
         verbose: bool = false,
 
@@ -3221,7 +3221,7 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime
             // - Directories outside the root directory
            // - Directories inside node_modules
            if (std.mem.indexOf(u8, file_path, "node_modules") == null and std.mem.indexOf(u8, file_path, watch.fs.top_level_dir) != null) {
-                watch.addDirectory(dir_fd, file_path, @This().Watcher.getHash(file_path), false) catch {};
+                watch.addDirectory(dir_fd, file_path, GenericWatcher.getHash(file_path), false) catch {};
            }
        }
 
@@ -3254,9 +3254,9 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime
 
        pub fn onFileUpdate(
            this: *@This(),
-            events: []watcher.WatchEvent,
+            events: []GenericWatcher.WatchEvent,
            changed_files: []?[:0]u8,
-            watchlist: watcher.Watchlist,
+            watchlist: GenericWatcher.WatchList,
        ) void {
            var slice = watchlist.slice();
            const file_paths = slice.items(.file_path);
@@ -3318,6 +3318,13 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime
                        }
                    },
                    .directory => {
+                        if (comptime Environment.isWindows) {
+                            // on windows we receive file events for all items affected by a directory change
+                            // so we only need to clear the directory cache. all other effects will be handled
+                            // by the file events
+                            resolver.bustDirCache(file_path);
+                            continue;
+                        }
                        var affected_buf: [128][]const u8 = undefined;
                        var entries_option: ?*Fs.FileSystem.RealFS.EntriesOption = null;
 
@@ -3368,7 +3375,7 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime
                        resolver.bustDirCache(file_path);
 
                        if (entries_option) |dir_ent| {
-                            var last_file_hash: @This().Watcher.HashType = std.math.maxInt(@This().Watcher.HashType);
+                            var last_file_hash: GenericWatcher.HashType = std.math.maxInt(GenericWatcher.HashType);
 
                            for (affected) |changed_name_| {
                                const changed_name: []const u8 = if (comptime Environment.isMac)
@@ -3381,14 +3388,14 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime
                                var prev_entry_id: usize = std.math.maxInt(usize);
                                if (loader != .file) {
                                    var path_string: bun.PathString = undefined;
-                                    var file_hash: @This().Watcher.HashType = last_file_hash;
+                                    var file_hash: GenericWatcher.HashType = last_file_hash;
                                    const abs_path: string = brk: {
                                        if (dir_ent.entries.get(@as([]const u8, @ptrCast(changed_name)))) |file_ent| {
                                            // reset the file descriptor
                                            file_ent.entry.cache.fd = .zero;
                                            file_ent.entry.need_stat = true;
                                            path_string = file_ent.entry.abs_path;
-                                            file_hash = @This().Watcher.getHash(path_string.slice());
+                                            file_hash = GenericWatcher.getHash(path_string.slice());
                                            for (hashes, 0..) |hash, entry_id| {
                                                if (hash == file_hash) {
                                                    if (file_descriptors[entry_id] != .zero) {
@@ -3416,7 +3423,7 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime
 
                                            @memcpy(_on_file_update_path_buf[file_path_without_trailing_slash.len..][0..changed_name.len], changed_name);
                                            const path_slice = _on_file_update_path_buf[0 .. file_path_without_trailing_slash.len + changed_name.len + 1];
-                                            file_hash = @This().Watcher.getHash(path_slice);
+                                            file_hash = GenericWatcher.getHash(path_slice);
                                            break :brk path_slice;
                                        }
                                    };