Mirror of https://github.com/oven-sh/bun, synced 2026-02-13 20:39:05 +00:00
Bun gets a new bundler (#2312)
* alright now just gotta try running it
* fix a gajillion compiler errors
* even more code
* okay i fixed more errors
* wip
* Update launch.json
* Update string_builder.zig
* `fast_debug_build_mode` makes debug build 2x faster
* Update bundle_v2.zig
* more code!
* It bundles!
* Rename `Bun.Transpiler` to `Bun.Bundler`
* `import()` expressions almost work
* wip attempt to get import() expr to work
* Bundle namespace imports
* Attempt to fix the issue with import() unsuccessfully
* consider current working directory when resolving relative paths (#2313)
* consider current working directory when resolving relative paths
fixes #2298
* comment test
---------
Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>
* support `expect().toThrow(/pattern/)` (#2314)
- fix time-zone-dependent test failure
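A minimal sketch of the RegExp form added above, using `bun:test` (the test body is made up):

```ts
import { expect, test } from "bun:test";

test("toThrow accepts a RegExp matched against the error message", () => {
  const boom = () => {
    throw new Error("unexpected token '<'");
  };
  expect(boom).toThrow(/unexpected token/);
});
```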
* fix missing `Blob` error messages on Linux (#2315)
* fix & clean up tests (#2318)
- skip flaky tests when running as `root`
- use `expect().toThrow()`
- clean up temporary files after tests
* feat(tty): add some `tty.WriteStream` methods to `process.{stdout, stderr}` (#2320)
* feat(stdio): add some `tty.WriteStream` methods
* chore(builtins): add process builtin gen'd code
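A rough sketch of the `tty.WriteStream` surface this exposes on `process.stdout`/`process.stderr`; the exact set of methods added in this change isn't listed above, so treat the members below as illustrative standard Node.js ones:

```ts
// Illustrative only: standard Node.js tty.WriteStream members.
if (process.stdout.isTTY) {
  console.log(`terminal size: ${process.stdout.columns}x${process.stdout.rows}`);
  console.log(`supports color: ${process.stdout.hasColors?.()}`);
}
```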
* Fix docker install command
* `bun test` on macOS in GitHub Actions (#2322)
* Fixes #2323
* throw invalid parameter errors in `crypto.scryptSync` (#2331)
* throw invalid parameter errors
* remove comptime, add empty buffer function
* remove error_name comptime
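A small sketch of the behavior described above, assuming Node-compatible `node:crypto`; the exact error codes and messages aren't specified in this changelog:

```ts
import { scryptSync } from "node:crypto";

// A valid call derives a 32-byte key.
const key = scryptSync("password", "salt", 32);
console.log(key.length); // 32

// Invalid parameters should now throw instead of silently misbehaving.
try {
  scryptSync("password", "salt", -1); // negative key length
} catch (err) {
  console.error("rejected:", (err as Error).message);
}
```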
* Add reference documentation for bun:test (#2327)
* Reorganize tests (#2332)
* Fix html-rewriter.test.js
* fix the wrong thing being incremented in hmr example (#2334)
* Add more test harness
* Improve Benchmarking page, small fixes (#2339)
* Improve benchmarking page
* WIP
* Add typescript instructions to hot
* Document preload in Plugins. Fix loader in plugin types.
* Fix typo
* Fix links
* run prettier
* Document openInEditor
* improve `Buffer` compatibility with Node.js (#2341)
* improve `Buffer` compatibility with Node.js
* use `memmove()`
allow `encoding` to be `undefined`
* run `bun test` after macOS builds (#2343)
* "binary" is an alias of "latin1"
Fixes https://github.com/oven-sh/bun/issues/2110
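A quick illustration of the alias and the relaxed `encoding` handling noted above:

```ts
const a = Buffer.from("héllo", "latin1");
const b = Buffer.from("héllo", "binary"); // "binary" is an alias of "latin1"
console.log(a.equals(b)); // true

console.log(Buffer.from("hi", undefined).toString()); // undefined encoding falls back to utf8
```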
* More spec compliant `Blob.prototype.type` (#2340)
* Make `Blob.prototype.type` more spec compliant
* Add a few more checks for isNumber()
* Fix `make headers`
* Safer JSValue.isString()
* More tests for blob.slice
* Make `Blob.prototype.type` more spec compliant
* Add isASCII check
* Fix types
* Fix failing type test
* Update blob.zig
* Update blob.zig
* Fix .eql check on empty values
---------
Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>
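For reference, the File API normalization being targeted above (ASCII-lowercase the type, and drop types containing non-ASCII or otherwise invalid characters); this sketch shows the spec behavior, not every case the change touches:

```ts
const json = new Blob(["{}"], { type: "Application/JSON" });
console.log(json.type); // "application/json" — normalized to ASCII lowercase

const weird = new Blob(["x"], { type: "café/latté" });
console.log(weird.type); // "" — types with non-ASCII characters are rejected
```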
* Fix bug in test runner
* Support `import()` expressions
* Implement `require()`
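A tiny entry point exercising both forms the bundler now handles (file names are hypothetical):

```ts
// entry.ts — both a dynamic import() expression and a CommonJS require()
const legacy = require("./legacy-util.cjs");

const plugin = await import("./plugin.ts");
plugin.register(legacy);
```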
* clean up bit_set.zig slightly
* Move some things around
* misc cleanup
* Cleanup some things
* Fix a lot of stuff
* Fix `module.exports.fn = fn;` in ESM entry point
* Fix crash when printing a file
* Fix issue with class names
* Fix issue with `export default identifier`
* Update js_parser.zig
* optimization: inline single-property object accesses and arrays
* Fix undefined memory in renamed symbols list
* Handle call target
* wip
* Inline it
* Fix undefined memory issue when reclaiming blocks in ast
* Halt linking on any parse errors
* alias
* Rename `enable_bundling` to `enable_legacy_bundling`
* Workaround anonymous struct literal zig bug
* Use slower approach (without bitset) because it doesn't break after 8 symbols
* Fix incorrectly-renaming statically defined symbols
* Handle more edgecases in our bit_set fork
* Reduce number of allocations for `define`
* Do not rename unbound symbols
* Clean up dot defines a little more
* Make the generated names prettier
* Workaround runtime symbol missing issue
* Fail the build on errors
* Support export * from
* Support `--outfile`
* partially fix renaming
* fancier symbol renaming impl
* misc, extremely revertible cleanup
* Fix up some bugs with symbol renaming
* formatting
* Update launch.json
* Parse `__PURE__` comments
* clean up simd code for pure comments
* changes to merge
* workaround runtime issue
* Fix issue with `export * as` not propagating correctly
* Make all top-level declarations `var` when bundling
* Fix missing prefix
* Fix assigning to stack copy
* Fix missing runtime symbol
* Fix bug with namespace exports
* Dramatically reduce allocations
* Update launch.json
* Add missing flags
* Update js_parser.zig
* small cleanup
* Make the export name better
* Fix unnecessary `var foo = foo`
* Implement CommonJS -> ESM conversion
* Implement module redirects
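Roughly what the CommonJS-to-ESM conversion enables (hypothetical files):

```ts
// math.cjs — a CommonJS module
module.exports.add = (a, b) => a + b;

// app.ts — an ESM consumer; the bundler wraps the CJS module so named access works
import { add } from "./math.cjs";
console.log(add(2, 3)); // 5
```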
* Port esbuild bundler tests for new bundler (#2380)
* started porting esbuild tests
* clean up test names and api before moving on
* port tests using a program I wrote
* replace todo generated comment
* fix generated tests not including some files
* work on tests
* [github web editor] add define, external, inject, minifySyntax, minifyWhitespace options.
* get most of the todo comments out of the way, but expectBundled does not handle most of the cases
* continue working on esbuild tests
* use test.skip for unsupported tests
* Fixups for test runner
* Hoist imports & exports
* Fix test
* Hoist classes
* bundler test refining, 51/835
* Fix runtime require
* bundler test refining, 81/835
* bundler test refining, 93/835
* Make the test work in any timezone
* feat(expect): update toBeInstanceOf (#2396)
* feat: update instanceof binding
* fix: address PR comments
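Typical usage of the matcher updated above:

```ts
import { expect, test } from "bun:test";

class HttpError extends Error {}

test("toBeInstanceOf walks the prototype chain", () => {
  expect(new HttpError("nope")).toBeInstanceOf(HttpError);
  expect(new HttpError("nope")).toBeInstanceOf(Error);
});
```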
* Rename `expectObjectTypeCount` to `expectMaxObjectTypeCount`
* Fix socket tests with connection errors (#2403)
* release pending activity with connection error handler
* unref poll_ref
* remove trailing comma
* Organize Dockerfiles for official status
* Remove test Dockerfile
* Remove old Docker workflow
* Feat(test): add toMatch (#2404)
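The matcher added above, in its two standard forms:

```ts
import { expect, test } from "bun:test";

test("toMatch accepts a substring or a RegExp", () => {
  expect("bun install completed").toMatch("install");
  expect("v0.5.9").toMatch(/^v\d+\.\d+\.\d+$/);
});
```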
* Fix various fetch/response/request tests (#2416)
* fix most fetch tests, skip a few
* fastGet, toValueGC, and invalid init
* bigint unreachable, range error, log process as process
* remove extra fetch_headers
* remove js_type parameter, check isObject()
* throw invalid mime type error, use enum literal
* switch back to promise rejection
* RangeError pascal case
* Fix several bugs (#2418)
* utf16 codepoint with replacement character
* Fix test failure with `TextEncoder("ascii")`
* Add missing type
* Fix Response.prototype.bodyUsed and Request.prototype.bodyUsed
* Fix bug with scrypt error not clearing
* Update server.zig
* oopsie
* 💅
* docs: Use correct url in the 'Issues' link in README header (#2420)
* Fix crash when rendering error page and the server or network is slow
* [fetch] Make the default body value `null` when unspecified
This is better aligned with the fetch spec
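What the spec-aligned default looks like in practice:

```ts
console.log(new Response().body); // null — no body was provided
console.log(new Request("https://example.com").body); // null for a plain GET request
```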
* Make node-net tests less flaky
* [node:net] Fix issue with `listen` callback firing before it's listening
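The ordering guarantee this fix restores, sketched with a throwaway server on an ephemeral port:

```ts
import net from "node:net";

const server = net.createServer((socket) => socket.end("hi\n"));
server.listen(0, () => {
  // The callback now fires only once the server is actually listening,
  // so address() is populated here.
  console.log("listening on", server.address());
  server.close();
});
```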
* Always clear timers in node test harness
* Fix out of bounds access
Repro'd in Buffer tests
* Update UWS
cc @cirospaciari
* Make this test more thorough
* Hanging abort test
* 0 length body is a null stream
* Several bug fixes (#2427)
* Fix test
* Fix segfault when unexpected type is passed in `expect().toThrow`
* Fix issues with request constructor
* Don't bother cloning headers when it's empty
* woops
* more tests
* fix incorrect test
* Make the fetch error messages better
* Update response.zig
* Fix test that failed on macOS
* Fix test
* Remove extra hash table lookups
* Support running dummy registry directly
cc @alexlamsl
* Update test
* Update test
* fixup
* Workaround crash in test runner
* Fixup test
* Fixup test
* Update os.test.js
---------
Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>
* Remove usages of port numbers in tests
* Set -O2 and -fno-rtti
* Remove -g
* Prevent undefined memory access
* [bun test] Implement `--rerun-each` flag to run each test N times
* Reduce number of module scopes created
* add some extra abort checks into streams (#2430)
* add some checks to avoid UAF
* avoid multiple calls to finalize if endFromJS is called more than once
* fix no-op comment
* mark as requested_end on abort
* remove requested_end from abort
* remove unnecessary check (#2432)
* Fix bug with scoped aliased dependencies in bun install on macOS
* remove `addLog`, remove `--prominent-compile-errors`
* Finish the upgrade
* Optional chaining flag
* Implement same_target_becomes_destructuring optimization
* bundler test refining, 109/835
* Reset bindings
* Support multiple entry points
* Implement `--entry-names` flag
* Use a tempdir with a better name
* prettier
* Log file name
* Update js_parser.zig
* Mark all bun builtins as external
* Make resolve errors actually errors
* Update bundler_default.test.ts
* Fix `await import(foo)`
* WIP react server components
* Do more stuff at runtime
* ✂️
* Support automatic JSX imports
* Use a module cache for now
* Update tsconfig.base.json
* Fix ThisOutsideFunctionNotRenamed
* woopsie
* moar cpu
* clamp it
* fixup
* Add a bunch of assertions
* Bun uses automatic runtime by default
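What the automatic JSX runtime means for source files: no manual React import is needed, because the bundler injects the jsx/jsxs helpers from the configured jsxImportSource (the component below is made up):

```tsx
export function Hello({ name }: { name: string }) {
  return <p>Hello, {name}!</p>;
}
```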
* Parse Import Attributes
* Add a note about Valgrind
* Update developing.md
* Fix up code splitting for React Server Components
* Implement client component manifest
* Fix crash with --react-server-components and no client components
* Backport 4d31e3c917
* Update launch.json
* Fix for latest zig
* Workaround bug with ?[]const string
Occasionally saw alignment errors in this code
Workaround https://github.com/ziglang/zig/issues/15085
related: https://github.com/ziglang/zig/pull/15089
* switch to regular slice
* Avoid initializing named_imports and named_exports as undefined
* Reduce usages of `undefined`
* Add more assertions
* --watch wip
* Update javascript.zig
* Possibly fix the race condition
* Faster `do`
* bump allocator
* Reduce the size of `Symbol` slightly
* Alphabetically sort runtime import symbols, for determinism
* Prepare for code splitting
* handle overlapping stdout
* pure
* clean up some things
* Fix bug with `$$typeof`
* Address CommonJS -> ESM hoisting bug
* Support `"use server"` in manifest
* Implement `"use server"`
* Fix importing bun builtins when bundling
* Make `commonjs_to_esm` a feature flag, fix some splitting bugs
* ✂️
* fixme remove this
* Fix crash in longestCommonPath
* Chunking! Just need to do import paths now.
* Import paths work...now trying to figure out how to make runtime symbols work
* add workaround
* Replace `bun bun` with `bun build`
* Fix crash with dual package hazard
* Fix many CommonJS <> ESM interop bugs
* Support package.json `"sideEffects"`
also skip loading unnecessary package.json data in `bun run`
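A sketch of how the `"sideEffects"` hint interacts with bundling; the package name is made up:

```ts
// If "some-lib" declares `"sideEffects": false` in its package.json, the bundler
// may drop this bare import entirely when nothing from it is referenced:
import "some-lib/register";

// ...and unused named imports can be tree-shaken as well.
import { used } from "some-lib";
console.log(used);
```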
* add a not good --watch implementation
* bundler test refining, 140/831
* remove accidentally committed file
* do not return status code 1 on successful bundles
* bundler test refining, 159/830
* pass exit code to exitOrWatch
* clean up help menu
- remove two spaces to line up bun build
- moved all <r> tags to the end of the text they are colorizing
- moved other colors to the start of the text they colorize
- removed unneeded <r> tags, keeping only one at the start of the block
* importstar is fully ported
* wip
* you can run code in this branch now
* Disable this transform
* organize and document bundler tests
* Fix double import
* Fix sloppy mode function declarations
* Disable our CommonJS transform for now
* add `assertNotPresent` to make splitting cases easier
* Bump!
* Update bun.d.ts
* use import.meta.require in runtime code
* Disable this again
* Fix dirname
* Fix ESM -> CJS wrapper
* 💅
---------
Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Alex Lam S.L <alexlamsl@gmail.com>
Co-authored-by: Derrick Farris <mr.dcfarris@gmail.com>
Co-authored-by: Ashcon Partovi <ashcon@partovi.net>
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
Co-authored-by: pfg <pfg@pfg.pw>
Co-authored-by: Colin McDonnell <colinmcd94@gmail.com>
Co-authored-by: dave caruso <me@paperdave.net>
Co-authored-by: zhiyuan <32867472+zhiyuang@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
Co-authored-by: Kamil Ogórek <kamil.ogorek@gmail.com>
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
```diff
@@ -79,7 +79,6 @@ const VM = @import("bun").JSC.VM;
 const JSFunction = @import("bun").JSC.JSFunction;
 const Config = @import("./config.zig");
 const URL = @import("../url.zig").URL;
-const Transpiler = @import("./api/transpiler.zig");
 const Bun = JSC.API.Bun;
 const EventLoop = JSC.EventLoop;
 const PendingResolution = @import("../resolver/resolver.zig").PendingResolution;
@@ -2730,290 +2729,315 @@ pub const BuildError = struct {
 };

 pub const JSPrivateDataTag = JSPrivateDataPtr.Tag;

-pub const Watcher = @import("../watcher.zig").NewWatcher(*HotReloader);
-
-pub const HotReloader = struct {
+pub const HotReloader = NewHotReloader(VirtualMachine, JSC.EventLoop, false);
+pub const Watcher = HotReloader.Watcher;
+
+pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime reload_immediately: bool) type {
+    return struct {
         const watcher = @import("../watcher.zig");
+        pub const Watcher = watcher.NewWatcher(*@This());
+        const Reloader = @This();

-        onAccept: std.ArrayHashMapUnmanaged(Watcher.HashType, bun.BabyList(OnAcceptCallback), bun.ArrayIdentityContext, false) = .{},
-        vm: *JSC.VirtualMachine,
+        onAccept: std.ArrayHashMapUnmanaged(@This().Watcher.HashType, bun.BabyList(OnAcceptCallback), bun.ArrayIdentityContext, false) = .{},
+        ctx: *Ctx,
         verbose: bool = false,

         tombstones: std.StringHashMapUnmanaged(*bun.fs.FileSystem.RealFS.EntriesOption) = .{},
+
+        pub fn eventLoop(this: @This()) *EventLoopType {
+            return this.ctx.eventLoop();
+        }
+
+        pub fn enqueueTaskConcurrent(this: @This(), task: *JSC.ConcurrentTask) void {
+            if (comptime reload_immediately)
+                unreachable;
+
+            this.eventLoop().enqueueTaskConcurrent(task);
+        }

         pub const HotReloadTask = struct {
-            reloader: *HotReloader,
+            reloader: *Reloader,
             count: u8 = 0,
             hashes: [8]u32 = [_]u32{0} ** 8,
             concurrent_task: JSC.ConcurrentTask = undefined,

             pub fn append(this: *HotReloadTask, id: u32) void {
                 if (this.count == 8) {
                     this.enqueue();
                     var reloader = this.reloader;
                     this.* = .{
                         .reloader = reloader,
                         .count = 0,
                     };
                 }

                 this.hashes[this.count] = id;
                 this.count += 1;
             }

             pub fn run(this: *HotReloadTask) void {
-                this.reloader.vm.reload();
+                this.reloader.ctx.reload();
             }

             pub fn enqueue(this: *HotReloadTask) void {
                 if (this.count == 0)
                     return;
+
+                if (comptime reload_immediately) {
+                    bun.reloadProcess(bun.default_allocator, Output.enable_ansi_colors);
+                    unreachable;
+                }
+
                 var that = bun.default_allocator.create(HotReloadTask) catch unreachable;

                 that.* = this.*;
                 this.count = 0;
                 that.concurrent_task.task = Task.init(that);
-                that.reloader.vm.eventLoop().enqueueTaskConcurrent(&that.concurrent_task);
+                this.reloader.enqueueTaskConcurrent(&that.concurrent_task);
             }

             pub fn deinit(this: *HotReloadTask) void {
                 bun.default_allocator.destroy(this);
             }
         };

         fn NewCallback(comptime FunctionSignature: type) type {
             return union(enum) {
                 javascript_callback: JSC.Strong,
                 zig_callback: struct {
                     ptr: *anyopaque,
                     function: *const FunctionSignature,
                 },
             };
         }

         pub const OnAcceptCallback = NewCallback(fn (
             vm: *JSC.VirtualMachine,
             specifier: []const u8,
         ) void);

-        pub fn enableHotModuleReloading(this: *VirtualMachine) void {
+        pub fn enableHotModuleReloading(this: *Ctx) void {
             if (this.bun_watcher != null)
                 return;

-            var reloader = bun.default_allocator.create(HotReloader) catch @panic("OOM");
+            var reloader = bun.default_allocator.create(Reloader) catch @panic("OOM");
             reloader.* = .{
-                .vm = this,
-                .verbose = this.log.level.atLeast(.info),
+                .ctx = this,
+                .verbose = if (@hasField(Ctx, "log")) this.log.level.atLeast(.info) else false,
             };
-            this.bun_watcher = JSC.Watcher.init(
+            this.bun_watcher = @This().Watcher.init(
                 reloader,
                 this.bundler.fs,
                 bun.default_allocator,
             ) catch @panic("Failed to enable File Watcher");

-            this.bundler.resolver.watcher = Resolver.ResolveWatcher(*Watcher, onMaybeWatchDirectory).init(this.bun_watcher.?);
+            this.bundler.resolver.watcher = Resolver.ResolveWatcher(*@This().Watcher, onMaybeWatchDirectory).init(this.bun_watcher.?);

             this.bun_watcher.?.start() catch @panic("Failed to start File Watcher");
         }

-        pub fn onMaybeWatchDirectory(watch: *Watcher, file_path: string, dir_fd: StoredFileDescriptorType) void {
+        pub fn onMaybeWatchDirectory(watch: *@This().Watcher, file_path: string, dir_fd: StoredFileDescriptorType) void {
             // We don't want to watch:
             // - Directories outside the root directory
             // - Directories inside node_modules
             if (std.mem.indexOf(u8, file_path, "node_modules") == null and std.mem.indexOf(u8, file_path, watch.fs.top_level_dir) != null) {
-                watch.addDirectory(dir_fd, file_path, Watcher.getHash(file_path), false) catch {};
+                watch.addDirectory(dir_fd, file_path, @This().Watcher.getHash(file_path), false) catch {};
             }
         }

-        fn putTombstone(this: *HotReloader, key: []const u8, value: *bun.fs.FileSystem.RealFS.EntriesOption) void {
+        fn putTombstone(this: *@This(), key: []const u8, value: *bun.fs.FileSystem.RealFS.EntriesOption) void {
             this.tombstones.put(bun.default_allocator, key, value) catch unreachable;
         }

-        fn getTombstone(this: *HotReloader, key: []const u8) ?*bun.fs.FileSystem.RealFS.EntriesOption {
+        fn getTombstone(this: *@This(), key: []const u8) ?*bun.fs.FileSystem.RealFS.EntriesOption {
             return this.tombstones.get(key);
         }

         pub fn onFileUpdate(
-            this: *HotReloader,
+            this: *@This(),
             events: []watcher.WatchEvent,
             changed_files: []?[:0]u8,
             watchlist: watcher.Watchlist,
         ) void {
             var slice = watchlist.slice();
             const file_paths = slice.items(.file_path);
             var counts = slice.items(.count);
             const kinds = slice.items(.kind);
             const hashes = slice.items(.hash);
             const parents = slice.items(.parent_hash);
             var file_descriptors = slice.items(.fd);
-            var ctx = this.vm.bun_watcher.?;
+            var ctx = this.ctx.bun_watcher.?;
             defer ctx.flushEvictions();
             defer Output.flush();

-            var bundler = &this.vm.bundler;
+            var bundler = if (@TypeOf(this.ctx.bundler) == *bun.Bundler)
+                this.ctx.bundler
+            else
+                &this.ctx.bundler;
+
             var fs: *Fs.FileSystem = bundler.fs;
             var rfs: *Fs.FileSystem.RealFS = &fs.fs;
             var resolver = &bundler.resolver;
             var _on_file_update_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;

             var current_task: HotReloadTask = .{
                 .reloader = this,
             };
             defer current_task.enqueue();

             for (events) |event| {
                 const file_path = file_paths[event.index];
                 const update_count = counts[event.index] + 1;
                 counts[event.index] = update_count;
                 const kind = kinds[event.index];

                 // so it's consistent with the rest
                 // if we use .extname we might run into an issue with whether or not the "." is included.
                 // const path = Fs.PathName.init(file_path);
                 const id = hashes[event.index];

                 if (comptime Environment.isDebug) {
                     Output.prettyErrorln("[watch] {s} ({s}, {})", .{ file_path, @tagName(kind), event.op });
                 }

                 switch (kind) {
                     .file => {
                         if (event.op.delete or event.op.rename) {
                             ctx.removeAtIndex(
                                 event.index,
                                 0,
                                 &.{},
                                 .file,
                             );
                         }

                         if (this.verbose)
                             Output.prettyErrorln("<r><d>File changed: {s}<r>", .{fs.relativeTo(file_path)});

                         if (event.op.write or event.op.delete or event.op.rename) {
                             current_task.append(id);
                         }
                     },
                     .directory => {
                         var affected_buf: [128][]const u8 = undefined;
                         var entries_option: ?*Fs.FileSystem.RealFS.EntriesOption = null;

                         const affected = brk: {
                             if (comptime Environment.isMac) {
                                 if (rfs.entries.get(file_path)) |existing| {
                                     this.putTombstone(file_path, existing);
                                     entries_option = existing;
                                 } else if (this.getTombstone(file_path)) |existing| {
                                     entries_option = existing;
                                 }

                                 var affected_i: usize = 0;

                                 // if a file descriptor is stale, we need to close it
                                 if (event.op.delete and entries_option != null) {
                                     for (parents, 0..) |parent_hash, entry_id| {
                                         if (parent_hash == id) {
                                             const affected_path = file_paths[entry_id];
                                             const was_deleted = check: {
                                                 std.os.access(affected_path, std.os.F_OK) catch break :check true;
                                                 break :check false;
                                             };
                                             if (!was_deleted) continue;

                                             affected_buf[affected_i] = affected_path[file_path.len..];
                                             affected_i += 1;
                                             if (affected_i >= affected_buf.len) break;
                                         }
                                     }
                                 }

                                 break :brk affected_buf[0..affected_i];
                             }

                             break :brk event.names(changed_files);
                         };

                         if (affected.len > 0 and !Environment.isMac) {
                             if (rfs.entries.get(file_path)) |existing| {
                                 this.putTombstone(file_path, existing);
                                 entries_option = existing;
                             } else if (this.getTombstone(file_path)) |existing| {
                                 entries_option = existing;
                             }
                         }

                         resolver.bustDirCache(file_path);

                         if (entries_option) |dir_ent| {
-                            var last_file_hash: Watcher.HashType = std.math.maxInt(Watcher.HashType);
+                            var last_file_hash: @This().Watcher.HashType = std.math.maxInt(@This().Watcher.HashType);

                             for (affected) |changed_name_| {
                                 const changed_name: []const u8 = if (comptime Environment.isMac)
                                     changed_name_
                                 else
                                     bun.asByteSlice(changed_name_.?);
                                 if (changed_name.len == 0 or changed_name[0] == '~' or changed_name[0] == '.') continue;

                                 const loader = (bundler.options.loaders.get(Fs.PathName.init(changed_name).ext) orelse .file);
                                 var prev_entry_id: usize = std.math.maxInt(usize);
                                 if (loader.isJavaScriptLikeOrJSON() or loader == .css) {
                                     var path_string: bun.PathString = undefined;
-                                    var file_hash: Watcher.HashType = last_file_hash;
+                                    var file_hash: @This().Watcher.HashType = last_file_hash;
                                     const abs_path: string = brk: {
                                         if (dir_ent.entries.get(@ptrCast([]const u8, changed_name))) |file_ent| {
                                             // reset the file descriptor
                                             file_ent.entry.cache.fd = 0;
                                             file_ent.entry.need_stat = true;
                                             path_string = file_ent.entry.abs_path;
-                                            file_hash = Watcher.getHash(path_string.slice());
+                                            file_hash = @This().Watcher.getHash(path_string.slice());
                                             for (hashes, 0..) |hash, entry_id| {
                                                 if (hash == file_hash) {
                                                     if (file_descriptors[entry_id] != 0) {
                                                         if (prev_entry_id != entry_id) {
                                                             current_task.append(@truncate(u32, entry_id));
                                                             ctx.removeAtIndex(
                                                                 @truncate(u16, entry_id),
                                                                 0,
                                                                 &.{},
                                                                 .file,
                                                             );
                                                         }
                                                     }

                                                     prev_entry_id = entry_id;
                                                     break;
                                                 }
                                             }

                                             break :brk path_string.slice();
                                         } else {
                                             var file_path_without_trailing_slash = std.mem.trimRight(u8, file_path, std.fs.path.sep_str);
                                             @memcpy(&_on_file_update_path_buf, file_path_without_trailing_slash.ptr, file_path_without_trailing_slash.len);
                                             _on_file_update_path_buf[file_path_without_trailing_slash.len] = std.fs.path.sep;

                                             @memcpy(_on_file_update_path_buf[file_path_without_trailing_slash.len + 1 ..].ptr, changed_name.ptr, changed_name.len);
                                             const path_slice = _on_file_update_path_buf[0 .. file_path_without_trailing_slash.len + changed_name.len + 1];
-                                            file_hash = Watcher.getHash(path_slice);
+                                            file_hash = @This().Watcher.getHash(path_slice);
                                             break :brk path_slice;
                                         }
                                     };

                                     // skip consecutive duplicates
                                     if (last_file_hash == file_hash) continue;
                                     last_file_hash = file_hash;

                                     if (this.verbose)
                                         Output.prettyErrorln("<r> <d>File change: {s}<r>", .{fs.relativeTo(abs_path)});
                                 }
                             }
                         }

                         if (this.verbose) {
                             Output.prettyErrorln("<r> <d>Dir change: {s}<r>", .{fs.relativeTo(file_path)});
                         }
                     },
                 }
             }
         }
     };
+}
```