diff --git a/.cursor/rules/dev-server-tests.mdc b/.cursor/rules/dev-server-tests.mdc
new file mode 100644
index 0000000000..dfdaf41cd5
--- /dev/null
+++ b/.cursor/rules/dev-server-tests.mdc
@@ -0,0 +1,139 @@
+---
+description: Writing HMR/Dev Server tests
+globs: test/bake/*
+---
+
+# Writing HMR/Dev Server tests
+
+Dev server tests validate that hot-reloading is robust, correct, and reliable. Remember to write thorough, yet concise tests.
+
+## File Structure
+
+- `test/bake/bake-harness.ts` - shared utilities and test harness
+ - primary test functions `devTest` / `prodTest` / `devAndProductionTest`
+ - class `Dev` (controls subprocess for dev server)
+ - class `Client` (controls a happy-dom subprocess for having the page open)
+ - more helpers
+- `test/bake/client-fixture.mjs` - subprocess for what `Client` controls. it loads a page and uses IPC to query parts of the page, run javascript, and much more.
+- `test/bake/dev/*.test.ts` - these call `devTest` to test dev server and hot reloading
+- `test/bake/dev-and-prod.ts` - these use `devAndProductionTest` to run the same test on dev and production mode. these tests cannot really test hot reloading for obvious reasons.
+
+## Categories
+
+bundle.test.ts - Bundle tests are tests concerning bundling bugs that only occur in DevServer.
+css.test.ts - CSS tests concern bundling bugs with CSS files
+plugins.test.ts - Plugin tests concern plugins in development mode.
+ecosystem.test.ts - These tests involve ensuring certain libraries are correct. It is preferred to test more concrete bugs than testing entire packages.
+esm.test.ts - ESM tests are about various esm features in development mode.
+html.test.ts - HTML tests are tests relating to HTML files themselves.
+react-spa.test.ts - Tests relating to React, our react-refresh transform, and basic server component transforms.
+sourcemap.test.ts - Tests verifying source-maps are correct.
+
+## `devTest` Basics
+
+A test takes in two primary inputs: `files` and `async test(dev) {`
+
+```ts
+import { devTest, emptyHtmlFile } from "../bake-harness";
+
+devTest("html file is watched", {
+ files: {
+ "index.html": emptyHtmlFile({
+ scripts: ["/script.ts"],
+      body: "<p>Hello</p>",
+ }),
+ "script.ts": `
+ console.log("hello");
+ `,
+ },
+ async test(dev) {
+    await dev.fetch("/").expect.toInclude("Hello</p>");
+    await dev.fetch("/").expect.toInclude("Hello</p>");
+ await dev.patch("index.html", {
+ find: "Hello",
+ replace: "World",
+ });
+    await dev.fetch("/").expect.toInclude("World</p>");
+
+ // Works
+ await using c = await dev.client("/");
+ await c.expectMessage("hello");
+
+ // Editing HTML reloads
+ await c.expectReload(async () => {
+ await dev.patch("index.html", {
+ find: "World",
+ replace: "Hello",
+ });
+      await dev.fetch("/").expect.toInclude("Hello</p>");
+ });
+ await c.expectMessage("hello");
+
+ await c.expectReload(async () => {
+ await dev.patch("index.html", {
+ find: "Hello",
+ replace: "Bar",
+ });
+      await dev.fetch("/").expect.toInclude("Bar</p>");
+ });
+ await c.expectMessage("hello");
+
+ await c.expectReload(async () => {
+ await dev.patch("script.ts", {
+ find: "hello",
+ replace: "world",
+ });
+ });
+ await c.expectMessage("world");
+ },
+});
+```
+
+`files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance to the code.
+
+Functions `dev.write` and `dev.patch` and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server ones are hooked to wait for hot-reload, and all connected clients to receive changes.
+
+When a change performs a hard-reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; all other hard reloads automatically fail the test.
+
+Clients have `console.log` instrumented, so that any unasserted logs fail the test. This makes it more obvious when an extra reload or re-evaluation occurs. Messages are awaited via `c.expectMessage("log")` or with multiple arguments if there are multiple logs.
+
+## Testing for bundling errors
+
+By default, a client opening a page to an error will fail the test. This makes testing errors explicit.
+
+```ts
+devTest("import then create", {
+ files: {
+ "index.html": `
+      <!DOCTYPE html>
+      <html>
+        <head>
+          <script type="module" src="/script.ts"></script>
+        </head>
+        <body></body>
+      </html>
+ `,
+ "script.ts": `
+ import data from "./data";
+ console.log(data);
+ `,
+ },
+ async test(dev) {
+ const c = await dev.client("/", {
+ errors: ['script.ts:1:18: error: Could not resolve: "./data"'],
+ });
+ await c.expectReload(async () => {
+ await dev.write("data.ts", "export default 'data';");
+ });
+ await c.expectMessage("data");
+ },
+});
+```
+
+Many functions take an options value to allow specifying that they will produce errors. For example, this delete is going to cause a resolution failure.
+
+```ts
+await dev.delete("other.ts", {
+ errors: ['index.ts:1:16: error: Could not resolve: "./other"'],
+});
+```
diff --git a/build.zig b/build.zig
index 10b80a2a45..a43a8929f2 100644
--- a/build.zig
+++ b/build.zig
@@ -25,9 +25,10 @@ comptime {
if (!std.mem.eql(u8, builtin.zig_version_string, recommended_zig_version)) {
@compileError(
"" ++
- "Bun requires Zig version " ++ recommended_zig_version ++ ". This is" ++
- "automatically configured via Bun's CMake setup. You likely meant to run" ++
- "`bun setup`. If you are trying to upgrade the Zig compiler," ++
+ "Bun requires Zig version " ++ recommended_zig_version ++ " (found " ++
+ builtin.zig_version_string ++ "). This is " ++
+ "automatically configured via Bun's CMake setup. You likely meant to run " ++
+ "`bun setup`. If you are trying to upgrade the Zig compiler, " ++
"run `./scripts/download-zig.sh master` or comment this message out.",
);
}
diff --git a/src/OutputFile.zig b/src/OutputFile.zig
index cb81216539..329b1b5b28 100644
--- a/src/OutputFile.zig
+++ b/src/OutputFile.zig
@@ -233,15 +233,11 @@ pub fn init(options: Options) OutputFile {
};
}
-/// Given the `--outdir` as root_dir, this will return the relative path to display in terminal
-pub fn writeToDisk(f: OutputFile, root_dir: std.fs.Dir, root_dir_path: []const u8) ![]const u8 {
+pub fn writeToDisk(f: OutputFile, root_dir: std.fs.Dir, root_dir_path: []const u8) !void {
switch (f.value) {
+ .noop => {},
.saved => {
- var rel_path = f.dest_path;
- if (f.dest_path.len > root_dir_path.len) {
- rel_path = resolve_path.relative(root_dir_path, f.dest_path);
- }
- return rel_path;
+ // already written to disk
},
.buffer => |value| {
var rel_path = f.dest_path;
@@ -270,19 +266,12 @@ pub fn writeToDisk(f: OutputFile, root_dir: std.fs.Dir, root_dir_path: []const u
.string = JSC.PathString.init(rel_path),
} },
}).unwrap();
-
- return rel_path;
},
.move => |value| {
try f.moveTo(root_dir_path, value.pathname, bun.toFD(root_dir.fd));
- return value.pathname;
},
.copy => |value| {
try f.copyTo(root_dir_path, value.pathname, bun.toFD(root_dir.fd));
- return value.pathname;
- },
- .noop => {
- return f.dest_path;
},
.pending => unreachable,
}
diff --git a/src/api/schema.zig b/src/api/schema.zig
index 7ab4d400e0..b569cc3dc3 100644
--- a/src/api/schema.zig
+++ b/src/api/schema.zig
@@ -800,9 +800,6 @@ pub const Api = struct {
/// import_source
import_source: []const u8,
- /// react_fast_refresh
- react_fast_refresh: bool = false,
-
pub fn decode(reader: anytype) anyerror!Jsx {
var this = std.mem.zeroes(Jsx);
@@ -811,7 +808,6 @@ pub const Api = struct {
this.fragment = try reader.readValue([]const u8);
this.development = try reader.readValue(bool);
this.import_source = try reader.readValue([]const u8);
- this.react_fast_refresh = try reader.readValue(bool);
return this;
}
@@ -821,7 +817,6 @@ pub const Api = struct {
try writer.writeValue(@TypeOf(this.fragment), this.fragment);
try writer.writeInt(@as(u8, @intFromBool(this.development)));
try writer.writeValue(@TypeOf(this.import_source), this.import_source);
- try writer.writeInt(@as(u8, @intFromBool(this.react_fast_refresh)));
}
};
diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig
index c4432af274..773e3d150f 100644
--- a/src/bake/DevServer.zig
+++ b/src/bake/DevServer.zig
@@ -225,8 +225,6 @@ pub const RouteBundle = struct {
pub const Framework = struct {
route_index: Route.Index,
- // TODO: micro-opt: use a singular strong
-
/// Cached to avoid re-creating the array every request.
/// TODO: Invalidated when a layout is added or removed from this route.
cached_module_list: JSC.Strong,
@@ -255,7 +253,7 @@ pub const RouteBundle = struct {
/// and css information. Invalidated when:
/// - The HTML file itself modified.
/// - The list of CSS files changes.
- /// - TODO: Any downstream file is rebundled.
+ /// - Any downstream file is rebundled.
cached_response: ?*StaticRoute,
const ByteOffset = bun.GenericIndex(u32, u8);
@@ -739,6 +737,7 @@ pub fn deinit(dev: *DevServer) void {
.watcher_atomics = for (&dev.watcher_atomics.events) |*event| {
event.aligned.dirs.deinit(dev.allocator);
event.aligned.files.deinit(dev.allocator);
+ event.aligned.extra_files.deinit(dev.allocator);
},
};
dev.allocation_scope.deinit();
@@ -939,8 +938,7 @@ fn scanInitialRoutes(dev: *DevServer) !void {
}
/// Returns true if a catch-all handler was attached.
-// TODO: rename to setRoutes to match server.zig
-pub fn attachRoutes(dev: *DevServer, server: anytype) !bool {
+pub fn setRoutes(dev: *DevServer, server: anytype) !bool {
// TODO: all paths here must be prefixed with publicPath if set.
dev.server = bun.JSC.API.AnyServer.from(server);
const app = server.app.?;
@@ -2600,17 +2598,26 @@ fn startNextBundleIfPresent(dev: *DevServer) void {
if (dev.next_bundle.reload_event != null or dev.next_bundle.requests.first != null) {
var sfb = std.heap.stackFallback(4096, dev.allocator);
const temp_alloc = sfb.get();
- var entry_points: EntryPointList = EntryPointList.empty;
+ var entry_points: EntryPointList = .empty;
defer entry_points.deinit(temp_alloc);
- if (dev.next_bundle.reload_event) |event| {
+ const is_reload, const timer = if (dev.next_bundle.reload_event) |event| brk: {
+ dev.next_bundle.reload_event = null;
+
+ const reload_event_timer = event.timer;
+
event.processFileList(dev, &entry_points, temp_alloc);
if (dev.watcher_atomics.recycleEventFromDevServer(event)) |second| {
+ if (Environment.isDebug) {
+ assert(second.debug_mutex.tryLock());
+ }
second.processFileList(dev, &entry_points, temp_alloc);
dev.watcher_atomics.recycleSecondEventFromDevServer(second);
}
- }
+
+ break :brk .{ true, reload_event_timer };
+ } else .{ false, std.time.Timer.start() catch @panic("timers unsupported") };
for (dev.next_bundle.route_queue.keys()) |route_bundle_index| {
const rb = dev.routeBundlePtr(route_bundle_index);
@@ -2618,14 +2625,9 @@ fn startNextBundleIfPresent(dev: *DevServer) void {
dev.appendRouteEntryPointsIfNotStale(&entry_points, temp_alloc, route_bundle_index) catch bun.outOfMemory();
}
- const is_reload, const timer = if (dev.next_bundle.reload_event) |re|
- .{ true, re.timer }
- else
- .{ false, std.time.Timer.start() catch @panic("timers unsupported") };
dev.startAsyncBundle(entry_points, is_reload, timer) catch bun.outOfMemory();
dev.next_bundle.route_queue.clearRetainingCapacity();
- dev.next_bundle.reload_event = null;
}
}
@@ -2636,6 +2638,7 @@ pub fn handleParseTaskFailure(
graph: bake.Graph,
abs_path: []const u8,
log: *const Log,
+ bv2: *BundleV2,
) bun.OOM!void {
dev.graph_safety_lock.lock();
defer dev.graph_safety_lock.unlock();
@@ -2647,15 +2650,12 @@ pub fn handleParseTaskFailure(
log.msgs.items.len,
});
- if (err == error.FileNotFound) {
- // Special-case files being deleted. Note that if a
- // file never existed, resolution would fail first.
- //
- // TODO: this should walk up the graph one level, and queue all of these
- // files for re-bundling if they aren't already in the BundleV2 graph.
+ if (err == error.FileNotFound or err == error.ModuleNotFound) {
+ // Special-case files being deleted. Note that if a file had never
+ // existed, resolution would fail first.
switch (graph) {
- .server, .ssr => try dev.server_graph.onFileDeleted(abs_path, log),
- .client => try dev.client_graph.onFileDeleted(abs_path, log),
+ .server, .ssr => dev.server_graph.onFileDeleted(abs_path, bv2),
+ .client => dev.client_graph.onFileDeleted(abs_path, bv2),
}
} else {
switch (graph) {
@@ -4326,15 +4326,52 @@ pub fn IncrementalGraph(side: bake.Side) type {
}
}
- pub fn onFileDeleted(g: *@This(), abs_path: []const u8, log: *const Log) !void {
+ pub fn onFileDeleted(g: *@This(), abs_path: []const u8, bv2: *bun.BundleV2) void {
const index = g.getFileIndex(abs_path) orelse return;
- if (g.first_dep.items[index.get()] == .none) {
- g.disconnectAndDeleteFile(index);
- } else {
- // TODO: This is incorrect, delete it!
- // Keep the file so others may refer to it, but mark as failed.
- try g.insertFailure(.abs_path, abs_path, log, false);
+ const keys = g.bundled_files.keys();
+
+ // Disconnect all imports
+ var it: ?EdgeIndex = g.first_import.items[index.get()].unwrap();
+ while (it) |edge_index| {
+ const dep = g.edges.items[edge_index.get()];
+ it = dep.next_import.unwrap();
+ assert(dep.dependency == index);
+
+ g.disconnectEdgeFromDependencyList(edge_index);
+ g.freeEdge(edge_index);
+ }
+
+ // Rebuild all dependencies
+ it = g.first_dep.items[index.get()].unwrap();
+ while (it) |edge_index| {
+ const dep = g.edges.items[edge_index.get()];
+ it = dep.next_import.unwrap();
+ assert(dep.imported == index);
+
+ bv2.enqueueFileFromDevServerIncrementalGraphInvalidation(
+ keys[dep.dependency.get()],
+ switch (side) {
+ .client => .browser,
+ .server => .bun,
+ },
+ ) catch bun.outOfMemory();
+ }
+
+ // Bust the resolution caches of the dir containing this file,
+ // so that it cannot be resolved.
+ const dirname = std.fs.path.dirname(abs_path) orelse abs_path;
+ _ = bv2.transpiler.resolver.bustDirCache(dirname);
+
+ // Additionally, clear the cached entry of the file from the path to
+ // source index map.
+ const hash = bun.hash(abs_path);
+ for ([_]*bun.bundle_v2.PathToSourceIndexMap{
+ &bv2.graph.path_to_source_index_map,
+ &bv2.graph.client_path_to_source_index_map,
+ &bv2.graph.ssr_path_to_source_index_map,
+ }) |map| {
+ _ = map.remove(hash);
}
}
@@ -5725,6 +5762,9 @@ pub const MessageId = enum(u8) {
/// Sent in response to `set_url`.
/// - `u32`: Route index
set_url_response = 'n',
+    /// Used for synchronization in dev server tests, to identify when an update was
+    /// acknowledged by the watcher but intentionally took no action.
+ redundant_watch = 'r',
pub inline fn char(id: MessageId) u8 {
return @intFromEnum(id);
@@ -5748,6 +5788,7 @@ const HmrTopic = enum(u8) {
errors = 'e',
browser_error = 'E',
visualizer = 'v',
+ redundant_watch = 'r',
/// Invalid data
_,
@@ -5916,12 +5957,6 @@ pub fn startReloadBundle(dev: *DevServer, event: *HotReloadEvent) bun.OOM!void {
event.processFileList(dev, &entry_points, temp_alloc);
if (entry_points.set.count() == 0) {
- // TODO: Files which become disconnected from the incremental graph
- // should be removed from the watcher.
- Output.debugWarn("nothing to bundle. watcher may potentially be watching too many files.", .{});
- Output.debugWarn("modified files: {s}", .{
- bun.fmt.fmtSlice(event.files.keys(), ", "),
- });
return;
}
@@ -5970,15 +6005,21 @@ pub const HotReloadEvent = struct {
concurrent_task: JSC.ConcurrentTask,
/// The watcher is not able to peek into IncrementalGraph to know what files
/// to invalidate, so the watch events are de-duplicated and passed along.
+ /// The keys are owned by the file watcher.
files: bun.StringArrayHashMapUnmanaged(void),
/// Directories are watched so that resolution failures can be solved.
+ /// The keys are owned by the file watcher.
dirs: bun.StringArrayHashMapUnmanaged(void),
+ /// Same purpose as `files` but keys do not have an owner.
+ extra_files: std.ArrayListUnmanaged(u8),
/// Initialized by the WatcherAtomics.watcherAcquireEvent
timer: std.time.Timer,
/// This event may be referenced by either DevServer or Watcher thread.
/// 1 if referenced, 0 if unreferenced; see WatcherAtomics
contention_indicator: std.atomic.Value(u32),
+ debug_mutex: if (Environment.isDebug) bun.Mutex else void,
+
pub fn initEmpty(owner: *DevServer) HotReloadEvent {
return .{
.owner = owner,
@@ -5987,9 +6028,21 @@ pub const HotReloadEvent = struct {
.dirs = .empty,
.timer = undefined,
.contention_indicator = .init(0),
+ .debug_mutex = if (Environment.isDebug) .{} else {},
+ .extra_files = .empty,
};
}
+ pub fn reset(ev: *HotReloadEvent) void {
+ if (Environment.isDebug)
+ ev.debug_mutex.unlock();
+
+ ev.files.clearRetainingCapacity();
+ ev.dirs.clearRetainingCapacity();
+ ev.extra_files.clearRetainingCapacity();
+ ev.timer = undefined;
+ }
+
pub fn isEmpty(ev: *const HotReloadEvent) bool {
return (ev.files.count() + ev.dirs.count()) == 0;
}
@@ -5998,8 +6051,23 @@ pub const HotReloadEvent = struct {
_ = event.files.getOrPut(allocator, file_path) catch bun.outOfMemory();
}
- pub fn appendDir(event: *HotReloadEvent, allocator: Allocator, file_path: []const u8) void {
- _ = event.dirs.getOrPut(allocator, file_path) catch bun.outOfMemory();
+ pub fn appendDir(event: *HotReloadEvent, allocator: Allocator, dir_path: []const u8, maybe_sub_path: ?[]const u8) void {
+ if (dir_path.len == 0) return;
+ _ = event.dirs.getOrPut(allocator, dir_path) catch bun.outOfMemory();
+
+ const sub_path = maybe_sub_path orelse return;
+ if (sub_path.len == 0) return;
+
+ const platform = bun.path.Platform.auto;
+ const ends_with_sep = platform.isSeparator(dir_path[dir_path.len - 1]);
+ const starts_with_sep = platform.isSeparator(sub_path[0]);
+ const sep_offset: i32 = if (ends_with_sep and starts_with_sep) -1 else 1;
+
+ event.extra_files.ensureUnusedCapacity(allocator, @intCast(@as(i32, @intCast(dir_path.len + sub_path.len)) + sep_offset + 1)) catch bun.outOfMemory();
+ event.extra_files.appendSliceAssumeCapacity(if (ends_with_sep) dir_path[0 .. dir_path.len - 1] else dir_path);
+ event.extra_files.appendAssumeCapacity(platform.separator());
+ event.extra_files.appendSliceAssumeCapacity(sub_path);
+ event.extra_files.appendAssumeCapacity(0);
}
/// Invalidates items in IncrementalGraph, appending all new items to `entry_points`
@@ -6057,6 +6125,15 @@ pub const HotReloadEvent = struct {
}
}
+ var rest_extra = event.extra_files.items;
+ while (bun.strings.indexOfChar(rest_extra, 0)) |str| {
+ event.files.put(dev.allocator, rest_extra[0..str], {}) catch bun.outOfMemory();
+ rest_extra = rest_extra[str + 1 ..];
+ }
+ if (rest_extra.len > 0) {
+ event.files.put(dev.allocator, rest_extra, {}) catch bun.outOfMemory();
+ }
+
const changed_file_paths = event.files.keys();
inline for (.{ &dev.server_graph, &dev.client_graph }) |g| {
g.invalidate(changed_file_paths, entry_points, temp_alloc) catch bun.outOfMemory();
@@ -6072,10 +6149,18 @@ pub const HotReloadEvent = struct {
}
if (entry_points.set.count() == 0) {
- Output.debugWarn("nothing to bundle. watcher may potentially be watching too many files.", .{});
- Output.debugWarn("modified files: {s}", .{
- bun.fmt.fmtSlice(changed_file_paths, ", "),
- });
+ Output.debugWarn("nothing to bundle", .{});
+ if (changed_file_paths.len > 0)
+ Output.debugWarn("modified files: {s}", .{
+ bun.fmt.fmtSlice(changed_file_paths, ", "),
+ });
+
+ if (event.dirs.count() > 0)
+ Output.debugWarn("modified dirs: {s}", .{
+ bun.fmt.fmtSlice(event.dirs.keys(), ", "),
+ });
+
+ dev.publish(.redundant_watch, &.{MessageId.redundant_watch.char()}, .binary);
return;
}
}
@@ -6085,7 +6170,8 @@ pub const HotReloadEvent = struct {
defer debug.log("HMR Task end", .{});
const dev = first.owner;
- if (Environment.allow_assert) {
+ if (Environment.isDebug) {
+ assert(first.debug_mutex.tryLock());
assert(first.contention_indicator.load(.seq_cst) == 0);
}
@@ -6104,6 +6190,9 @@ pub const HotReloadEvent = struct {
const timer = first.timer;
if (dev.watcher_atomics.recycleEventFromDevServer(first)) |second| {
+ if (Environment.isDebug) {
+ assert(second.debug_mutex.tryLock());
+ }
second.processFileList(dev, &entry_points, temp_alloc);
dev.watcher_atomics.recycleSecondEventFromDevServer(second);
}
@@ -6185,6 +6274,9 @@ const WatcherAtomics = struct {
ev.owner.bun_watcher.thread_lock.assertLocked();
+ if (Environment.isDebug)
+ assert(ev.debug_mutex.tryLock());
+
return ev;
}
@@ -6194,6 +6286,15 @@ const WatcherAtomics = struct {
state.watcher_has_event.unlock();
ev.owner.bun_watcher.thread_lock.assertLocked();
+ if (Environment.isDebug) {
+ for (std.mem.asBytes(&ev.timer)) |b| {
+ if (b != 0xAA) break;
+ } else @panic("timer is undefined memory in watcherReleaseAndSubmitEvent");
+ }
+
+ if (Environment.isDebug)
+ ev.debug_mutex.unlock();
+
if (!ev.isEmpty()) {
@branchHint(.likely);
// There are files to be processed, increment this count first.
@@ -6230,8 +6331,7 @@ const WatcherAtomics = struct {
/// Called by DevServer after it receives a task callback. If this returns
/// another event, that event must be recycled with `recycleSecondEventFromDevServer`
fn recycleEventFromDevServer(state: *WatcherAtomics, first_event: *HotReloadEvent) ?*HotReloadEvent {
- first_event.files.clearRetainingCapacity();
- first_event.timer = undefined;
+ first_event.reset();
// Reset the watch count to zero, while detecting if
// the other watch event was submitted.
@@ -6269,8 +6369,7 @@ const WatcherAtomics = struct {
}
fn recycleSecondEventFromDevServer(state: *WatcherAtomics, second_event: *HotReloadEvent) void {
- second_event.files.clearRetainingCapacity();
- second_event.timer = undefined;
+ second_event.reset();
state.dev_server_has_event.unlock();
if (Environment.allow_assert) {
@@ -6284,8 +6383,6 @@ const WatcherAtomics = struct {
/// Called on watcher's thread; Access to dev-server state restricted.
pub fn onFileUpdate(dev: *DevServer, events: []Watcher.Event, changed_files: []?[:0]u8, watchlist: Watcher.ItemList) void {
- _ = changed_files;
-
debug.log("onFileUpdate start", .{});
defer debug.log("onFileUpdate end", .{});
@@ -6320,7 +6417,13 @@ pub fn onFileUpdate(dev: *DevServer, events: []Watcher.Event, changed_files: []?
ev.appendFile(dev.allocator, file_path);
},
.directory => {
- ev.appendDir(dev.allocator, file_path);
+ // INotifyWatcher stores sub paths into `changed_files`
+ // the other platforms do not appear to write anything into `changed_files` ever.
+ if (Environment.isLinux) {
+ ev.appendDir(dev.allocator, file_path, if (event.name_len > 0) changed_files[event.name_off] else null);
+ } else {
+ ev.appendDir(dev.allocator, file_path, null);
+ }
},
}
}
@@ -6477,7 +6580,7 @@ pub const EntryPointList = struct {
pub const empty: EntryPointList = .{ .set = .{} };
- const Flags = packed struct(u8) {
+ pub const Flags = packed struct(u8) {
client: bool = false,
server: bool = false,
ssr: bool = false,
diff --git a/src/bake/bake.zig b/src/bake/bake.zig
index c49c67c0d5..95931f0a2c 100644
--- a/src/bake/bake.zig
+++ b/src/bake/bake.zig
@@ -807,14 +807,6 @@ pub fn addImportMetaDefines(
"import.meta.env.STATIC",
Define.Data.initBoolean(mode == .production_static),
);
-
- if (mode != .development) {
- try define.insert(
- allocator,
- "import.meta.hot",
- Define.Data.initBoolean(false),
- );
- }
}
pub const server_virtual_source: bun.logger.Source = .{
diff --git a/src/bake/hmr-module.ts b/src/bake/hmr-module.ts
index 778494ddb3..084d767648 100644
--- a/src/bake/hmr-module.ts
+++ b/src/bake/hmr-module.ts
@@ -73,7 +73,6 @@ export class HotModule {
/** for MJS <-> CJS interop. this stores the other module exports */
_ext_exports = undefined;
_esm = false;
- _import_meta: ImportMeta | undefined = undefined;
_cached_failure: any = undefined;
/** modules that import THIS module */
_deps: Map = new Map();
@@ -89,6 +88,8 @@ export class HotModule {
enumerable: false,
});
}
+
+ this.require = this.require.bind(this);
}
require(id: Id, onReload?: ExportsCallbackFunction) {
@@ -103,14 +104,7 @@ export class HotModule {
mod._deps.set(this, onReload ? { _callback: onReload, _expectedImports: expectedImports } : undefined);
const { exports, _esm } = mod;
const object = _esm ? exports : (mod._ext_exports ??= runtimeHelpers.__toESM(exports));
-
- // if (expectedImports && mod._state === State.Ready) {
- // for (const key of expectedImports) {
- // if (!(key in object)) {
- // throw new SyntaxError(`The requested module '${id}' does not provide an export named '${key}'`);
- // }
- // }
- // }
+ // TODO: ESM rewrite
return object;
}
@@ -137,8 +131,16 @@ export class HotModule {
return _esm ? exports : (mod._ext_exports ??= { ...exports, default: exports });
}
- importMeta() {
- return (this._import_meta ??= initImportMeta(this));
+ get importMeta() {
+ const importMeta = initImportMeta(this);
+ Object.defineProperty(this, "importMeta", { value: importMeta });
+ return importMeta;
+ }
+
+ get hot() {
+ const hot = new Hot(this);
+ Object.defineProperty(this, "hot", { value: hot });
+ return hot;
}
/** Server-only */
@@ -166,11 +168,8 @@ function initImportMeta(m: HotModule): ImportMeta {
url: `bun://${m.id}`,
main: false,
// @ts-ignore
- get hot() {
- const hot = new Hot(m);
- Object.defineProperty(this, "hot", { value: hot });
- return hot;
- },
+ require: m.require,
+ // transpiler rewrites `import.meta.hot` to access `HotModule.hot`
};
}
@@ -195,6 +194,10 @@ class Hot {
console.warn("TODO: implement ImportMetaHot.accept (called from " + JSON.stringify(this.#module.id) + ")");
}
+ acceptSpecifiers(specifiers: string | readonly string[], cb?: HotAcceptFunction) {
+ console.warn("TODO: implement ImportMetaHot.accept (called from " + JSON.stringify(this.#module.id) + ")");
+ }
+
decline() {} // Vite: "This is currently a noop and is there for backward compatibility"
dispose(cb: HotDisposeFunction) {
@@ -258,7 +261,7 @@ export function loadModule(
}
const load = input_graph[key];
if (type < 0 && isAsyncFunction(load)) {
- // TODO: This is possible to implement, but requires some care.
+ // TODO: This is possible to implement, but requires some care. (ESM rewrite)
throw new Error("Cannot load ES module synchronously");
}
if (!load) {
diff --git a/src/bake/incremental_visualizer.html b/src/bake/incremental_visualizer.html
index 31af95998c..b61bc05f47 100644
--- a/src/bake/incremental_visualizer.html
+++ b/src/bake/incremental_visualizer.html
@@ -102,6 +102,7 @@
// over the wire. A visualization is provided by `vis.js`. Support
//
// Script written partially by ChatGPT
+ let isFirst = false;
const c = {
// Derived from mocha theme on https://catppuccin.com/palette
diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig
index 001f8841f1..493e6b0621 100644
--- a/src/bun.js/api/server.zig
+++ b/src/bun.js/api/server.zig
@@ -8051,7 +8051,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
var has_dev_catch_all = false;
if (dev_server) |dev| {
// DevServer adds a catch-all handler to use FrameworkRouter (full stack apps)
- has_dev_catch_all = dev.attachRoutes(this) catch bun.outOfMemory();
+ has_dev_catch_all = dev.setRoutes(this) catch bun.outOfMemory();
}
if (!has_dev_catch_all and this.config.onRequest != .zero) {
diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig
index 49da09b91a..4f76f98e7e 100644
--- a/src/bun.js/module_loader.zig
+++ b/src/bun.js/module_loader.zig
@@ -160,6 +160,7 @@ fn dumpSourceStringFailiable(vm: *VirtualMachine, specifier: string, written: []
specifier,
std.math.maxInt(u64),
) catch "";
+ defer bun.default_allocator.free(source_file);
var bufw = std.io.bufferedWriter(file.writer());
const w = bufw.writer();
diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index 05792cd0ae..46b509ffe1 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -976,6 +976,66 @@ pub const BundleV2 = struct {
}
}
+ pub fn enqueueFileFromDevServerIncrementalGraphInvalidation(
+ this: *BundleV2,
+ path_slice: []const u8,
+ target: options.Target,
+ ) !void {
+ // TODO: plugins
+ const entry = try this.pathToSourceIndexMap(target).getOrPut(this.graph.allocator, bun.hash(path_slice));
+ if (entry.found_existing) {
+ return;
+ }
+ const t = this.transpilerForTarget(target);
+ const result = t.resolveEntryPoint(path_slice) catch
+ return;
+ var path = result.path_pair.primary;
+ this.incrementScanCounter();
+ const source_index = Index.source(this.graph.input_files.len);
+ const loader = brk: {
+ const default = path.loader(&this.transpiler.options.loaders) orelse .file;
+ break :brk default;
+ };
+
+ path = this.pathWithPrettyInitialized(path, target) catch bun.outOfMemory();
+ path.assertPrettyIsValid();
+ entry.value_ptr.* = source_index.get();
+ this.graph.ast.append(bun.default_allocator, JSAst.empty) catch bun.outOfMemory();
+
+ try this.graph.input_files.append(bun.default_allocator, .{
+ .source = .{
+ .path = path,
+ .contents = "",
+ .index = source_index,
+ },
+ .loader = loader,
+ .side_effects = result.primary_side_effects_data,
+ });
+ var task = try this.graph.allocator.create(ParseTask);
+ task.* = ParseTask.init(&result, source_index, this);
+ task.loader = loader;
+ task.task.node.next = null;
+ task.tree_shaking = this.linker.options.tree_shaking;
+ task.known_target = target;
+ task.jsx.development = switch (t.options.force_node_env) {
+ .development => true,
+ .production => false,
+ .unspecified => t.options.jsx.development,
+ };
+
+ // Handle onLoad plugins as entry points
+ if (!this.enqueueOnLoadPluginIfNeeded(task)) {
+ if (loader.shouldCopyForBundling()) {
+ var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()];
+ additional_files.push(this.graph.allocator, .{ .source_index = task.source_index.get() }) catch unreachable;
+ this.graph.input_files.items(.side_effects)[source_index.get()] = .no_side_effects__pure_data;
+ this.graph.estimated_file_loader_count += 1;
+ }
+
+ this.graph.pool.schedule(task);
+ }
+ }
+
pub fn enqueueEntryItem(
this: *BundleV2,
hash: ?u64,
@@ -1211,8 +1271,10 @@ pub const BundleV2 = struct {
// gets its content set after the scan+parse phase, but before linking.
//
// The dev server does not use these, as it is implement in the HMR runtime.
- if (this.transpiler.options.dev_server == null) {
+ if (variant != .dev_server) {
try this.reserveSourceIndexesForBake();
+ } else {
+ bun.assert(this.transpiler.options.dev_server != null);
}
{
@@ -1238,8 +1300,18 @@ pub const BundleV2 = struct {
},
.dev_server => {
for (data.files.set.keys(), data.files.set.values()) |abs_path, flags| {
- const resolved = this.transpiler.resolveEntryPoint(abs_path) catch
+ const resolved = this.transpiler.resolveEntryPoint(abs_path) catch |err| {
+ const dev = this.transpiler.options.dev_server orelse unreachable;
+ dev.handleParseTaskFailure(
+ err,
+ if (flags.client) .client else .server,
+ abs_path,
+ this.transpiler.log,
+ this,
+ ) catch bun.outOfMemory();
+ this.transpiler.log.reset();
continue;
+ };
if (flags.client) brk: {
const source_index = try this.enqueueEntryItem(null, resolved, true, .browser) orelse break :brk;
@@ -2169,7 +2241,7 @@ pub const BundleV2 = struct {
const source_index = load.source_index;
var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()];
additional_files.push(this.graph.allocator, .{ .source_index = source_index.get() }) catch unreachable;
- this.graph.input_files.items(.side_effects)[source_index.get()] = _resolver.SideEffects.no_side_effects__pure_data;
+ this.graph.input_files.items(.side_effects)[source_index.get()] = .no_side_effects__pure_data;
this.graph.estimated_file_loader_count += 1;
}
this.graph.input_files.items(.loader)[load.source_index.get()] = code.loader;
@@ -2184,7 +2256,11 @@ pub const BundleV2 = struct {
this.graph.pool.schedule(parse_task);
if (this.bun_watcher) |watcher| add_watchers: {
- // TODO: support explicit watchFiles array
+ if (!this.shouldAddWatcherPlugin(load.namespace, load.path)) break :add_watchers;
+
+ // TODO: support explicit watchFiles array. this is not done
+ // right now because DevServer requires a table to map
+ // watched files and dirs to their respective dependants.
const fd = if (bun.Watcher.requires_file_descriptors)
switch (bun.sys.open(
&(std.posix.toPosixPath(load.path) catch break :add_watchers),
@@ -2196,7 +2272,8 @@ pub const BundleV2 = struct {
}
else
bun.invalid_fd;
- _ = watcher.appendFile(
+
+ _ = watcher.addFile(
fd,
load.path,
bun.Watcher.getHash(load.path),
@@ -2223,6 +2300,7 @@ pub const BundleV2 = struct {
load.bakeGraph(),
source.path.keyForIncrementalGraph(),
&temp_log,
+ this,
) catch bun.outOfMemory();
} else {
log.msgs.append(msg) catch bun.outOfMemory();
@@ -2478,6 +2556,20 @@ pub const BundleV2 = struct {
return try this.linker.generateChunksInParallel(chunks, false);
}
+ fn shouldAddWatcherPlugin(bv2: *BundleV2, namespace: []const u8, path: []const u8) bool {
+ return bun.strings.eqlComptime(namespace, "file") and
+ std.fs.path.isAbsolute(path) and
+ bv2.shouldAddWatcher(path);
+ }
+
+ fn shouldAddWatcher(bv2: *BundleV2, path: []const u8) bool {
+ return if (bv2.transpiler.options.dev_server != null)
+ bun.strings.indexOf(path, "/node_modules/") == null and
+ (if (Environment.isWindows) bun.strings.indexOf(path, "\\node_modules\\") == null else true)
+ else
+ true; // `bun build --watch` has always watched node_modules
+ }
+
/// Dev Server uses this instead to run a subset of the transpiler, and to run it asynchronously.
pub fn startFromBakeDevServer(this: *BundleV2, bake_entry_points: bake.DevServer.EntryPointList) !DevServerInput {
this.unique_key = generateUniqueKey();
@@ -2554,6 +2646,7 @@ pub const BundleV2 = struct {
.client,
sources[index].path.text,
&log,
+ this,
);
// Since there is an error, do not treat it as a
// valid CSS chunk.
@@ -2916,6 +3009,14 @@ pub const BundleV2 = struct {
}
};
+ if (strings.eqlComptime(import_record.path.text, "bun:wrap")) {
+ import_record.path.namespace = "bun";
+ import_record.tag = .runtime;
+ import_record.path.text = "wrap";
+ import_record.source_index = .runtime;
+ continue;
+ }
+
if (ast.target.isBun()) {
if (JSC.HardcodedModule.Aliases.get(import_record.path.text, options.Target.bun)) |replacement| {
import_record.path.text = replacement.path;
@@ -3286,15 +3387,17 @@ pub const BundleV2 = struct {
inline .empty, .err => |data| graph.input_files.items(.source)[data.source_index.get()],
.success => |val| val.source,
};
- _ = watcher.addFile(
- parse_result.watcher_data.fd,
- source.path.text,
- bun.hash32(source.path.text),
- graph.input_files.items(.loader)[source.index.get()],
- parse_result.watcher_data.dir_fd,
- null,
- false,
- );
+ if (this.shouldAddWatcher(source.path.text)) {
+ _ = watcher.addFile(
+ parse_result.watcher_data.fd,
+ source.path.text,
+ bun.hash32(source.path.text),
+ graph.input_files.items(.loader)[source.index.get()],
+ parse_result.watcher_data.dir_fd,
+ null,
+ false,
+ );
+ }
}
}
@@ -3528,6 +3631,7 @@ pub const BundleV2 = struct {
err.target.bakeGraph(),
this.graph.input_files.items(.source)[err.source_index.get()].path.text,
&err.log,
+ this,
) catch bun.outOfMemory();
} else if (err.log.msgs.items.len > 0) {
err.log.cloneToWithRecycled(this.transpiler.log, true) catch unreachable;
@@ -4926,7 +5030,6 @@ pub const ParseTask = struct {
opts.macro_context = &this.data.macro_context;
opts.package_version = task.package_version;
- opts.features.auto_polyfill_require = output_format == .esm;
opts.features.allow_runtime = !source.index.isRuntime();
opts.features.unwrap_commonjs_to_esm = output_format == .esm and FeatureFlags.unwrap_commonjs_to_esm;
opts.features.top_level_await = output_format == .esm or output_format == .internal_bake_dev;
@@ -4939,6 +5042,7 @@ pub const ParseTask = struct {
opts.features.emit_decorator_metadata = transpiler.options.emit_decorator_metadata;
opts.features.unwrap_commonjs_packages = transpiler.options.unwrap_commonjs_packages;
opts.features.hot_module_reloading = output_format == .internal_bake_dev and !source.index.isRuntime();
+ opts.features.auto_polyfill_require = output_format == .esm and !opts.features.hot_module_reloading;
opts.features.react_fast_refresh = target == .browser and
transpiler.options.react_fast_refresh and
loader.isJSX() and
@@ -5623,7 +5727,7 @@ pub const AdditionalFile = union(enum) {
output_file: Index.Int,
};
-const PathToSourceIndexMap = std.HashMapUnmanaged(u64, Index.Int, IdentityContext(u64), 80);
+pub const PathToSourceIndexMap = std.HashMapUnmanaged(u64, Index.Int, IdentityContext(u64), 80);
const EntryPoint = struct {
/// This may be an absolute path or a relative path. If absolute, it will
@@ -12902,7 +13006,7 @@ pub const LinkerContext = struct {
stmts: *StmtList,
part_stmts: []const js_ast.Stmt,
allocator: std.mem.Allocator,
- ast: *const JSAst,
+ ast: *JSAst,
) !void {
_ = source_index; // may be used
@@ -12941,7 +13045,18 @@ pub const LinkerContext = struct {
true;
// module.importSync('path', (module) => ns = module, ['dep', 'etc'])
- const call = if (is_enabled) call: {
+ const call = if (is_enabled) if (record.tag == .runtime)
+ Expr.init(E.Call, .{
+ .target = Expr.init(E.Dot, .{
+ .target = module_id,
+ .name = "require",
+ .name_loc = stmt.loc,
+ }, stmt.loc),
+ .args = .init(
+ try allocator.dupe(Expr, &.{Expr.init(E.String, .{ .data = "bun:wrap" }, .Empty)}),
+ ),
+ }, .Empty)
+ else call: {
const path = if (record.source_index.isValid())
c.parse_graph.input_files.items(.source)[record.source_index.get()].path
else
@@ -13051,7 +13166,7 @@ pub const LinkerContext = struct {
// referencing everything by array makes the code a lot more annoying :(
var ast: JSAst = c.graph.ast.get(part_range.source_index.get());
- // For Bun Kit, part generation is entirely special cased.
+ // For HMR, part generation is entirely special cased.
// - export wrapping is already done.
// - import wrapping needs to know resolved paths
// - one part range per file (ensured by another special cased code path in findAllImportedPartsInJSOrder)
@@ -13115,6 +13230,8 @@ pub const LinkerContext = struct {
},
} }, Logger.Loc.Empty));
+ ast.flags.uses_module_ref = true;
+
return c.printCodeForFileInChunkJS(
r,
allocator,
@@ -13122,9 +13239,9 @@ pub const LinkerContext = struct {
(&single_stmt)[0..1],
&ast,
flags,
- toESMRef,
- toCommonJSRef,
- runtimeRequireRef,
+ .None,
+ .None,
+ null,
part_range.source_index,
);
}
@@ -13667,7 +13784,7 @@ pub const LinkerContext = struct {
.indent = .{},
.commonjs_named_exports = ast.commonjs_named_exports,
.commonjs_named_exports_ref = ast.exports_ref,
- .commonjs_module_ref = if (ast.flags.uses_module_ref or c.options.output_format == .internal_bake_dev)
+ .commonjs_module_ref = if (ast.flags.uses_module_ref)
ast.module_ref
else
Ref.None,
diff --git a/src/bunfig.zig b/src/bunfig.zig
index b7d62bc340..ae8691d90b 100644
--- a/src/bunfig.zig
+++ b/src/bunfig.zig
@@ -858,7 +858,6 @@ pub const Bunfig = struct {
.import_source = @constCast(jsx_import_source),
.runtime = jsx_runtime,
.development = jsx_dev,
- .react_fast_refresh = false,
};
} else {
var jsx: *Api.Jsx = &this.bunfig.jsx.?;
diff --git a/src/cli.zig b/src/cli.zig
index 5cc3d9d13f..558805e8db 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -271,7 +271,8 @@ pub const Arguments = struct {
} ++ auto_only_params;
const build_only_params = [_]ParamType{
- clap.parseParam("--compile Generate a standalone Bun executable containing your bundled code") catch unreachable,
+ clap.parseParam("--production Set NODE_ENV=production and enable minification") catch unreachable,
+ clap.parseParam("--compile Generate a standalone Bun executable containing your bundled code. Implies --production") catch unreachable,
clap.parseParam("--bytecode Use a bytecode cache") catch unreachable,
clap.parseParam("--watch Automatically restart the process on file change") catch unreachable,
clap.parseParam("--no-clear-screen Disable clearing the terminal screen on reload when --watch is enabled") catch unreachable,
@@ -297,7 +298,7 @@ pub const Arguments = struct {
clap.parseParam("--minify-syntax Minify syntax and inline data") catch unreachable,
clap.parseParam("--minify-whitespace Minify whitespace") catch unreachable,
clap.parseParam("--minify-identifiers Minify identifiers") catch unreachable,
- clap.parseParam("--css-chunking Chunk CSS files together to reduce duplicated CSS loaded in a browser. Only has an effect when multiple entrypoints import CSS") catch unreachable,
+ clap.parseParam("--css-chunking Chunk CSS files together to reduce duplicated CSS loaded in a browser. Only has an effect when multiple entrypoints import CSS") catch unreachable,
clap.parseParam("--dump-environment-variables") catch unreachable,
clap.parseParam("--conditions ... Pass custom conditions to resolve") catch unreachable,
clap.parseParam("--app (EXPERIMENTAL) Build a web app for production using Bun Bake.") catch unreachable,
@@ -833,6 +834,8 @@ pub const Arguments = struct {
ctx.bundler_options.transform_only = args.flag("--no-bundle");
ctx.bundler_options.bytecode = args.flag("--bytecode");
+ const production = args.flag("--production");
+
if (args.flag("--app")) {
if (!bun.FeatureFlags.bake()) {
Output.errGeneric("To use the experimental \"--app\" option, upgrade to the canary build of bun via \"bun upgrade --canary\"", .{});
@@ -864,7 +867,7 @@ pub const Arguments = struct {
ctx.bundler_options.footer = footer;
}
- const minify_flag = args.flag("--minify");
+ const minify_flag = args.flag("--minify") or production;
ctx.bundler_options.minify_syntax = minify_flag or args.flag("--minify-syntax");
ctx.bundler_options.minify_whitespace = minify_flag or args.flag("--minify-whitespace");
ctx.bundler_options.minify_identifiers = minify_flag or args.flag("--minify-identifiers");
@@ -1128,7 +1131,6 @@ pub const Arguments = struct {
const jsx_fragment = args.option("--jsx-fragment");
const jsx_import_source = args.option("--jsx-import-source");
const jsx_runtime = args.option("--jsx-runtime");
- const react_fast_refresh = true;
if (cmd == .AutoCommand or cmd == .RunCommand) {
// "run.silent" in bunfig.toml
@@ -1163,8 +1165,7 @@ pub const Arguments = struct {
if (jsx_factory != null or
jsx_fragment != null or
jsx_import_source != null or
- jsx_runtime != null or
- !react_fast_refresh)
+ jsx_runtime != null)
{
var default_factory = "".*;
var default_fragment = "".*;
@@ -1176,7 +1177,6 @@ pub const Arguments = struct {
.import_source = (jsx_import_source orelse &default_import_source),
.runtime = if (jsx_runtime) |runtime| try resolve_jsx_runtime(runtime) else Api.JsxRuntime.automatic,
.development = false,
- .react_fast_refresh = react_fast_refresh,
};
} else {
opts.jsx = Api.Jsx{
@@ -1185,7 +1185,6 @@ pub const Arguments = struct {
.import_source = (jsx_import_source orelse opts.jsx.?.import_source),
.runtime = if (jsx_runtime) |runtime| try resolve_jsx_runtime(runtime) else opts.jsx.?.runtime,
.development = false,
- .react_fast_refresh = react_fast_refresh,
};
}
}
@@ -1200,6 +1199,19 @@ pub const Arguments = struct {
Output.flush();
Global.exit(1);
}
+
+ if (args.flag("--production")) {
+ const any_html = for (opts.entry_points) |entry_point| {
+ if (strings.hasSuffixComptime(entry_point, ".html")) {
+ break true;
+ }
+ } else false;
+ if (any_html) {
+ ctx.bundler_options.css_chunking = true;
+ }
+
+ ctx.bundler_options.production = true;
+ }
}
if (opts.log_level) |log_level| {
@@ -1554,6 +1566,8 @@ pub const Command = struct {
bake_debug_dump_server: bool = false,
bake_debug_disable_minify: bool = false,
+ production: bool = false,
+
env_behavior: Api.DotEnvBehavior = .disable,
env_prefix: []const u8 = "",
elide_lines: ?usize = null,
diff --git a/src/cli/build_command.zig b/src/cli/build_command.zig
index 2596dfb6fa..1c33f0fa48 100644
--- a/src/cli/build_command.zig
+++ b/src/cli/build_command.zig
@@ -224,10 +224,17 @@ pub const BuildCommand = struct {
this_transpiler.options.env.behavior = ctx.bundler_options.env_behavior;
this_transpiler.options.env.prefix = ctx.bundler_options.env_prefix;
+ if (ctx.bundler_options.production) {
+ try this_transpiler.env.map.put("NODE_ENV", "production");
+ }
+
try this_transpiler.configureDefines();
this_transpiler.configureLinker();
- // This is currently done in DevServer by default, but not in Bun.build
+ if (ctx.bundler_options.production) {
+ bun.assert(!this_transpiler.options.jsx.development);
+ }
+
if (!this_transpiler.options.production) {
try this_transpiler.options.conditions.appendSlice(&.{"development"});
}
@@ -316,6 +323,15 @@ pub const BuildCommand = struct {
break :brk result.output_files;
}
+ if (ctx.bundler_options.outdir.len == 0 and outfile.len > 0 and !ctx.bundler_options.compile) {
+ this_transpiler.options.entry_naming = try std.fmt.allocPrint(allocator, "./{s}", .{
+ std.fs.path.basename(outfile),
+ });
+ if (std.fs.path.dirname(outfile)) |dir|
+ ctx.bundler_options.outdir = dir;
+ this_transpiler.resolver.opts.entry_naming = this_transpiler.options.entry_naming;
+ }
+
break :brk (BundleV2.generateFromCLI(
&this_transpiler,
allocator,
@@ -338,168 +354,209 @@ pub const BuildCommand = struct {
};
const bundled_end = std.time.nanoTimestamp();
- {
- var write_summary = false;
- {
- dump: {
- defer Output.flush();
- var writer = Output.writer();
- var output_dir = this_transpiler.options.output_dir;
+ var had_err = false;
+ dump: {
+ defer Output.flush();
+ var writer = Output.writer();
+ var output_dir = this_transpiler.options.output_dir;
- const will_be_one_file =
- // --outdir is not supported with --compile
- // but you can still use --outfile
- // in which case, we should set the output dir to the dirname of the outfile
- // https://github.com/oven-sh/bun/issues/8697
- ctx.bundler_options.compile or
- (output_files.len == 1 and output_files[0].value == .buffer);
+ const will_be_one_file =
+ // --outdir is not supported with --compile
+ // but you can still use --outfile
+ // in which case, we should set the output dir to the dirname of the outfile
+ // https://github.com/oven-sh/bun/issues/8697
+ ctx.bundler_options.compile or
+ (output_files.len == 1 and output_files[0].value == .buffer);
- if (output_dir.len == 0 and outfile.len > 0 and will_be_one_file) {
- output_dir = std.fs.path.dirname(outfile) orelse ".";
- output_files[0].dest_path = std.fs.path.basename(outfile);
- }
+ if (output_dir.len == 0 and outfile.len > 0 and will_be_one_file) {
+ output_dir = std.fs.path.dirname(outfile) orelse ".";
+ output_files[0].dest_path = std.fs.path.basename(outfile);
+ }
- if (!ctx.bundler_options.compile) {
- if (outfile.len == 0 and output_files.len == 1 and ctx.bundler_options.outdir.len == 0) {
- // if --no-bundle is passed, it won't have an output dir
- if (output_files[0].value == .buffer)
- try writer.writeAll(output_files[0].value.buffer.bytes);
- break :dump;
- }
- }
-
- var root_path = output_dir;
- if (root_path.len == 0 and ctx.args.entry_points.len == 1)
- root_path = std.fs.path.dirname(ctx.args.entry_points[0]) orelse ".";
-
- const root_dir = if (root_path.len == 0 or strings.eqlComptime(root_path, "."))
- std.fs.cwd()
- else
- std.fs.cwd().makeOpenPath(root_path, .{}) catch |err| {
- Output.prettyErrorln("{s} while attempting to open output directory {}", .{ @errorName(err), bun.fmt.quote(root_path) });
- exitOrWatch(1, ctx.debug.hot_reload == .watch);
- unreachable;
- };
-
- const all_paths = try ctx.allocator.alloc([]const u8, output_files.len);
- var max_path_len: usize = 0;
- for (all_paths, output_files) |*dest, src| {
- dest.* = src.dest_path;
- }
-
- const from_path = resolve_path.longestCommonPath(all_paths);
-
- for (output_files) |f| {
- max_path_len = @max(
- @max(from_path.len, f.dest_path.len) + 2 - from_path.len,
- max_path_len,
- );
- }
-
- if (ctx.bundler_options.compile) {
- printSummary(
- bundled_end,
- minify_duration,
- this_transpiler.options.minify_identifiers or this_transpiler.options.minify_whitespace or this_transpiler.options.minify_syntax,
- input_code_length,
- reachable_file_count,
- output_files,
- );
-
- Output.flush();
-
- const is_cross_compile = !compile_target.isDefault();
-
- if (outfile.len == 0 or strings.eqlComptime(outfile, ".") or strings.eqlComptime(outfile, "..") or strings.eqlComptime(outfile, "../")) {
- outfile = "index";
- }
-
- if (compile_target.os == .windows and !strings.hasSuffixComptime(outfile, ".exe")) {
- outfile = try std.fmt.allocPrint(allocator, "{s}.exe", .{outfile});
- }
-
- try bun.StandaloneModuleGraph.toExecutable(
- compile_target,
- allocator,
- output_files,
- root_dir,
- this_transpiler.options.public_path,
- outfile,
- this_transpiler.env,
- this_transpiler.options.output_format,
- ctx.bundler_options.windows_hide_console,
- ctx.bundler_options.windows_icon,
- );
- const compiled_elapsed = @divTrunc(@as(i64, @truncate(std.time.nanoTimestamp() - bundled_end)), @as(i64, std.time.ns_per_ms));
- const compiled_elapsed_digit_count: isize = switch (compiled_elapsed) {
- 0...9 => 3,
- 10...99 => 2,
- 100...999 => 1,
- 1000...9999 => 0,
- else => 0,
- };
- const padding_buf = [_]u8{' '} ** 16;
- const padding_ = padding_buf[0..@as(usize, @intCast(compiled_elapsed_digit_count))];
- Output.pretty("{s}", .{padding_});
-
- Output.printElapsedStdoutTrim(@as(f64, @floatFromInt(compiled_elapsed)));
-
- Output.pretty(" compile {s}{s}", .{
- outfile,
- if (compile_target.os == .windows and !strings.hasSuffixComptime(outfile, ".exe")) ".exe" else "",
- });
-
- if (is_cross_compile) {
- Output.pretty(" {s}\n", .{compile_target});
- } else {
- Output.pretty("\n", .{});
- }
-
- break :dump;
- }
-
- // On posix, file handles automatically close on process exit by the OS
- // Closing files shows up in profiling.
- // So don't do that unless we actually need to.
- // const do_we_need_to_close = !FeatureFlags.store_file_descriptors or (@intCast(usize, root_dir.fd) + open_file_limit) < output_files.len;
-
- for (output_files) |f| {
- const rel_path = f.writeToDisk(root_dir, from_path) catch |err| {
- Output.err(err, "failed to write file '{}'", .{bun.fmt.quote(f.dest_path)});
- continue;
- };
-
- // Print summary
- _ = try writer.write("\n");
- const padding_count = 2 + (@max(rel_path.len, max_path_len) - rel_path.len);
- try writer.writeByteNTimes(' ', 2);
- try writer.writeAll(rel_path);
- try writer.writeByteNTimes(' ', padding_count);
- const size = @as(f64, @floatFromInt(f.size)) / 1000.0;
- try std.fmt.formatType(size, "d", .{ .precision = 2 }, writer, 1);
- try writer.writeAll(" KB\n");
- }
-
- write_summary = true;
- }
- if (write_summary and log.errors == 0) {
- Output.prettyln("\n", .{});
- Output.printElapsedStdoutTrim(
- @as(f64, @floatFromInt((@divTrunc(@as(i64, @truncate(std.time.nanoTimestamp() - bun.CLI.start_time)), @as(i64, std.time.ns_per_ms))))),
- );
- if (this_transpiler.options.transform_only) {
- Output.prettyln(" transpile", .{});
- } else {
- Output.prettyln(" bundle {d} modules", .{
- reachable_file_count,
- });
- }
+ if (!ctx.bundler_options.compile) {
+ if (outfile.len == 0 and output_files.len == 1 and ctx.bundler_options.outdir.len == 0) {
+ // if --no-bundle is passed, it won't have an output dir
+ if (output_files[0].value == .buffer)
+ try writer.writeAll(output_files[0].value.buffer.bytes);
+ break :dump;
}
}
- try log.print(Output.errorWriter());
- exitOrWatch(0, ctx.debug.hot_reload == .watch);
+ var root_path = output_dir;
+ if (root_path.len == 0 and ctx.args.entry_points.len == 1)
+ root_path = std.fs.path.dirname(ctx.args.entry_points[0]) orelse ".";
+
+ const root_dir = if (root_path.len == 0 or strings.eqlComptime(root_path, "."))
+ std.fs.cwd()
+ else
+ std.fs.cwd().makeOpenPath(root_path, .{}) catch |err| {
+ Output.err(err, "could not open output directory {}", .{bun.fmt.quote(root_path)});
+ exitOrWatch(1, ctx.debug.hot_reload == .watch);
+ unreachable;
+ };
+
+ const all_paths = try ctx.allocator.alloc([]const u8, output_files.len);
+ var max_path_len: usize = 0;
+ for (all_paths, output_files) |*dest, src| {
+ dest.* = src.dest_path;
+ }
+
+ const from_path = resolve_path.longestCommonPath(all_paths);
+
+ var size_padding: usize = 0;
+
+ for (output_files) |f| {
+ max_path_len = @max(
+ @max(from_path.len, f.dest_path.len) + 2 - from_path.len,
+ max_path_len,
+ );
+ size_padding = @max(size_padding, std.fmt.count("{}", .{bun.fmt.size(f.size, .{})}));
+ }
+
+ if (ctx.bundler_options.compile) {
+ printSummary(
+ bundled_end,
+ minify_duration,
+ this_transpiler.options.minify_identifiers or this_transpiler.options.minify_whitespace or this_transpiler.options.minify_syntax,
+ input_code_length,
+ reachable_file_count,
+ output_files,
+ );
+
+ Output.flush();
+
+ const is_cross_compile = !compile_target.isDefault();
+
+ if (outfile.len == 0 or strings.eqlComptime(outfile, ".") or strings.eqlComptime(outfile, "..") or strings.eqlComptime(outfile, "../")) {
+ outfile = "index";
+ }
+
+ if (compile_target.os == .windows and !strings.hasSuffixComptime(outfile, ".exe")) {
+ outfile = try std.fmt.allocPrint(allocator, "{s}.exe", .{outfile});
+ }
+
+ try bun.StandaloneModuleGraph.toExecutable(
+ compile_target,
+ allocator,
+ output_files,
+ root_dir,
+ this_transpiler.options.public_path,
+ outfile,
+ this_transpiler.env,
+ this_transpiler.options.output_format,
+ ctx.bundler_options.windows_hide_console,
+ ctx.bundler_options.windows_icon,
+ );
+ const compiled_elapsed = @divTrunc(@as(i64, @truncate(std.time.nanoTimestamp() - bundled_end)), @as(i64, std.time.ns_per_ms));
+ const compiled_elapsed_digit_count: isize = switch (compiled_elapsed) {
+ 0...9 => 3,
+ 10...99 => 2,
+ 100...999 => 1,
+ 1000...9999 => 0,
+ else => 0,
+ };
+ const padding_buf = [_]u8{' '} ** 16;
+ const padding_ = padding_buf[0..@as(usize, @intCast(compiled_elapsed_digit_count))];
+ Output.pretty("{s}", .{padding_});
+
+ Output.printElapsedStdoutTrim(@as(f64, @floatFromInt(compiled_elapsed)));
+
+ Output.pretty(" compile {s}{s}", .{
+ outfile,
+ if (compile_target.os == .windows and !strings.hasSuffixComptime(outfile, ".exe")) ".exe" else "",
+ });
+
+ if (is_cross_compile) {
+ Output.pretty(" {s}\n", .{compile_target});
+ } else {
+ Output.pretty("\n", .{});
+ }
+
+ break :dump;
+ }
+
+ if (log.errors == 0) {
+ if (this_transpiler.options.transform_only) {
+ Output.prettyln("Transpiled file in {d}ms", .{
+ @divFloor(std.time.nanoTimestamp() - bun.CLI.start_time, std.time.ns_per_ms),
+ });
+ } else {
+ Output.prettyln("Bundled {d} module{s} in {d}ms", .{
+ reachable_file_count,
+ if (reachable_file_count == 1) "" else "s",
+ @divFloor(std.time.nanoTimestamp() - bun.CLI.start_time, std.time.ns_per_ms),
+ });
+ }
+ Output.prettyln("\n", .{});
+ Output.flush();
+ }
+
+ for (output_files) |f| {
+ size_padding = @max(size_padding, std.fmt.count("{}", .{bun.fmt.size(f.size, .{})}));
+ }
+
+ for (output_files) |f| {
+ f.writeToDisk(root_dir, from_path) catch |err| {
+ Output.err(err, "failed to write file '{}'", .{bun.fmt.quote(f.dest_path)});
+ had_err = true;
+ continue;
+ };
+
+ bun.debugAssert(!std.fs.path.isAbsolute(f.dest_path));
+
+ const rel_path = bun.strings.trimPrefixComptime(u8, f.dest_path, "./");
+
+ // Print summary
+ const padding_count = (@max(rel_path.len, max_path_len) - rel_path.len);
+ try writer.writeByteNTimes(' ', 2);
+
+ if (Output.enable_ansi_colors_stdout) try writer.writeAll(switch (f.output_kind) {
+ .@"entry-point" => Output.prettyFmt("", true),
+ .chunk => Output.prettyFmt("", true),
+ .asset => Output.prettyFmt("", true),
+ .sourcemap => Output.prettyFmt("", true),
+ .bytecode => Output.prettyFmt("", true),
+ });
+
+ try writer.writeAll(rel_path);
+ if (Output.enable_ansi_colors_stdout) {
+ // highlight big files
+ const warn_threshold: usize = switch (f.output_kind) {
+ .@"entry-point", .chunk => 128 * 1024,
+ .asset => 16 * 1024 * 1024,
+ else => std.math.maxInt(usize),
+ };
+ if (f.size > warn_threshold) {
+ try writer.writeAll(Output.prettyFmt("", true));
+ } else {
+ try writer.writeAll("\x1b[0m");
+ }
+ }
+
+ try writer.writeByteNTimes(' ', padding_count);
+ try writer.print("{s} ", .{bun.fmt.size(f.size, .{})});
+ try writer.writeByteNTimes(' ', size_padding - std.fmt.count("{}", .{bun.fmt.size(f.size, .{})}));
+
+ if (Output.enable_ansi_colors_stdout) {
+ try writer.writeAll("\x1b[2m");
+ }
+ try writer.print("({s})", .{switch (f.output_kind) {
+ .@"entry-point" => "entry point",
+ .chunk => "chunk",
+ .asset => "asset",
+ .sourcemap => "source map",
+ .bytecode => "bytecode",
+ }});
+ if (Output.enable_ansi_colors_stdout)
+ try writer.writeAll("\x1b[0m");
+ try writer.writeAll("\n");
+ }
+
+ Output.prettyln("\n", .{});
}
+
+ try log.print(Output.errorWriter());
+ exitOrWatch(if (had_err) 1 else 0, ctx.debug.hot_reload == .watch);
}
};
diff --git a/src/crash_handler.zig b/src/crash_handler.zig
index 27cc449755..0a20373464 100644
--- a/src/crash_handler.zig
+++ b/src/crash_handler.zig
@@ -1829,6 +1829,10 @@ pub fn removePreCrashHandler(ptr: *anyopaque) void {
_ = before_crash_handlers.orderedRemove(index);
}
+pub fn isPanicking() bool {
+ return panicking.load(.monotonic) > 0;
+}
+
export fn Bun__crashHandler(message_ptr: [*]u8, message_len: usize) noreturn {
crashHandler(.{ .panic = message_ptr[0..message_len] }, null, @returnAddress());
}
diff --git a/src/import_record.zig b/src/import_record.zig
index ab1ff7958d..3422e325b4 100644
--- a/src/import_record.zig
+++ b/src/import_record.zig
@@ -98,12 +98,14 @@ pub const ImportKind = enum(u8) {
};
pub const ImportRecord = struct {
+ pub const Index = bun.GenericIndex(u32, ImportRecord);
+
range: logger.Range,
path: fs.Path,
kind: ImportKind,
tag: Tag = .none,
- source_index: Index = Index.invalid,
+ source_index: bun.JSAst.Index = .invalid,
print_mode: PrintMode = .normal,
diff --git a/src/install/patch_install.zig b/src/install/patch_install.zig
index f3ca73dc22..d4c4fdd60b 100644
--- a/src/install/patch_install.zig
+++ b/src/install/patch_install.zig
@@ -370,7 +370,7 @@ pub const PatchTask = struct {
return try log.addErrorFmtOpts(
this.manager.allocator,
"failed applying patch file: {}",
- .{e.withoutPath()},
+ .{e},
.{},
);
}
@@ -471,7 +471,7 @@ pub const PatchTask = struct {
null,
Loc.Empty,
this.manager.allocator,
- "patchfile {s} is empty, plese restore or delete it.",
+ "patchfile {s} is empty, please restore or delete it.",
.{absolute_patchfile_path},
) catch bun.outOfMemory();
return null;
diff --git a/src/js/builtins/Module.ts b/src/js/builtins/Module.ts
index 3030930313..648bea5d6c 100644
--- a/src/js/builtins/Module.ts
+++ b/src/js/builtins/Module.ts
@@ -84,7 +84,11 @@ export function overridableRequire(this: CommonJSModuleRecord, id: string) {
// It only supports "true" and undefined. Anything non-truthy is treated as undefined.
// https://github.com/oven-sh/bun/issues/14411
if (namespace.__esModule === undefined) {
- namespace.__esModule = true;
+ try {
+ namespace.__esModule = true;
+ } catch {
+ // https://github.com/oven-sh/bun/issues/17816
+ }
}
return (mod.exports = namespace);
diff --git a/src/js_ast.zig b/src/js_ast.zig
index e8f663e77f..904da1df73 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -1567,6 +1567,20 @@ pub const E = struct {
inverted: bool = false,
};
+ pub const Special = union(enum) {
+        /// Emits `exports` or `module.exports` depending on `commonjs_named_exports_deoptimized`
+ module_exports,
+ /// `import.meta.hot`
+ hot,
+ /// `import.meta.hot.accept`
+ hot_accept,
+ /// Converted from `hot_accept` to this in js_parser.zig when it is
+ /// passed strings. Printed as `import.meta.hot.acceptSpecifiers`
+ hot_accept_visited,
+ /// Prints the resolved specifier string for an import record.
+ resolved_specifier_string: ImportRecord.Index,
+ };
+
pub const Call = struct {
// Node:
target: ExprNodeIndex,
@@ -2764,7 +2778,7 @@ pub const E = struct {
};
pub const RequireResolveString = struct {
- import_record_index: u32 = 0,
+ import_record_index: u32,
// close_paren_loc: logger.Loc = logger.Loc.Empty,
};
@@ -4099,18 +4113,7 @@ pub const Expr = struct {
},
};
},
- E.TemplatePart => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_template_part = brk: {
- const item = allocator.create(Type) catch unreachable;
- item.* = st;
- break :brk item;
- },
- },
- };
- },
+
E.Template => {
return Expr{
.loc = loc,
@@ -4461,14 +4464,7 @@ pub const Expr = struct {
},
};
},
- E.TemplatePart => {
- return Expr{
- .loc = loc,
- .data = Data{
- .e_template_part = Data.Store.append(Type, st),
- },
- };
- },
+
E.Template => {
return Expr{
.loc = loc,
@@ -4577,7 +4573,6 @@ pub const Expr = struct {
e_jsx_element,
e_object,
e_spread,
- e_template_part,
e_template,
e_reg_exp,
e_await,
@@ -4588,7 +4583,6 @@ pub const Expr = struct {
e_import_identifier,
e_private_identifier,
e_commonjs_export_identifier,
- e_module_dot_exports,
e_boolean,
e_number,
e_big_int,
@@ -4606,6 +4600,7 @@ pub const Expr = struct {
e_import_meta,
e_import_meta_main,
e_require_main,
+ e_special,
e_inlined_enum,
// object, regex and array may have had side effects
@@ -4656,7 +4651,6 @@ pub const Expr = struct {
.e_big_int => writer.writeAll("BigInt"),
.e_object => writer.writeAll("object"),
.e_spread => writer.writeAll("..."),
- .e_template_part => writer.writeAll("template_part"),
.e_template => writer.writeAll("template"),
.e_reg_exp => writer.writeAll("regexp"),
.e_await => writer.writeAll("await"),
@@ -4945,16 +4939,7 @@ pub const Expr = struct {
},
}
}
- pub fn isTemplatePart(self: Tag) bool {
- switch (self) {
- .e_template_part => {
- return true;
- },
- else => {
- return false;
- },
- }
- }
+
pub fn isTemplate(self: Tag) bool {
switch (self) {
.e_template => {
@@ -5265,7 +5250,6 @@ pub const Expr = struct {
e_jsx_element: *E.JSXElement,
e_object: *E.Object,
e_spread: *E.Spread,
- e_template_part: *E.TemplatePart,
e_template: *E.Template,
e_reg_exp: *E.RegExp,
e_await: *E.Await,
@@ -5277,7 +5261,6 @@ pub const Expr = struct {
e_import_identifier: E.ImportIdentifier,
e_private_identifier: E.PrivateIdentifier,
e_commonjs_export_identifier: E.CommonJSExportIdentifier,
- e_module_dot_exports,
e_boolean: E.Boolean,
e_number: E.Number,
@@ -5300,6 +5283,10 @@ pub const Expr = struct {
e_import_meta_main: E.ImportMetaMain,
e_require_main,
+        /// Covers some exotic AST node types under one namespace, since all
+        /// the places where it appears follow similar handling.
+ e_special: E.Special,
+
e_inlined_enum: *E.InlinedEnum,
comptime {
@@ -5377,11 +5364,6 @@ pub const Expr = struct {
item.* = el.*;
return .{ .e_spread = item };
},
- .e_template_part => |el| {
- const item = try allocator.create(std.meta.Child(@TypeOf(this.e_template_part)));
- item.* = el.*;
- return .{ .e_template_part = item };
- },
.e_template => |el| {
const item = try allocator.create(std.meta.Child(@TypeOf(this.e_template)));
item.* = el.*;
@@ -5568,14 +5550,6 @@ pub const Expr = struct {
});
return .{ .e_spread = item };
},
- .e_template_part => |el| {
- const item = bun.create(allocator, E.TemplatePart, .{
- .value = try el.value.deepClone(allocator),
- .tail_loc = el.tail_loc,
- .tail = el.tail,
- });
- return .{ .e_template_part = item };
- },
.e_template => |el| {
const item = bun.create(allocator, E.Template, .{
.tag = if (el.tag) |tag| try tag.deepClone(allocator) else null,
@@ -5709,9 +5683,6 @@ pub const Expr = struct {
if (e.value) |value|
value.data.writeToHasher(hasher, symbol_table);
},
- .e_template_part => {
- // TODO: delete e_template_part as hit has zero usages
- },
.e_template => |e| {
_ = e; // autofix
},
@@ -5772,7 +5743,7 @@ pub const Expr = struct {
.e_new_target,
.e_require_main,
.e_import_meta,
- .e_module_dot_exports,
+ .e_special,
=> {},
}
}
@@ -7331,7 +7302,7 @@ pub const TSNamespaceMember = struct {
/// Inlined enum values can only be numbers and strings
/// This type special cases an encoding similar to JSValue, where nan-boxing is used
/// to encode both a 64-bit pointer or a 64-bit float using 64 bits.
-pub const InlinedEnumValue = packed struct {
+pub const InlinedEnumValue = struct {
raw_data: u64,
pub const Decoded = union(enum) {
@@ -7403,28 +7374,23 @@ pub const ExportsKind = enum {
// module.
esm_with_dynamic_fallback_from_cjs,
- const dynamic = std.EnumSet(ExportsKind).init(.{
- .esm_with_dynamic_fallback = true,
- .esm_with_dynamic_fallback_from_cjs = true,
- .cjs = true,
- });
-
- const with_dynamic_fallback = std.EnumSet(ExportsKind).init(.{
- .esm_with_dynamic_fallback = true,
- .esm_with_dynamic_fallback_from_cjs = true,
- });
-
pub fn isDynamic(self: ExportsKind) bool {
- return dynamic.contains(self);
+ return switch (self) {
+ .cjs, .esm_with_dynamic_fallback, .esm_with_dynamic_fallback_from_cjs => true,
+ .none, .esm => false,
+ };
+ }
+
+ pub fn isESMWithDynamicFallback(self: ExportsKind) bool {
+ return switch (self) {
+ .none, .cjs, .esm => false,
+ .esm_with_dynamic_fallback, .esm_with_dynamic_fallback_from_cjs => true,
+ };
}
pub fn jsonStringify(self: @This(), writer: anytype) !void {
return try writer.write(@tagName(self));
}
-
- pub fn isESMWithDynamicFallback(self: ExportsKind) bool {
- return with_dynamic_fallback.contains(self);
- }
};
pub const DeclaredSymbol = struct {
diff --git a/src/js_parser.zig b/src/js_parser.zig
index 1fb03151de..3a02a181e1 100644
--- a/src/js_parser.zig
+++ b/src/js_parser.zig
@@ -1694,31 +1694,31 @@ pub const SideEffects = enum(u1) {
return null;
}
},
- .e_if => |__if__| {
- __if__.yes = simplifyUnusedExpr(p, __if__.yes) orelse __if__.yes.toEmpty();
- __if__.no = simplifyUnusedExpr(p, __if__.no) orelse __if__.no.toEmpty();
+ .e_if => |ternary| {
+ ternary.yes = simplifyUnusedExpr(p, ternary.yes) orelse ternary.yes.toEmpty();
+ ternary.no = simplifyUnusedExpr(p, ternary.no) orelse ternary.no.toEmpty();
// "foo() ? 1 : 2" => "foo()"
- if (__if__.yes.isEmpty() and __if__.no.isEmpty()) {
- return simplifyUnusedExpr(p, __if__.test_);
+ if (ternary.yes.isEmpty() and ternary.no.isEmpty()) {
+ return simplifyUnusedExpr(p, ternary.test_);
}
// "foo() ? 1 : bar()" => "foo() || bar()"
- if (__if__.yes.isEmpty()) {
+ if (ternary.yes.isEmpty()) {
return Expr.joinWithLeftAssociativeOp(
.bin_logical_or,
- __if__.test_,
- __if__.no,
+ ternary.test_,
+ ternary.no,
p.allocator,
);
}
// "foo() ? bar() : 2" => "foo() && bar()"
- if (__if__.no.isEmpty()) {
+ if (ternary.no.isEmpty()) {
return Expr.joinWithLeftAssociativeOp(
.bin_logical_and,
- __if__.test_,
- __if__.yes,
+ ternary.test_,
+ ternary.yes,
p.allocator,
);
}
@@ -1774,7 +1774,19 @@ pub const SideEffects = enum(u1) {
.bin_loose_ne,
=> {
if (isPrimitiveWithSideEffects(bin.left.data) and isPrimitiveWithSideEffects(bin.right.data)) {
- return Expr.joinWithComma(simplifyUnusedExpr(p, bin.left) orelse bin.left.toEmpty(), simplifyUnusedExpr(p, bin.right) orelse bin.right.toEmpty(), p.allocator);
+ return Expr.joinWithComma(
+ simplifyUnusedExpr(p, bin.left) orelse bin.left.toEmpty(),
+ simplifyUnusedExpr(p, bin.right) orelse bin.right.toEmpty(),
+ p.allocator,
+ );
+ }
+ // If one side is a number, the number can be printed as
+ // `0` since the result being unused doesnt matter, we
+ // only care to invoke the coercion.
+ if (bin.left.data == .e_number) {
+ bin.left.data = .{ .e_number = .{ .value = 0.0 } };
+ } else if (bin.right.data == .e_number) {
+ bin.right.data = .{ .e_number = .{ .value = 0.0 } };
}
},
@@ -1935,7 +1947,7 @@ pub const SideEffects = enum(u1) {
result = result.joinWithComma(visited_right, p.allocator);
}
- return if (result.isMissing()) Expr.empty else result;
+ return if (result.isMissing()) null else result;
}
fn findIdentifiers(binding: Binding, decls: *std.ArrayList(G.Decl)) void {
@@ -17358,9 +17370,7 @@ fn NewParser_(
}
return expr;
- }
-
- if (e_.target.data == .e_require_resolve_call_target) {
+ } else if (e_.target.data == .e_require_resolve_call_target) {
// Ignore calls to require.resolve() if the control flow is provably
// dead here. We don't want to spend time scanning the required files
// if they will never be used.
@@ -17388,7 +17398,11 @@ fn NewParser_(
}
return expr;
- }
+ } else if (e_.target.data.as(.e_special)) |special|
+ switch (special) {
+ .hot_accept => p.handleImportMetaHotAcceptCall(e_),
+ else => {},
+ };
if (comptime allow_macros) {
if (is_macro_ref and !p.options.features.is_macro_runtime) {
@@ -18379,7 +18393,7 @@ fn NewParser_(
name_loc: logger.Loc,
identifier_opts: IdentifierOpts,
) ?Expr {
- switch (target.data) {
+ sw: switch (target.data) {
.e_identifier => |id| {
// Rewrite property accesses on explicit namespace imports as an identifier.
// This lets us replace them easily in the printer to rebind them to
@@ -18614,7 +18628,7 @@ fn NewParser_(
}
// rewrite `module.exports` to `exports`
- return .{ .data = .e_module_dot_exports, .loc = name_loc };
+ return .{ .data = .{ .e_special = .module_exports }, .loc = name_loc };
} else if (p.options.bundle and strings.eqlComptime(name, "id") and identifier_opts.assign_target == .none) {
// inline module.id
p.ignoreUsage(p.module_ref);
@@ -18681,7 +18695,6 @@ fn NewParser_(
return p.maybeRewritePropertyAccessForNamespace(name, &target, loc, name_loc);
}
},
- // TODO: e_inlined_enum -> .e_string -> "length" should inline the length
.e_string => |str| {
if (p.options.features.minify_syntax) {
// minify "long-string".length to 11
@@ -18692,6 +18705,9 @@ fn NewParser_(
}
}
},
+ .e_inlined_enum => |ie| {
+ continue :sw ie.value.data;
+ },
.e_object => |obj| {
if (comptime FeatureFlags.inline_properties_in_transpiler) {
if (p.options.features.minify_syntax) {
@@ -18726,6 +18742,13 @@ fn NewParser_(
return p.valueForImportMetaMain(false, target.loc);
}
+ if (strings.eqlComptime(name, "hot")) {
+ if (!p.options.features.hot_module_reloading)
+ return .{ .data = .e_undefined, .loc = loc };
+
+ return .{ .data = .{ .e_special = .hot }, .loc = loc };
+ }
+
// Make all property accesses on `import.meta.url` side effect free.
return p.newExpr(
E.Dot{
@@ -18773,49 +18796,57 @@ fn NewParser_(
return p.maybeRewritePropertyAccessForNamespace(name, &target, loc, name_loc);
}
},
- .e_module_dot_exports => {
- if (p.shouldUnwrapCommonJSToESM()) {
- if (!p.is_control_flow_dead) {
- if (!p.commonjs_named_exports_deoptimized) {
- if (identifier_opts.is_delete_target) {
- p.deoptimizeCommonJSNamedExports();
- return null;
- }
+ .e_special => |special| switch (special) {
+ .module_exports => {
+ if (p.shouldUnwrapCommonJSToESM()) {
+ if (!p.is_control_flow_dead) {
+ if (!p.commonjs_named_exports_deoptimized) {
+ if (identifier_opts.is_delete_target) {
+ p.deoptimizeCommonJSNamedExports();
+ return null;
+ }
- const named_export_entry = p.commonjs_named_exports.getOrPut(p.allocator, name) catch unreachable;
- if (!named_export_entry.found_existing) {
- const new_ref = p.newSymbol(
- .other,
- std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(name)}) catch unreachable,
- ) catch unreachable;
- p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
- named_export_entry.value_ptr.* = .{
- .loc_ref = LocRef{
- .loc = name_loc,
- .ref = new_ref,
+ const named_export_entry = p.commonjs_named_exports.getOrPut(p.allocator, name) catch unreachable;
+ if (!named_export_entry.found_existing) {
+ const new_ref = p.newSymbol(
+ .other,
+ std.fmt.allocPrint(p.allocator, "${any}", .{bun.fmt.fmtIdentifier(name)}) catch unreachable,
+ ) catch unreachable;
+ p.module_scope.generated.push(p.allocator, new_ref) catch unreachable;
+ named_export_entry.value_ptr.* = .{
+ .loc_ref = LocRef{
+ .loc = name_loc,
+ .ref = new_ref,
+ },
+ .needs_decl = true,
+ };
+ if (p.commonjs_named_exports_needs_conversion == std.math.maxInt(u32))
+ p.commonjs_named_exports_needs_conversion = @as(u32, @truncate(p.commonjs_named_exports.count() - 1));
+ }
+
+ const ref = named_export_entry.value_ptr.*.loc_ref.ref.?;
+ p.recordUsage(ref);
+
+ return p.newExpr(
+ E.CommonJSExportIdentifier{
+ .ref = ref,
+ // Record this as from module.exports
+ .base = .module_dot_exports,
},
- .needs_decl = true,
- };
- if (p.commonjs_named_exports_needs_conversion == std.math.maxInt(u32))
- p.commonjs_named_exports_needs_conversion = @as(u32, @truncate(p.commonjs_named_exports.count() - 1));
+ name_loc,
+ );
+ } else if (p.options.features.commonjs_at_runtime and identifier_opts.assign_target != .none) {
+ p.has_commonjs_export_names = true;
}
-
- const ref = named_export_entry.value_ptr.*.loc_ref.ref.?;
- p.recordUsage(ref);
-
- return p.newExpr(
- E.CommonJSExportIdentifier{
- .ref = ref,
- // Record this as from module.exports
- .base = .module_dot_exports,
- },
- name_loc,
- );
- } else if (p.options.features.commonjs_at_runtime and identifier_opts.assign_target != .none) {
- p.has_commonjs_export_names = true;
}
}
- }
+ },
+ .hot => {
+ if (strings.eqlComptime(name, "accept")) {
+ return .{ .data = .{ .e_special = .hot_accept }, .loc = loc };
+ }
+ },
+ else => {},
},
else => {},
}
@@ -23218,6 +23249,58 @@ fn NewParser_(
}
};
+ const import_meta_hot_accept_err = "Dependencies to `import.meta.hot.accept` must be statically analyzable module specifiers matching direct imports.";
+
+ /// The signatures for `import.meta.hot.accept` are:
+ /// `accept()` - self accept
+ /// `accept(Function)` - self accept
+ /// `accept(string, Function)` - accepting another module
+ /// `accept(string[], Function)` - accepting multiple modules
+ ///
+ /// The strings that can be passed in the first argument must be module
+ /// specifiers that were imported. We enforce that they line up exactly
+ /// with ones that were imported, so that it can share an import record.
+ ///
+ /// This function replaces all specifier strings with `e_require_resolve_string`
+ fn handleImportMetaHotAcceptCall(p: *@This(), call: *E.Call) void {
+ if (call.args.len == 0) return;
+ switch (call.args.at(0).data) {
+ .e_string => |str| {
+ call.args.mut(0).data = p.rewriteImportMetaHotAcceptString(str, call.args.at(0).loc) orelse
+ return;
+ },
+ .e_array => |arr| for (arr.items.slice()) |*item| {
+ if (item.data != .e_string) {
+ p.log.addError(p.source, item.loc, import_meta_hot_accept_err) catch bun.outOfMemory();
+ continue;
+ }
+ item.data = p.rewriteImportMetaHotAcceptString(item.data.e_string, item.loc) orelse
+ return;
+ },
+ else => return,
+ }
+
+ call.target.data.e_special = .hot_accept_visited;
+ }
+
+ fn rewriteImportMetaHotAcceptString(p: *P, str: *E.String, loc: logger.Loc) ?Expr.Data {
+ str.toUTF8(p.allocator) catch bun.outOfMemory();
+ const specifier = str.data;
+
+ const import_record_index = for (p.import_records.items, 0..) |import_record, i| {
+ if (bun.strings.eql(specifier, import_record.path.text)) {
+ break i;
+ }
+ } else {
+ p.log.addError(p.source, loc, import_meta_hot_accept_err) catch bun.outOfMemory();
+ return null;
+ };
+
+ return .{ .e_special = .{
+ .resolved_specifier_string = .init(@intCast(import_record_index)),
+ } };
+ }
+
const ReactRefreshExportKind = enum { named, default };
pub fn handleReactRefreshRegister(p: *P, stmts: *ListManaged(Stmt), original_name: []const u8, ref: Ref, export_kind: ReactRefreshExportKind) !void {
diff --git a/src/js_printer.zig b/src/js_printer.zig
index 589cea4c31..88f52fa037 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -2055,7 +2055,7 @@ fn NewPrinter(
p.addSourceMapping(expr.loc);
if (p.options.module_type == .internal_bake_dev) {
p.printSymbol(p.options.commonjs_module_ref);
- p.print(".importMeta()");
+ p.print(".importMeta");
} else if (!p.options.import_meta_ref.isValid()) {
// Most of the time, leave it in there
p.print("import.meta");
@@ -2116,21 +2116,43 @@ fn NewPrinter(
}
}
},
- .e_module_dot_exports => {
- p.printSpaceBeforeIdentifier();
- p.addSourceMapping(expr.loc);
+ .e_special => |special| switch (special) {
+ .module_exports => {
+ p.printSpaceBeforeIdentifier();
+ p.addSourceMapping(expr.loc);
- if (p.options.commonjs_module_exports_assigned_deoptimized) {
- if (p.options.commonjs_module_ref.isValid()) {
- p.printSymbol(p.options.commonjs_module_ref);
+ if (p.options.commonjs_module_exports_assigned_deoptimized) {
+ if (p.options.commonjs_module_ref.isValid()) {
+ p.printSymbol(p.options.commonjs_module_ref);
+ } else {
+ p.print("module");
+ }
+ p.print(".exports");
} else {
- p.print("module");
+ p.printSymbol(p.options.commonjs_named_exports_ref);
}
- p.print(".exports");
- } else {
- p.printSymbol(p.options.commonjs_named_exports_ref);
- }
+ },
+ .hot => {
+ bun.assert(p.options.module_type == .internal_bake_dev);
+ p.printSymbol(p.options.commonjs_module_ref);
+ p.print(".hot");
+ },
+ .hot_accept => {
+ bun.assert(p.options.module_type == .internal_bake_dev);
+ p.printSymbol(p.options.commonjs_module_ref);
+ p.print(".hot.accept");
+ },
+ .hot_accept_visited => {
+ bun.assert(p.options.module_type == .internal_bake_dev);
+ p.printSymbol(p.options.commonjs_module_ref);
+ p.print(".hot.acceptSpecifiers");
+ },
+ .resolved_specifier_string => |index| {
+ bun.assert(p.options.module_type == .internal_bake_dev);
+ p.printStringLiteralUTF8(p.importRecord(index.get()).path.pretty, true);
+ },
},
+
.e_commonjs_export_identifier => |id| {
p.printSpaceBeforeIdentifier();
p.addSourceMapping(expr.loc);
@@ -2275,6 +2297,8 @@ fn NewPrinter(
if (p.options.require_ref) |require_ref| {
p.printSymbol(require_ref);
p.print(".main");
+ } else if (p.options.module_type == .internal_bake_dev) {
+ p.print("false");
} else {
p.print("require.main");
}
@@ -2285,6 +2309,9 @@ fn NewPrinter(
if (p.options.require_ref) |require_ref| {
p.printSymbol(require_ref);
+ } else if (p.options.module_type == .internal_bake_dev) {
+ p.printSymbol(p.options.commonjs_module_ref);
+ p.print(".require");
} else {
p.print("require");
}
@@ -2296,6 +2323,9 @@ fn NewPrinter(
if (p.options.require_ref) |require_ref| {
p.printSymbol(require_ref);
p.print(".resolve");
+ } else if (p.options.module_type == .internal_bake_dev) {
+ p.printSymbol(p.options.commonjs_module_ref);
+ p.print(".require.resolve");
} else {
p.print("require.resolve");
}
@@ -3029,7 +3059,6 @@ fn NewPrinter(
.e_jsx_element,
.e_private_identifier,
- .e_template_part,
=> {
if (Environment.isDebug)
Output.panic("Unexpected expression of type .{s}", .{@tagName(expr.data)});
diff --git a/src/logger.zig b/src/logger.zig
index d0f1024d43..887911abfc 100644
--- a/src/logger.zig
+++ b/src/logger.zig
@@ -863,8 +863,10 @@ pub const Log = struct {
return self.appendToWithRecycled(other, source.contents_is_recycled);
}
- pub fn deinit(self: *Log) void {
- self.msgs.clearAndFree();
+ pub fn deinit(log: *Log) void {
+ log.msgs.clearAndFree();
+ // log.warnings = 0;
+ // log.errors = 0;
}
// TODO: remove `deinit` because it does not de-initialize the log; it clears it
diff --git a/src/output.zig b/src/output.zig
index f5318aca17..0730f89ac3 100644
--- a/src/output.zig
+++ b/src/output.zig
@@ -788,6 +788,8 @@ fn ScopedLogger(comptime tagname: []const u8, comptime disabled: bool) type {
return;
}
+ if (bun.crash_handler.isPanicking()) return;
+
if (Environment.enable_logs) ScopedDebugWriter.disable_inside_log += 1;
defer {
if (Environment.enable_logs)
diff --git a/test/bake/bake-harness.ts b/test/bake/bake-harness.ts
index 5c05881dba..d4c91f97c5 100644
--- a/test/bake/bake-harness.ts
+++ b/test/bake/bake-harness.ts
@@ -153,7 +153,7 @@ type ErrorSpec = string;
type FileObject = Record;
-export class Dev {
+export class Dev extends EventEmitter {
rootDir: string;
port: number;
baseUrl: string;
@@ -162,6 +162,8 @@ export class Dev {
options: { files: Record };
nodeEnv: "development" | "production";
+ socket?: WebSocket;
+
// These properties are not owned by this class
devProcess: Subprocess<"pipe", "pipe", "pipe">;
output: OutputLineStream;
@@ -174,6 +176,7 @@ export class Dev {
nodeEnv: "development" | "production",
options: DevServerTest,
) {
+ super();
this.rootDir = realpathSync(root);
this.port = port;
this.baseUrl = `http://localhost:${port}`;
@@ -186,6 +189,23 @@ export class Dev {
this.nodeEnv = nodeEnv;
}
+ connectSocket() {
+ const connected = Promise.withResolvers();
+ this.socket = new WebSocket(this.baseUrl + "/_bun/hmr");
+ this.socket.onmessage = (event) => {
+ const data = new Uint8Array(event.data as any);
+ if (data[0] === 'V'.charCodeAt(0)) {
+ this.socket!.send('sr');
+ connected.resolve();
+ }
+ if (data[0] === 'r'.charCodeAt(0)) {
+ this.emit("redundant_watch");
+ }
+ this.emit("hmr", data);
+ };
+ return connected.promise;
+ }
+
fetch(url: string, init?: RequestInit) {
return new DevFetchPromise(
(resolve, reject) => fetch(new URL(url, this.baseUrl).toString(), init).then(resolve, reject),
@@ -238,6 +258,37 @@ export class Dev {
await this.write(file, content, { dedent: false });
}
+ /**
+ * Deletes a file and waits for hot reload if in development mode
+ * @param file Path to the file to delete, relative to the root directory
+ * @param options Options for handling errors after deletion
+ * @returns Promise that resolves when the file is deleted and hot reload is complete (if applicable)
+ */
+ delete(file: string, options: { errors?: null | ErrorSpec[] } = {}) {
+ const snapshot = snapshotCallerLocation();
+ return withAnnotatedStack(snapshot, async () => {
+ await maybeWaitInteractive("delete " + file);
+ const isDev = this.nodeEnv === "development";
+ const wait = isDev && this.waitForHotReload();
+
+ const filePath = this.join(file);
+ if (!fs.existsSync(filePath)) {
+ throw new Error(`File ${file} does not exist`);
+ }
+
+ fs.unlinkSync(filePath);
+ await wait;
+
+ let errors = options.errors;
+ if (isDev && errors !== null) {
+ errors ??= [];
+ for (const client of this.connectedClients) {
+ await client.expectErrorOverlay(errors, null);
+ }
+ }
+ });
+ }
+
patch(
file: string,
{
@@ -275,8 +326,9 @@ export class Dev {
async waitForHotReload() {
if (this.nodeEnv !== "development") return Promise.resolve();
- const err = this.output.waitForLine(/error/i);
- const success = this.output.waitForLine(/bundled page|bundled route|reloaded/i);
+ const err = this.output.waitForLine(/error/i).catch(() => {});
+ const success = this.output.waitForLine(/bundled page|bundled route|reloaded/i).catch(() => {});
+ const ctrl = new AbortController();
await Promise.race([
// On failure, give a little time in case a partial write caused a
// bundling error, and a success came in.
@@ -285,7 +337,9 @@ export class Dev {
() => {},
),
success,
+ EventEmitter.once(this, "redundant_watch", { signal: ctrl.signal }),
]);
+ ctrl.abort();
}
async client(
@@ -1366,6 +1420,8 @@ function testImpl(
BUN_DUMP_STATE_ON_CRASH: "1",
NODE_ENV,
// BUN_DEBUG_SERVER: "1",
+ BUN_DEBUG_DEVSERVER: "1",
+ BUN_DEBUG_WATCHER: "1",
},
]),
stdio: ["pipe", "pipe", "pipe"],
@@ -1383,6 +1439,9 @@ function testImpl(
const port = parseInt((await stream.waitForLine(/localhost:(\d+)/))[1], 10);
// @ts-expect-error
const dev = new Dev(root, port, devProcess, stream, NODE_ENV, options);
+ if (dev.nodeEnv === "development") {
+ await dev.connectSocket();
+ }
await maybeWaitInteractive("start");
@@ -1416,8 +1475,8 @@ function testImpl(
const name = `${
NODE_ENV === "development" //
? Bun.enableANSIColors
- ? "\x1b[35mDEV\x1b[0m"
- : "DEV"
+ ? " \x1b[35mDEV\x1b[0m"
+ : " DEV"
: Bun.enableANSIColors
? "\x1b[36mPROD\x1b[0m"
: "PROD"
@@ -1477,7 +1536,10 @@ export function devTest(description: string, options: T
// Capture the caller name as part of the test tempdir
const callerLocation = snapshotCallerLocation();
const caller = stackTraceFileName(callerLocation);
- assert(caller.startsWith(devTestRoot), "dev server tests must be in test/bake/dev, not " + caller);
+ assert(
+ caller.startsWith(devTestRoot) || caller.includes("dev-and-prod"),
+ "dev server tests must be in test/bake/dev, not " + caller,
+ );
return testImpl(description, options, "development", caller);
}
@@ -1485,7 +1547,10 @@ export function devTest(description: string, options: T
export function prodTest(description: string, options: T): T {
const callerLocation = snapshotCallerLocation();
const caller = stackTraceFileName(callerLocation);
- assert(caller.startsWith(prodTestRoot), "dev server tests must be in test/bake/prod, not " + caller);
+ assert(
+ caller.startsWith(prodTestRoot) || caller.includes("dev-and-prod"),
+ "prod server tests must be in test/bake/prod, not " + caller,
+ );
return testImpl(description, options, "production", caller);
}
diff --git a/test/bake/client-fixture.mjs b/test/bake/client-fixture.mjs
index cd198a3c1a..18d205acd0 100644
--- a/test/bake/client-fixture.mjs
+++ b/test/bake/client-fixture.mjs
@@ -67,7 +67,29 @@ function createWindow(windowUrl) {
webSockets.push(this);
this.addEventListener("message", event => {
if (!allowWebSocketMessages) {
- console.error("[E] WebSocket message received while messages are not allowed");
+ const data = new Uint8Array(event.data);
+ console.error(
+ "[E] WebSocket message received while messages are not allowed. Event type",
+ JSON.stringify(String.fromCharCode(data[0])),
+ );
+ let hexDump = "";
+ for (let i = 0; i < data.length; i += 16) {
+ // Print offset
+ hexDump += "\x1b[2m" + i.toString(16).padStart(4, "0") + "\x1b[0m ";
+ // Print hex values
+ const chunk = data.slice(i, i + 16);
+ const hexValues = Array.from(chunk)
+ .map(b => b.toString(16).padStart(2, "0"))
+ .join(" ");
+ hexDump += hexValues.padEnd(48, " ");
+ // Print ASCII
+ hexDump += "\x1b[2m| \x1b[0m";
+ for (const byte of chunk) {
+ hexDump += byte >= 32 && byte <= 126 ? String.fromCharCode(byte) : "\x1b[2m.\x1b[0m";
+ }
+ hexDump += "\n";
+ }
+ console.error(hexDump);
process.exit(2);
}
});
diff --git a/test/bake/dev-and-prod.test.ts b/test/bake/dev-and-prod.test.ts
index 042c5e597c..71cf3deb7f 100644
--- a/test/bake/dev-and-prod.test.ts
+++ b/test/bake/dev-and-prod.test.ts
@@ -1,5 +1,5 @@
// Tests which apply to both dev and prod. They are run twice.
-import { devAndProductionTest, emptyHtmlFile } from "./bake-harness";
+import { devAndProductionTest, devTest, emptyHtmlFile } from "./bake-harness";
devAndProductionTest("define config via bunfig.toml", {
files: {
@@ -129,3 +129,65 @@ devAndProductionTest("inline script and styles appear", {
await c.style("body").backgroundColor.expect.toBe("red");
},
});
+// TODO: revive production
+devTest("using runtime import", {
+ files: {
+ "index.html": emptyHtmlFile({
+ styles: [],
+ scripts: ["index.ts"],
+ }),
+ "index.ts": `
+ // __using
+ {
+ using a = { [Symbol.dispose]: () => console.log("a") };
+ console.log("b");
+ }
+
+ // __legacyDecorateClassTS
+ function undefinedDecorator(target) {
+ console.log("decorator");
+ }
+ @undefinedDecorator
+ class x {}
+
+ // __require
+ const A = () => require;
+ const B = () => module.require;
+ const C = () => import.meta.require;
+ if (import.meta.hot) {
+ console.log(A.toString().replaceAll(" ", "").replaceAll("\\n", ""));
+ console.log(B.toString().replaceAll(" ", "").replaceAll("\\n", ""));
+ console.log(C.toString().replaceAll(" ", "").replaceAll("\\n", ""));
+ console.log(A() === eval("module.require"));
+ console.log(B() === eval("module.require"));
+ console.log(C() === eval("module.require"));
+ }
+ `,
+ "tsconfig.json": `
+ {
+ "compilerOptions": {
+ "experimentalDecorators": true
+ }
+ }
+ `,
+ },
+ async test(dev) {
+ await using c = await dev.client("/");
+ await c.expectMessage(
+ "b",
+ "a",
+ "decorator",
+ ...(dev.nodeEnv === "development"
+ ? [
+ //
+ "()=>module.require",
+ "()=>module.require",
+ "()=>module.importMeta.require",
+ true,
+ true,
+ true,
+ ]
+ : []),
+ );
+ },
+});
diff --git a/test/bake/dev/bundle.test.ts b/test/bake/dev/bundle.test.ts
index dac52dc123..8797582c52 100644
--- a/test/bake/dev/bundle.test.ts
+++ b/test/bake/dev/bundle.test.ts
@@ -228,3 +228,40 @@ devTest("directory cache bust case #17576", {
await c.expectMessage(456);
},
});
+devTest("deleting imported file shows error then recovers", {
+ files: {
+ "index.html": emptyHtmlFile({
+ styles: [],
+ scripts: ["index.ts"],
+ }),
+ "index.ts": `
+ import { value } from "./other";
+ console.log(value);
+ `,
+ "other.ts": `
+ export const value = 123;
+ `,
+ "unrelated.ts": `
+ export const value = 123;
+ `,
+ },
+ async test(dev) {
+ await using c = await dev.client("/");
+ await c.expectMessage(123);
+ await dev.delete("other.ts", {
+ errors: ['index.ts:1:23: error: Could not resolve: "./other"'],
+ });
+ await c.expectReload(async () => {
+ await dev.write(
+ "other.ts",
+ `
+ export const value = 456;
+ `,
+ );
+ });
+ await c.expectMessage(456);
+ await c.expectNoWebSocketActivity(async () => {
+ await dev.delete("unrelated.ts");
+ });
+ },
+});
diff --git a/test/bake/dev/html.test.ts b/test/bake/dev/html.test.ts
index 3db1604f50..6149104c5d 100644
--- a/test/bake/dev/html.test.ts
+++ b/test/bake/dev/html.test.ts
@@ -1,17 +1,12 @@
-// Bundle tests are tests concerning bundling bugs that only occur in DevServer.
-import { devTest } from "../bake-harness";
+// HTML tests are tests relating to HTML files themselves.
+import { devTest, emptyHtmlFile } from "../bake-harness";
devTest("html file is watched", {
files: {
- "index.html": `
-
-
-
- Hello
-
-
-
- `,
+ "index.html": emptyHtmlFile({
+ scripts: ["/script.ts"],
+ body: "Hello
",
+ }),
"script.ts": `
console.log("hello");
`,
diff --git a/test/bake/dev/dev-plugins.test.ts b/test/bake/dev/plugins.test.ts
similarity index 100%
rename from test/bake/dev/dev-plugins.test.ts
rename to test/bake/dev/plugins.test.ts
diff --git a/test/bundler/bun-build-api.test.ts b/test/bundler/bun-build-api.test.ts
index 5150dcab75..0524231ae0 100644
--- a/test/bundler/bun-build-api.test.ts
+++ b/test/bundler/bun-build-api.test.ts
@@ -232,6 +232,7 @@ describe("Bun.build", () => {
entrypoints: [join(import.meta.dir, "./fixtures/trivial/index.js")],
outdir,
});
+ console.log(await x.outputs[0].text());
const [blob] = x.outputs;
expect(blob).toBeTruthy();
expect(blob.type).toBe("text/javascript;charset=utf-8");
diff --git a/test/bundler/bundler_minify.test.ts b/test/bundler/bundler_minify.test.ts
index 8d07f3727c..7c51ffd5f1 100644
--- a/test/bundler/bundler_minify.test.ts
+++ b/test/bundler/bundler_minify.test.ts
@@ -502,4 +502,108 @@ describe("bundler", () => {
'+"æ"',
],
});
+ itBundled("minify/ImportMetaHotTreeShaking", {
+ files: {
+ "/entry.ts": `
+ capture(import.meta.hot);
+ if (import.meta.hot) {
+ console.log("This should be removed");
+ throw new Error("This should be removed");
+ }
+ if (import.meta.hot != undefined) {
+ console.log("This should be removed");
+ throw new Error("This should be removed");
+ }
+ capture("This should remain");
+ `,
+ },
+ outfile: "/out.js",
+ capture: ["void 0", '"This should remain"'],
+ minifySyntax: true,
+ onAfterBundle(api) {
+ api.expectFile("/out.js").not.toContain("console.log");
+ api.expectFile("/out.js").not.toContain("throw");
+ api.expectFile("/out.js").not.toContain("import.meta.hot");
+ },
+ });
+ itBundled("minify/ProductionMode", {
+ files: {
+ "/entry.jsx": `
+ import {foo} from 'dev-trap';
+ capture(process.env.NODE_ENV);
+ capture(1232 + 521)
+ console.log(Hello
);
+ `,
+ "/node_modules/react/jsx-dev-runtime.js": `
+ throw new Error("Should not use dev runtime");
+ `,
+ "/node_modules/react/jsx-runtime.js": `
+ export function jsx(type, props) {
+ return {type, props};
+ }
+ export const Fragment = Symbol.for("jsx-runtime");
+ `,
+ "/node_modules/dev-trap/package.json": `{
+ "name": "dev-trap",
+ "exports": {
+ "development": "./dev.js",
+ "default": "./prod.js"
+ }
+ }`,
+ "/node_modules/dev-trap/dev.js": `
+ throw new Error("FAIL");
+ `,
+ "/node_modules/dev-trap/prod.js": `
+ export const foo = "production";
+ `,
+ },
+ capture: ['"production"', "1753"],
+ production: true,
+ onAfterBundle(api) {
+ const output = api.readFile("out.js");
+
+ expect(output).not.toContain("FAIL");
+
+ // Check minification
+ expect(output).not.toContain("\t");
+ expect(output).not.toContain(" ");
+
+ // Check NODE_ENV is inlined
+ expect(output).toContain('"production"');
+ expect(output).not.toContain("process.env.NODE_ENV");
+
+ // Check JSX uses production runtime
+ expect(output).toContain("jsx-runtime");
+ },
+ });
+ itBundled("minify/UnusedInCommaExpression", {
+ files: {
+ "/entry.ts": `
+ let flag = computeSomethingUnknown();
+ // the expression 'flag === 1234' has no side effects
+ capture((flag === 1234 ? "a" : "b", "c"));
+ // 'flag == 1234' may invoke a side effect
+ capture((flag == 1234 ? "a" : "b", "c"));
+ // 'unbound' may invoke a side effect
+ capture((unbound ? "a" : "b", "c"));
+ // two side effects
+ capture((flag == 1234 ? "a" : unbound, "c"));
+ // two side effects 2
+ capture(([flag == 1234] ? unbound : other, "c"));
+ `,
+ },
+ minifySyntax: true,
+ capture: [
+ // 'flag' cannot throw on access or comparison via '==='
+ '"c"',
+ // 0 is inserted instead of 1234 because it is shorter and invokes the same coercion side effects
+ '(flag == 0, "c")',
+ // 'unbound' may throw on access
+ '(unbound, "c")',
+ // 0 is not inserted here because the result of 'flag == 1234' is used by the ternary
+ '(flag == 1234 || unbound, "c")',
+ // || is not inserted since the condition is always true, can simplify '1234' to '0'
+ '(flag == 0, unbound, "c")',
+ ],
+ });
});
diff --git a/test/bundler/cli.test.ts b/test/bundler/cli.test.ts
index 070c6cea98..98813c3a7b 100644
--- a/test/bundler/cli.test.ts
+++ b/test/bundler/cli.test.ts
@@ -171,3 +171,53 @@ test.skipIf(!isWindows)("should be able to handle pretty path on windows #13897"
});
expect(buildOut?.success).toBe(true);
});
+
+test("you can use --outfile=... and --sourcemap", () => {
+ const tmpdir = tmpdirSync();
+ const inputFile = path.join(tmpdir, "input.js");
+ const outFile = path.join(tmpdir, "out.js");
+
+ writeFileSync(inputFile, 'console.log("Hello, world!");');
+
+ const originalContent = fs.readFileSync(inputFile, "utf8");
+
+ const { exitCode, stdout } = Bun.spawnSync({
+ cmd: [bunExe(), "build", "--outfile=" + path.relative(tmpdir, outFile), "--sourcemap", inputFile],
+ env: bunEnv,
+ cwd: tmpdir,
+ });
+
+ expect(exitCode).toBe(0);
+
+ // Verify that the input file wasn't overwritten
+ expect(fs.readFileSync(inputFile, "utf8")).toBe(originalContent);
+
+ // Verify that the output file was created
+ expect(fs.existsSync(outFile)).toBe(true);
+
+ // Verify that the sourcemap file was created
+ expect(fs.existsSync(outFile + ".map")).toBe(true);
+
+ // Verify that the output file contains sourceMappingURL comment
+ const outputContent = fs.readFileSync(outFile, "utf8");
+ expect(outputContent).toContain("//# sourceMappingURL=out.js.map");
+
+ expect(stdout.toString()).toMatchInlineSnapshot();
+});
+
+test("some log cases", () => {
+ const tmpdir = tmpdirSync();
+ const inputFile = path.join(tmpdir, "input.js");
+ const outFile = path.join(tmpdir, "out.js");
+
+ writeFileSync(inputFile, 'console.log("Hello, world!");');
+
+ // absolute path
+ const { exitCode, stdout } = Bun.spawnSync({
+ cmd: [bunExe(), "build", "--outfile=" + outFile, "--sourcemap", inputFile],
+ env: bunEnv,
+ cwd: tmpdir,
+ });
+ expect(exitCode).toBe(0);
+ expect(stdout.toString()).toMatchInlineSnapshot();
+});
diff --git a/test/bundler/expectBundled.ts b/test/bundler/expectBundled.ts
index 52e5c024e5..ba04cc93fb 100644
--- a/test/bundler/expectBundled.ts
+++ b/test/bundler/expectBundled.ts
@@ -205,6 +205,7 @@ export interface BundlerTestInput {
sourceMap?: "inline" | "external" | "linked" | "none" | "linked";
plugins?: BunPlugin[] | ((builder: PluginBuilder) => void | Promise);
install?: string[];
+ production?: boolean;
// pass subprocess.env
env?: Record;
@@ -470,6 +471,7 @@ function expectBundled(
ignoreDCEAnnotations,
bytecode = false,
emitDCEAnnotations,
+ production,
// @ts-expect-error
_referenceFn,
expectExactFilesize,
@@ -638,7 +640,7 @@ function expectBundled(
typeof contents === "string"
? dedent(contents).replaceAll("{{root}}", root.replaceAll("\\", "\\\\"))
: contents;
- writeFileSync(filename, formattedContents);
+ writeFileSync(filename, formattedContents as any);
}
if (useDefineForClassFields !== undefined) {
@@ -719,6 +721,7 @@ function expectBundled(
loader && Object.entries(loader).map(([k, v]) => ["--loader", `${k}:${v}`]),
publicPath && `--public-path=${publicPath}`,
bytecode && "--bytecode",
+ production && "--production",
]
: [
ESBUILD_PATH,