Mirror of https://github.com/oven-sh/bun, synced 2026-02-02 23:18:47 +00:00

Compare commits (16 commits): claude/imp...zack/dev-s
| Author | SHA1 | Date |
|---|---|---|
| | 16131f92e1 | |
| | 59a4d0697b | |
| | 1d0984b5c4 | |
| | dfa93a8ede | |
| | c8773c5e30 | |
| | 0f74fafc59 | |
| | 47d6e161fe | |
| | 160625c37c | |
| | 1b9b686772 | |
| | 6f3e098bac | |
| | 4c6b296a7c | |
| | 2ab962bf6b | |
| | f556fc987c | |
| | 3a1b12ee61 | |
| | a952b4200e | |
| | 24485fb432 | |
@@ -3,6 +3,7 @@ packages/bun-usockets/src/crypto/sni_tree.cpp
 src/bake/BakeGlobalObject.cpp
 src/bake/BakeProduction.cpp
 src/bake/BakeSourceProvider.cpp
+src/bake/DevServerSourceProvider.cpp
 src/bun.js/bindings/ActiveDOMCallback.cpp
 src/bun.js/bindings/AsymmetricKeyValue.cpp
 src/bun.js/bindings/AsyncContextFrame.cpp
@@ -252,6 +252,12 @@ pub inline fn downcast(a: Allocator) ?*AllocationScope {
         null;
 }
 
+pub fn leakSlice(scope: *AllocationScope, memory: anytype) void {
+    if (comptime !enabled) return;
+    _ = @typeInfo(@TypeOf(memory)).pointer;
+    bun.assert(!scope.trackExternalFree(memory, null));
+}
+
 const std = @import("std");
 const Allocator = std.mem.Allocator;
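The new `leakSlice` helper exists so that debug builds can hand a tracked buffer across the FFI boundary without tripping the allocation tracker: the slice is deliberately forgotten rather than freed. A minimal toy sketch of the idea (`ToyScope` is illustrative, not bun's real `AllocationScope`):

```zig
const std = @import("std");

// Toy model of an allocation scope: it remembers what it allocated, and
// `leakSlice` un-registers a buffer whose ownership is moving elsewhere
// (here: to C++, which will free it with mi_free). Names are illustrative.
const ToyScope = struct {
    tracked: std.AutoHashMap(usize, usize),

    fn init(gpa: std.mem.Allocator) ToyScope {
        return .{ .tracked = std.AutoHashMap(usize, usize).init(gpa) };
    }
    fn deinit(self: *ToyScope) void {
        self.tracked.deinit();
    }
    fn track(self: *ToyScope, memory: []const u8) !void {
        try self.tracked.put(@intFromPtr(memory.ptr), memory.len);
    }
    // Ownership leaves Zig: stop tracking, but do not free.
    fn leakSlice(self: *ToyScope, memory: []const u8) void {
        _ = self.tracked.remove(@intFromPtr(memory.ptr));
    }
};

test "leaked slices are no longer reported" {
    var scope = ToyScope.init(std.testing.allocator);
    defer scope.deinit();
    const buf = "source map json";
    try scope.track(buf);
    scope.leakSlice(buf);
    try std.testing.expect(scope.tracked.count() == 0);
}
```

In the DevServer change further down, this is used when the rendered source-map JSON is handed to the C++ `DevServerSourceProvider`, which frees it with `mi_free`.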
@@ -1,5 +1,6 @@
 // clang-format off
 #include "BakeSourceProvider.h"
+#include "DevServerSourceProvider.h"
 #include "BakeGlobalObject.h"
 #include "JavaScriptCore/CallData.h"
 #include "JavaScriptCore/Completion.h"
@@ -78,6 +79,34 @@ extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatch(GlobalObject* global, BunS
     return JSC::JSValue::encode(result);
 }
 
+extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatchWithSourceMap(GlobalObject* global, BunString source, const char* sourceMapJSONPtr, size_t sourceMapJSONLength) {
+    JSC::VM& vm = global->vm();
+    auto scope = DECLARE_THROW_SCOPE(vm);
+
+    String string = "bake://server.patch.js"_s;
+    JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
+
+    // Use DevServerSourceProvider with the source map JSON
+    auto provider = DevServerSourceProvider::create(
+        global,
+        source.toWTFString(),
+        sourceMapJSONPtr,
+        sourceMapJSONLength,
+        origin,
+        WTFMove(string),
+        WTF::TextPosition(),
+        JSC::SourceProviderSourceType::Program
+    );
+
+    JSC::SourceCode sourceCode = JSC::SourceCode(provider);
+
+    JSC::JSValue result = vm.interpreter.executeProgram(sourceCode, global, global);
+    RETURN_IF_EXCEPTION(scope, {});
+
+    RELEASE_ASSERT(result);
+    return JSC::JSValue::encode(result);
+}
+
 extern "C" JSC::EncodedJSValue BakeGetModuleNamespace(
     JSC::JSGlobalObject* global,
     JSC::JSValue keyValue
@@ -839,6 +839,7 @@ fn onJsRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void {
         arena.allocator(),
         source_id.kind,
         dev.allocator,
+        .client,
     ) catch bun.outOfMemory();
     const response = StaticRoute.initFromAnyBlob(&.fromOwnedSlice(dev.allocator, json_bytes), .{
         .server = dev.server,
@@ -1181,7 +1182,7 @@ fn onFrameworkRequestWithBundle(
     const route_bundle = dev.routeBundlePtr(route_bundle_index);
     assert(route_bundle.data == .framework);
 
-    const bundle = &route_bundle.data.framework;
+    const framework_bundle = &route_bundle.data.framework;
 
     // Extract route params by re-matching the URL
     var params: FrameworkRouter.MatchedParams = undefined;
@@ -1238,9 +1239,9 @@ fn onFrameworkRequestWithBundle(
     const server_request_callback = dev.server_fetch_function_callback.get() orelse
         unreachable; // did not initialize server code
 
-    const router_type = dev.router.typePtr(dev.router.routePtr(bundle.route_index).type);
+    const router_type = dev.router.typePtr(dev.router.routePtr(framework_bundle.route_index).type);
 
-    dev.server.?.onRequestFromSaved(
+    dev.server.?.onSavedRequest(
         req,
         resp,
         server_request_callback,
@@ -1256,17 +1257,17 @@ fn onFrameworkRequestWithBundle(
             break :str str;
         },
         // routeModules
-        bundle.cached_module_list.get() orelse arr: {
+        framework_bundle.cached_module_list.get() orelse arr: {
            const global = dev.vm.global;
            const keys = dev.server_graph.bundled_files.keys();
            var n: usize = 1;
-           var route = dev.router.routePtr(bundle.route_index);
+           var route = dev.router.routePtr(framework_bundle.route_index);
            while (true) {
                if (route.file_layout != .none) n += 1;
                route = dev.router.routePtr(route.parent.unwrap() orelse break);
            }
            const arr = try JSValue.createEmptyArray(global, n);
-           route = dev.router.routePtr(bundle.route_index);
+           route = dev.router.routePtr(framework_bundle.route_index);
            {
                const relative_path_buf = bun.path_buffer_pool.get();
                defer bun.path_buffer_pool.put(relative_path_buf);
@@ -1287,11 +1288,11 @@ fn onFrameworkRequestWithBundle(
                }
                route = dev.router.routePtr(route.parent.unwrap() orelse break);
            }
-           bundle.cached_module_list = .create(arr, global);
+           framework_bundle.cached_module_list = .create(arr, global);
            break :arr arr;
        },
        // clientId
-       bundle.cached_client_bundle_url.get() orelse str: {
+       framework_bundle.cached_client_bundle_url.get() orelse str: {
            const bundle_index: u32 = route_bundle_index.get();
            const generation: u32 = route_bundle.client_script_generation;
            const str = bun.String.createFormat(client_prefix ++ "/route-{}{}.js", .{
@@ -1300,13 +1301,13 @@ fn onFrameworkRequestWithBundle(
            }) catch bun.outOfMemory();
            defer str.deref();
            const js = str.toJS(dev.vm.global);
-           bundle.cached_client_bundle_url = .create(js, dev.vm.global);
+           framework_bundle.cached_client_bundle_url = .create(js, dev.vm.global);
            break :str js;
        },
        // styles
-       bundle.cached_css_file_array.get() orelse arr: {
+       framework_bundle.cached_css_file_array.get() orelse arr: {
            const js = dev.generateCssJSArray(route_bundle) catch bun.outOfMemory();
-           bundle.cached_css_file_array = .create(js, dev.vm.global);
+           framework_bundle.cached_css_file_array = .create(js, dev.vm.global);
            break :arr js;
        },
        // params
@@ -1476,7 +1477,7 @@ fn generateJavaScriptCodeForHTMLFile(
 
 pub fn onJsRequestWithBundle(dev: *DevServer, bundle_index: RouteBundle.Index, resp: AnyResponse, method: bun.http.Method) void {
     const route_bundle = dev.routeBundlePtr(bundle_index);
-    const blob = route_bundle.client_bundle orelse generate: {
+    const client_bundle = route_bundle.client_bundle orelse generate: {
        const payload = dev.generateClientBundle(route_bundle) catch bun.outOfMemory();
        errdefer dev.allocator.free(payload);
        route_bundle.client_bundle = StaticRoute.initFromAnyBlob(
@@ -1489,7 +1490,7 @@ pub fn onJsRequestWithBundle(dev: *DevServer, bundle_index: RouteBundle.Index, r
        break :generate route_bundle.client_bundle.?;
    };
    dev.source_maps.addWeakRef(route_bundle.sourceMapId());
-   blob.onWithMethod(method, resp);
+   client_bundle.onWithMethod(method, resp);
 }
 
 pub fn onSrcRequest(dev: *DevServer, req: *uws.Request, resp: anytype) void {
@@ -2266,14 +2267,65 @@ pub fn finalizeBundle(
 
     // Load all new chunks into the server runtime.
     if (!dev.frontend_only and dev.server_graph.current_chunk_len > 0) {
-        const server_bundle = try dev.server_graph.takeJSBundle(&.{ .kind = .hmr_chunk });
+        // Generate a script_id for server bundles.
+        // The high bit is set to distinguish them from client bundles, and the generation counter is included.
+        const server_script_id = SourceMapStore.Key.init((1 << 63) | @as(u64, dev.generation));
+
+        // Get the source map if available and render it to JSON.
+        var source_map_json = if (dev.server_graph.current_chunk_source_maps.items.len > 0) json: {
+            // Create a temporary source map entry to render.
+            var source_map_entry = SourceMapStore.Entry{
+                .ref_count = 1,
+                .paths = &.{},
+                .files = .empty,
+                .overlapping_memory_cost = 0,
+            };
+
+            // Fill the source map entry.
+            var arena = std.heap.ArenaAllocator.init(dev.allocator);
+            defer arena.deinit();
+            try dev.server_graph.takeSourceMap(arena.allocator(), dev.allocator, &source_map_entry);
+            defer {
+                source_map_entry.ref_count = 0;
+                source_map_entry.deinit(dev);
+            }
+
+            const json_data = try source_map_entry.renderJSON(
+                dev,
+                arena.allocator(),
+                .hmr_chunk,
+                dev.allocator,
+                .server,
+            );
+            break :json json_data;
+        } else null;
+        defer if (source_map_json) |json| bun.default_allocator.free(json);
+
+        const server_bundle = try dev.server_graph.takeJSBundle(&.{
+            .kind = .hmr_chunk,
+            .script_id = server_script_id,
+        });
         defer dev.allocator.free(server_bundle);
 
-        const server_modules = c.BakeLoadServerHmrPatch(@ptrCast(dev.vm.global), bun.String.cloneLatin1(server_bundle)) catch |err| {
-            // No user code has been evaluated yet, since everything is to
-            // be wrapped in a function closure. This means that the likely
-            // error is going to be a syntax error, or other mistake in the
-            // bundler.
+        const server_modules = if (bun.take(&source_map_json)) |json| blk: {
+            // This memory will be owned by the `DevServerSourceProvider` in C++
+            // from here on out.
+            dev.allocation_scope.leakSlice(json);
+
+            break :blk c.BakeLoadServerHmrPatchWithSourceMap(
+                @ptrCast(dev.vm.global),
+                bun.String.cloneUTF8(server_bundle),
+                json.ptr,
+                json.len,
+            ) catch |err| {
+                // No user code has been evaluated yet, since everything is to
+                // be wrapped in a function closure. This means that the likely
+                // error is going to be a syntax error, or other mistake in the
+                // bundler.
+                dev.vm.printErrorLikeObjectToConsole(dev.vm.global.takeException(err));
+                @panic("Error thrown while evaluating server code. This is always a bug in the bundler.");
+            };
+        } else c.BakeLoadServerHmrPatch(@ptrCast(dev.vm.global), bun.String.cloneLatin1(server_bundle)) catch |err| {
            dev.vm.printErrorLikeObjectToConsole(dev.vm.global.takeException(err));
            @panic("Error thrown while evaluating server code. This is always a bug in the bundler.");
        };
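The `script_id` scheme above packs two facts into one 64-bit key: bit 63 marks a server bundle so it can never collide with a client route-bundle key, and the low bits carry `dev.generation`. A small sketch of the bit layout (assuming `SourceMapStore.Key` wraps a `u64`, as the `.init` call suggests):

```zig
const std = @import("std");

// Server bundle keys set bit 63, so they stay disjoint from client
// route-bundle keys; the generation counter occupies the low bits.
fn serverScriptId(generation: u32) u64 {
    return (1 << 63) | @as(u64, generation);
}

test "server ids carry the high bit and the generation" {
    const id = serverScriptId(7);
    try std.testing.expect((id >> 63) == 1);
    try std.testing.expectEqual(@as(u64, 7), id & std.math.maxInt(u63));
}
```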
@@ -3595,6 +3647,11 @@ const c = struct {
        return bun.jsc.fromJSHostCall(global, @src(), f, .{ global, code });
    }
 
+   fn BakeLoadServerHmrPatchWithSourceMap(global: *jsc.JSGlobalObject, code: bun.String, source_map_json_ptr: [*]const u8, source_map_json_len: usize) bun.JSError!JSValue {
+       const f = @extern(*const fn (*jsc.JSGlobalObject, bun.String, [*]const u8, usize) callconv(.c) JSValue, .{ .name = "BakeLoadServerHmrPatchWithSourceMap" }).*;
+       return bun.jsc.fromJSHostCall(global, @src(), f, .{ global, code, source_map_json_ptr, source_map_json_len });
+   }
+
    fn BakeLoadInitialServerCode(global: *jsc.JSGlobalObject, code: bun.String, separate_ssr_graph: bool) bun.JSError!JSValue {
        const f = @extern(*const fn (*jsc.JSGlobalObject, bun.String, bool) callconv(.c) JSValue, .{ .name = "BakeLoadInitialServerCode" }).*;
        return bun.jsc.fromJSHostCall(global, @src(), f, .{ global, code, separate_ssr_graph });
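These `c` struct wrappers bind the C++ entry points by symbol name: `@extern` declares a pointer to a C-calling-convention function, and `fromJSHostCall` appears to adapt thrown JS exceptions into Zig errors. A stripped-down sketch of the `@extern` half, with a hypothetical symbol (it links only if such a symbol actually exists):

```zig
// Sketch: binding a C-ABI symbol by name at link time. The pointed-to
// function type must match the C++ signature exactly; a mismatch is
// undefined behavior, not a compile error.
const my_c_function = @extern(
    *const fn (i32) callconv(.c) i32,
    .{ .name = "my_c_function" }, // hypothetical exported symbol
);

fn callIt(x: i32) i32 {
    return my_c_function(x);
}
```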
@@ -76,6 +76,12 @@ pub fn IncrementalGraph(side: bake.Side) type {
            .server => void,
        },
 
+       /// Source maps for server chunks
+       current_chunk_source_maps: if (side == .server) ArrayListUnmanaged(PackedMap.RefOrEmpty) else void = if (side == .server) .empty,
+
+       /// File indices for server chunks, tracking which file each chunk comes from
+       current_chunk_file_indices: if (side == .server) ArrayListUnmanaged(FileIndex) else void = if (side == .server) .empty,
+
        pub const empty: @This() = .{
            .bundled_files = .empty,
            .stale_files = .empty,
@@ -89,6 +95,8 @@ pub fn IncrementalGraph(side: bake.Side) type {
            .current_chunk_parts = .empty,
 
            .current_css_files = if (side == .client) .empty,
+           .current_chunk_source_maps = if (side == .server) .empty else {},
+           .current_chunk_file_indices = if (side == .server) .empty else {},
        };
 
        pub const File = switch (side) {
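The `if (side == .server) T else void` fields above are the comptime trick that keeps server-only state out of the client graph entirely: when the condition is false the field's type is `void`, so it occupies no space and its initializer is a no-op. A self-contained sketch of the technique:

```zig
const std = @import("std");

const Side = enum { client, server };

fn Graph(comptime side: Side) type {
    return struct {
        len: usize = 0,
        // Present only on the server; zero-sized `void` on the client.
        source_maps: if (side == .server) std.ArrayListUnmanaged(u32) else void =
            if (side == .server) .{} else {},
    };
}

test "server-only field is zero-sized on the client" {
    var server: Graph(.server) = .{};
    defer server.source_maps.deinit(std.testing.allocator);
    try server.source_maps.append(std.testing.allocator, 42);
    try std.testing.expectEqual(@as(usize, 1), server.source_maps.items.len);

    const client: Graph(.client) = .{};
    _ = client.source_maps; // type is void; nothing to store
}
```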
@@ -324,6 +332,13 @@ pub fn IncrementalGraph(side: bake.Side) type {
            .current_chunk_len = {},
            .current_chunk_parts = g.current_chunk_parts.deinit(allocator),
            .current_css_files = if (side == .client) g.current_css_files.deinit(allocator),
+           .current_chunk_source_maps = if (side == .server) {
+               for (g.current_chunk_source_maps.items) |source_map| {
+                   source_map.deref(&g.owner().*);
+               }
+               g.current_chunk_source_maps.deinit(allocator);
+           },
+           .current_chunk_file_indices = if (side == .server) g.current_chunk_file_indices.deinit(allocator),
        };
    }
@@ -356,6 +371,14 @@ pub fn IncrementalGraph(side: bake.Side) type {
                    .empty => {},
                }
            }
+       } else if (side == .server) {
+           graph += DevServer.memoryCostArrayList(g.current_chunk_source_maps);
+           graph += DevServer.memoryCostArrayList(g.current_chunk_file_indices);
+           for (g.current_chunk_source_maps.items) |source_map| {
+               if (source_map == .ref) {
+                   source_maps += source_map.ref.data.memoryCostWithDedupe(new_dedupe_bits);
+               }
+           }
        }
        return .{
            .graph = graph,
@@ -488,7 +511,7 @@ pub fn IncrementalGraph(side: bake.Side) type {
        .js => |js| {
            // Insert new source map or patch existing empty source map.
            const source_map: PackedMap.RefOrEmpty = brk: {
-               if (js.source_map) |source_map| {
+               if (js.source_map) |*source_map| {
                    bun.debugAssert(!flags.is_html_route); // suspect behind #17956
                    if (source_map.chunk.buffer.len() > 0) {
                        flags.source_map_state = .ref;
@@ -585,12 +608,40 @@ pub fn IncrementalGraph(side: bake.Side) type {
        if (content == .js) {
            try g.current_chunk_parts.append(dev.allocator, content.js.code);
            g.current_chunk_len += content.js.code.len;
-           if (content.js.source_map) |source_map| {
-               var take = source_map.chunk.buffer;
-               take.deinit();
-               if (source_map.escaped_source) |escaped_source| {
-                   bun.default_allocator.free(escaped_source);
+
+           // Track the file index for this chunk
+           try g.current_chunk_file_indices.append(dev.allocator, file_index);
+
+           // TODO: we probably want to store SSR chunks but not
+           // server chunks, but not 100% sure
+           const should_immediately_free_sourcemap = false;
+           if (should_immediately_free_sourcemap) {
+               if (content.js.source_map) |source_map| {
+                   var take = source_map.chunk.buffer;
+                   take.deinit();
+                   if (source_map.escaped_source) |escaped_source| {
+                       bun.default_allocator.free(escaped_source);
+                   }
+               }
+           } else {
+               if (content.js.source_map) |source_map| append_empty: {
+                   const packed_map = PackedMap.newNonEmpty(source_map.chunk, source_map.escaped_source orelse break :append_empty);
+                   try g.current_chunk_source_maps.append(dev.allocator, .{
+                       .ref = packed_map,
+                   });
+                   return;
+               }
+
+               // Must precompute this. Otherwise, source maps won't have
+               // the info needed to concatenate VLQ mappings.
+               const count: u32 = @intCast(bun.strings.countChar(content.js.code, '\n'));
+               try g.current_chunk_source_maps.append(dev.allocator, PackedMap.RefOrEmpty{
+                   .empty = .{
+                       .line_count = .init(count),
+                       // TODO: not sure if this is correct
+                       .html_bundle_route_index = .none,
+                   },
+               });
            }
        }
    },
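The "must precompute this" comment is about concatenation: when chunks are joined into one output file, each chunk's mappings must be shifted down by the number of lines emitted before it, so even a chunk without a source map has to report an accurate line count. A sketch of the accumulation:

```zig
const std = @import("std");

// When chunks are concatenated, each chunk's mappings shift down by the
// number of lines emitted before it. A chunk without a map still
// contributes its line count, mirroring the `countChar(code, '\n')` above.
fn lineCount(code: []const u8) u32 {
    return @intCast(std.mem.count(u8, code, "\n"));
}

test "offsets accumulate across chunks" {
    const chunks = [_][]const u8{ "a();\nb();\n", "c();\n" };
    var offset: u32 = 0;
    var offsets: [2]u32 = undefined;
    for (chunks, 0..) |chunk, i| {
        offsets[i] = offset; // generated_line shift for this chunk's mappings
        offset += lineCount(chunk);
    }
    try std.testing.expectEqual(@as(u32, 0), offsets[0]);
    try std.testing.expectEqual(@as(u32, 2), offsets[1]);
}
```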
@@ -1577,7 +1628,12 @@ pub fn IncrementalGraph(side: bake.Side) type {
        g.owner().graph_safety_lock.assertLocked();
        g.current_chunk_len = 0;
        g.current_chunk_parts.clearRetainingCapacity();
-       if (side == .client) g.current_css_files.clearRetainingCapacity();
+       if (side == .client) {
+           g.current_css_files.clearRetainingCapacity();
+       } else if (side == .server) {
+           g.current_chunk_source_maps.clearRetainingCapacity();
+           g.current_chunk_file_indices.clearRetainingCapacity();
+       }
    }
 
    const TakeJSBundleOptions = switch (side) {
@@ -1590,6 +1646,7 @@ pub fn IncrementalGraph(side: bake.Side) type {
        },
        .server => struct {
            kind: ChunkKind,
+           script_id: SourceMapStore.Key,
        },
    };
@@ -1732,44 +1789,70 @@ pub fn IncrementalGraph(side: bake.Side) type {
    };
 
    /// Uses `arena` as a temporary allocator, fills in all fields of `out` except ref_count
-   pub fn takeSourceMap(g: *@This(), arena: std.mem.Allocator, gpa: Allocator, out: *SourceMapStore.Entry) bun.OOM!void {
-       if (side == .server) @compileError("not implemented");
-
+   pub fn takeSourceMap(g: *@This(), _: std.mem.Allocator, gpa: Allocator, out: *SourceMapStore.Entry) bun.OOM!void {
        const paths = g.bundled_files.keys();
-       const files = g.bundled_files.values();
-
-       // This buffer is temporary, holding the quoted source paths, joined with commas.
-       var source_map_strings = std.ArrayList(u8).init(arena);
-       defer source_map_strings.deinit();
-
-       const buf = bun.path_buffer_pool.get();
-       defer bun.path_buffer_pool.put(buf);
-
-       var file_paths = try ArrayListUnmanaged([]const u8).initCapacity(gpa, g.current_chunk_parts.items.len);
-       errdefer file_paths.deinit(gpa);
-       var contained_maps: bun.MultiArrayList(PackedMap.RefOrEmpty) = .empty;
-       try contained_maps.ensureTotalCapacity(gpa, g.current_chunk_parts.items.len);
-       errdefer contained_maps.deinit(gpa);
-
-       var overlapping_memory_cost: u32 = 0;
-
-       for (g.current_chunk_parts.items) |file_index| {
-           file_paths.appendAssumeCapacity(paths[file_index.get()]);
-           const source_map = files[file_index.get()].sourceMap();
-           contained_maps.appendAssumeCapacity(source_map.dupeRef());
-           if (source_map == .ref) {
-               overlapping_memory_cost += @intCast(source_map.ref.data.memoryCost());
-           }
+       switch (side) {
+           .client => {
+               const files = g.bundled_files.values();
+
+               const buf = bun.path_buffer_pool.get();
+               defer bun.path_buffer_pool.put(buf);
+
+               var file_paths = try ArrayListUnmanaged([]const u8).initCapacity(gpa, g.current_chunk_parts.items.len);
+               errdefer file_paths.deinit(gpa);
+               var contained_maps: bun.MultiArrayList(PackedMap.RefOrEmpty) = .empty;
+               try contained_maps.ensureTotalCapacity(gpa, g.current_chunk_parts.items.len);
+               errdefer contained_maps.deinit(gpa);
+
+               var overlapping_memory_cost: u32 = 0;
+
+               for (g.current_chunk_parts.items) |file_index| {
+                   file_paths.appendAssumeCapacity(paths[file_index.get()]);
+                   const source_map = files[file_index.get()].sourceMap();
+                   contained_maps.appendAssumeCapacity(source_map.dupeRef());
+                   if (source_map == .ref) {
+                       overlapping_memory_cost += @intCast(source_map.ref.data.memoryCost());
+                   }
+               }
+
+               overlapping_memory_cost += @intCast(contained_maps.memoryCost() + DevServer.memoryCostSlice(file_paths.items));
+
+               out.* = .{
+                   .ref_count = out.ref_count,
+                   .paths = file_paths.items,
+                   .files = contained_maps,
+                   .overlapping_memory_cost = overlapping_memory_cost,
+               };
+           },
+           .server => {
+               var file_paths = try ArrayListUnmanaged([]const u8).initCapacity(gpa, g.current_chunk_parts.items.len);
+               errdefer file_paths.deinit(gpa);
+               var contained_maps: bun.MultiArrayList(PackedMap.RefOrEmpty) = .empty;
+               try contained_maps.ensureTotalCapacity(gpa, g.current_chunk_parts.items.len);
+               errdefer contained_maps.deinit(gpa);
+
+               var overlapping_memory_cost: u32 = 0;
+
+               // For server, we use the tracked file indices to get the correct paths
+               for (g.current_chunk_file_indices.items, g.current_chunk_source_maps.items) |file_index, source_map| {
+                   file_paths.appendAssumeCapacity(paths[file_index.get()]);
+                   contained_maps.appendAssumeCapacity(source_map.dupeRef());
+                   if (source_map == .ref) {
+                       overlapping_memory_cost += @intCast(source_map.ref.data.memoryCost());
+                   }
+               }
+
+               overlapping_memory_cost += @intCast(contained_maps.memoryCost() + DevServer.memoryCostSlice(file_paths.items));
+
+               out.* = .{
+                   .ref_count = out.ref_count,
+                   .paths = file_paths.items,
+                   .files = contained_maps,
+                   .overlapping_memory_cost = overlapping_memory_cost,
+               };
+           },
        }
-
-       overlapping_memory_cost += @intCast(contained_maps.memoryCost() + DevServer.memoryCostSlice(file_paths.items));
-
-       out.* = .{
-           .ref_count = out.ref_count,
-           .paths = file_paths.items,
-           .files = contained_maps,
-           .overlapping_memory_cost = overlapping_memory_cost,
-       };
    }
 
    fn disconnectAndDeleteFile(g: *@This(), file_index: FileIndex) void {
@@ -75,11 +75,11 @@ pub const Entry = struct {
    pub fn renderMappings(map: Entry, kind: ChunkKind, arena: Allocator, gpa: Allocator) ![]u8 {
        var j: StringJoiner = .{ .allocator = arena };
        j.pushStatic("AAAA");
-       try joinVLQ(&map, kind, &j, arena);
+       try joinVLQ(&map, kind, &j, arena, .client);
        return j.done(gpa);
    }
 
-   pub fn renderJSON(map: *const Entry, dev: *DevServer, arena: Allocator, kind: ChunkKind, gpa: Allocator) ![]u8 {
+   pub fn renderJSON(map: *const Entry, dev: *DevServer, arena: Allocator, kind: ChunkKind, gpa: Allocator, side: bake.Side) ![]u8 {
        const map_files = map.files.slice();
        const paths = map.paths;
@@ -105,13 +105,22 @@ pub const Entry = struct {
        if (std.fs.path.isAbsolute(path)) {
            const is_windows_drive_path = Environment.isWindows and path[0] != '/';
-           try source_map_strings.appendSlice(if (is_windows_drive_path)
-               "\"file:///"
-           else
-               "\"file://");
+
+           // On the client we prefix the sourcemap path with "file://" and
+           // percent encode it
+           if (side == .client) {
+               try source_map_strings.appendSlice(if (is_windows_drive_path)
+                   "\"file:///"
+               else
+                   "\"file://");
+           } else {
+               try source_map_strings.append('"');
+           }
 
            if (Environment.isWindows and !is_windows_drive_path) {
                // UNC namespace -> file://server/share/path.ext
-               bun.strings.percentEncodeWrite(
+               encodeSourceMapPath(
+                   side,
                    if (path.len > 2 and path[0] == '/' and path[1] == '/')
                        path[2..]
                    else
@@ -126,7 +135,7 @@ pub const Entry = struct {
            // -> file:///path/to/file.js
            // windows drive letter paths have the extra slash added
            // -> file:///C:/path/to/file.js
-           bun.strings.percentEncodeWrite(path, &source_map_strings) catch |err| switch (err) {
+           encodeSourceMapPath(side, path, &source_map_strings) catch |err| switch (err) {
                error.IncompleteUTF8 => @panic("Unexpected: asset with incomplete UTF-8 as file path"),
                error.OutOfMemory => |e| return e,
            };
@@ -174,14 +183,14 @@ pub const Entry = struct {
        j.pushStatic(
            \\],"names":[],"mappings":"AAAA
        );
-       try joinVLQ(map, kind, &j, arena);
+       try joinVLQ(map, kind, &j, arena, side);
 
        const json_bytes = try j.doneWithEnd(gpa, "\"}");
        errdefer @compileError("last try should be the final alloc");
 
        if (bun.FeatureFlags.bake_debugging_features) if (dev.dump_dir) |dump_dir| {
-           const rel_path_escaped = "latest_chunk.js.map";
-           dumpBundle(dump_dir, .client, rel_path_escaped, json_bytes, false) catch |err| {
+           const rel_path_escaped = if (side == .client) "latest_chunk.js.map" else "latest_hmr.js.map";
+           dumpBundle(dump_dir, if (side == .client) .client else .server, rel_path_escaped, json_bytes, false) catch |err| {
                bun.handleErrorReturnTrace(err, @errorReturnTrace());
                Output.warn("Could not dump bundle: {}", .{err});
            };
@@ -190,13 +199,22 @@ pub const Entry = struct {
        return json_bytes;
    }
 
-   fn joinVLQ(map: *const Entry, kind: ChunkKind, j: *StringJoiner, arena: Allocator) !void {
-       const map_files = map.files.slice();
-
-       const runtime: bake.HmrRuntime = switch (kind) {
-           .initial_response => bun.bake.getHmrRuntime(.client),
-           .hmr_chunk => comptime .init("self[Symbol.for(\"bun:hmr\")]({\n"),
-       };
+   fn encodeSourceMapPath(
+       side: bake.Side,
+       utf8_input: []const u8,
+       writer: *std.ArrayList(u8),
+   ) error{ OutOfMemory, IncompleteUTF8 }!void {
+       // On the client, percent encode everything so it works in the browser
+       if (side == .client) {
+           return bun.strings.percentEncodeWrite(utf8_input, writer);
+       }
+
+       // On the server, we don't need to do anything
+       try writer.appendSlice(utf8_input);
+   }
+
+   fn joinVLQ(map: *const Entry, kind: ChunkKind, j: *StringJoiner, arena: Allocator, side: bake.Side) !void {
+       const map_files = map.files.slice();
 
        var prev_end_state: SourceMap.SourceMapState = .{
            .generated_line = 0,
@@ -206,8 +224,20 @@ pub const Entry = struct {
            .original_column = 0,
        };
 
-       // +2 because the magic fairy in my dreams said it would align the source maps.
-       var lines_between: u32 = runtime.line_count + 2;
+       var lines_between: u32 = lines_between: {
+           if (side == .client) {
+               const runtime: bake.HmrRuntime = switch (kind) {
+                   .initial_response => bun.bake.getHmrRuntime(.client),
+                   .hmr_chunk => comptime .init("self[Symbol.for(\"bun:hmr\")]({\n"),
+               };
+               // +2 because the magic fairy in my dreams said it would align the source maps.
+               // TODO: why the fuck is this 2?
+               const lines_between: u32 = runtime.line_count + 2;
+               break :lines_between lines_between;
+           }
+
+           break :lines_between 0;
+       };
 
        // Join all of the mappings together.
        for (map_files.items(.tags), map_files.items(.data), 1..) |tag, chunk, source_index| switch (tag) {
@@ -223,7 +253,7 @@ pub const Entry = struct {
                continue;
            },
            .ref => {
-               const content = chunk.ref.data;
+               const content: *PackedMap = chunk.ref.data;
                const start_state: SourceMap.SourceMapState = .{
                    .source_index = @intCast(source_index),
                    .generated_line = @intCast(lines_between),
src/bake/DevServerSourceProvider.cpp (new file, 17 lines)
@@ -0,0 +1,17 @@
+#include "DevServerSourceProvider.h"
+#include "BunBuiltinNames.h"
+#include "BunString.h"
+
+// The Zig implementation will be provided to handle registration
+extern "C" void Bun__addDevServerSourceProvider(void* bun_vm, Bake::DevServerSourceProvider* opaque_source_provider, BunString* specifier);
+
+// Export functions for Zig to access DevServerSourceProvider
+extern "C" BunString DevServerSourceProvider__getSourceSlice(Bake::DevServerSourceProvider* provider)
+{
+    return Bun::toStringView(provider->source());
+}
+
+extern "C" Bake::SourceMapData DevServerSourceProvider__getSourceMapJSON(Bake::DevServerSourceProvider* provider)
+{
+    return provider->sourceMapJSON();
+}
src/bake/DevServerSourceProvider.h (new file, 118 lines)
@@ -0,0 +1,118 @@
+#pragma once
+#include "root.h"
+#include "headers-handwritten.h"
+#include "JavaScriptCore/SourceOrigin.h"
+#include "ZigGlobalObject.h"
+#include <mimalloc.h>
+
+namespace Bake {
+
+class DevServerSourceProvider;
+
+class SourceMapJSONString {
+public:
+    SourceMapJSONString(const char* ptr, size_t length)
+        : m_ptr(ptr)
+        , m_length(length)
+    {
+    }
+
+    ~SourceMapJSONString()
+    {
+        if (m_ptr) {
+            mi_free(const_cast<char*>(m_ptr));
+        }
+    }
+
+    // Delete copy constructor and assignment operator to prevent double free
+    SourceMapJSONString(const SourceMapJSONString&) = delete;
+    SourceMapJSONString& operator=(const SourceMapJSONString&) = delete;
+
+    // Move constructor and assignment
+    SourceMapJSONString(SourceMapJSONString&& other) noexcept
+        : m_ptr(other.m_ptr)
+        , m_length(other.m_length)
+    {
+        other.m_ptr = nullptr;
+        other.m_length = 0;
+    }
+
+    SourceMapJSONString& operator=(SourceMapJSONString&& other) noexcept
+    {
+        if (this != &other) {
+            if (m_ptr) {
+                mi_free(const_cast<char*>(m_ptr));
+            }
+            m_ptr = other.m_ptr;
+            m_length = other.m_length;
+            other.m_ptr = nullptr;
+            other.m_length = 0;
+        }
+        return *this;
+    }
+
+    const char* ptr() const { return m_ptr; }
+    size_t length() const { return m_length; }
+
+private:
+    const char* m_ptr;
+    size_t m_length;
+};
+
+// Struct to return source map data to Zig
+struct SourceMapData {
+    const char* ptr;
+    size_t length;
+};
+
+// Function to be implemented in Zig to register the source provider
+extern "C" void Bun__addDevServerSourceProvider(void* bun_vm, DevServerSourceProvider* opaque_source_provider, BunString* specifier);
+
+class DevServerSourceProvider final : public JSC::StringSourceProvider {
+public:
+    static Ref<DevServerSourceProvider> create(
+        JSC::JSGlobalObject* globalObject,
+        const String& source,
+        const char* sourceMapJSONPtr,
+        size_t sourceMapJSONLength,
+        const JSC::SourceOrigin& sourceOrigin,
+        String&& sourceURL,
+        const TextPosition& startPosition,
+        JSC::SourceProviderSourceType sourceType)
+    {
+        auto provider = adoptRef(*new DevServerSourceProvider(source, sourceMapJSONPtr, sourceMapJSONLength, sourceOrigin, WTFMove(sourceURL), startPosition, sourceType));
+        auto* zigGlobalObject = jsCast<::Zig::GlobalObject*>(globalObject);
+        auto specifier = Bun::toString(provider->sourceURL());
+        Bun__addDevServerSourceProvider(zigGlobalObject->bunVM(), provider.ptr(), &specifier);
+        return provider;
+    }
+
+    SourceMapData sourceMapJSON() const
+    {
+        return SourceMapData { m_sourceMapJSON.ptr(), m_sourceMapJSON.length() };
+    }
+
+private:
+    DevServerSourceProvider(
+        const String& source,
+        const char* sourceMapJSONPtr,
+        size_t sourceMapJSONLength,
+        const JSC::SourceOrigin& sourceOrigin,
+        String&& sourceURL,
+        const TextPosition& startPosition,
+        JSC::SourceProviderSourceType sourceType)
+        : StringSourceProvider(
+              source,
+              sourceOrigin,
+              JSC::SourceTaintedOrigin::Untainted,
+              WTFMove(sourceURL),
+              startPosition,
+              sourceType)
+        , m_sourceMapJSON(sourceMapJSONPtr, sourceMapJSONLength)
+    {
+    }
+
+    SourceMapJSONString m_sourceMapJSON;
+};
+
+} // namespace Bake
@@ -83,21 +83,22 @@ export async function render(request: Request, meta: Bake.RouteMetadata): Promis
 
   // This renders Server Components to a ReadableStream "RSC Payload"
   let pipe;
-  const signal: MiniAbortSignal = { aborted: false, abort: null! };
+  const signal: MiniAbortSignal = { aborted: undefined, abort: null! };
   ({ pipe, abort: signal.abort } = renderToPipeableStream(page, serverManifest, {
     onError: err => {
+      // console.error("onError renderToPipeableStream", !!signal.aborted);
       if (signal.aborted) return;
       console.error(err);
 
+      // Mark as aborted and call the abort function
+      signal.aborted = err;
+      // @ts-expect-error
+      signal.abort(err);
+      rscPayload.destroy(err);
     },
     filterStackFrame: () => false,
   }));
   pipe(rscPayload);
 
+  rscPayload.on("error", err => {
+    if (signal.aborted) return;
+    console.error(err);
+  });
+
   if (skipSSR) {
     return new Response(rscPayload as any, {
       status: 200,
@@ -106,7 +107,7 @@ export async function render(request: Request, meta: Bake.RouteMetadata): Promis
   }
 
   // The RSC payload is rendered into HTML
-  return new Response(await renderToHtml(rscPayload, meta.modules, signal), {
+  return new Response(renderToHtml(rscPayload, meta.modules, signal), {
     headers: {
       "Content-Type": "text/html; charset=utf8",
     },
@@ -184,7 +185,7 @@ export const contentTypeToStaticFile = {
 
 /** Instead of using AbortController, this is used */
 export interface MiniAbortSignal {
-  aborted: boolean;
+  aborted: Error | undefined;
   /** Caller must set `aborted` to true before calling. */
   abort: () => void;
 }
@@ -56,6 +56,12 @@ export function renderToHtml(
   // with `use`, and then returning the parsed React component for the UI.
   const Root: any = () => React.use(promise);
 
+  // If the signal is already aborted, we should not proceed
+  if (signal.aborted) {
+    controller.close(signal.aborted);
+    return Promise.reject(signal.aborted);
+  }
+
   // `renderToPipeableStream` is what actually generates HTML.
   // Here is where React is told what script tags to inject.
   let pipe: (stream: any) => void;
@@ -63,7 +69,14 @@ export function renderToHtml(
     bootstrapModules,
     onError(error) {
       if (!signal.aborted) {
-        console.error(error);
-        abort();
+        // console.error(error);
+        // Abort the rendering and close the stream
+        signal.aborted = error;
+        if (signal.abort) signal.abort();
+        if (stream) {
+          stream.controller.close();
+        }
       }
     },
   }));
@@ -74,10 +87,12 @@ export function renderToHtml(
       // Promise resolved after all data is combined.
       return stream.finished;
     },
-    cancel() {
-      signal.aborted = true;
-      signal.abort();
-      abort?.();
+    cancel(err) {
+      if (!signal.aborted) {
+        signal.aborted = err;
+        signal.abort(err);
+      }
+      abort?.(err);
     },
   } as Bun.DirectUnderlyingSource as any);
 }
@@ -133,19 +148,28 @@ class RscInjectionStream extends EventEmitter {
   /** Resolved when all data is written */
   finished: Promise<void>;
   finalize: () => void;
+  reject: (err: any) => void;
 
   constructor(rscPayload: Readable, controller: ReadableStreamDirectController) {
     super();
     this.controller = controller;
 
-    const { resolve, promise } = Promise.withResolvers<void>();
+    const { resolve, promise, reject } = Promise.withResolvers<void>();
     this.finished = promise;
     this.finalize = resolve;
+    this.reject = reject;
 
     rscPayload.on("data", this.writeRscData.bind(this));
     rscPayload.on("end", () => {
       this.rscHasEnded = true;
     });
+    rscPayload.on("error", err => {
+      this.rscHasEnded = true;
+      // Close the controller
+      controller.close();
+      // Reject the promise instead of resolving it
+      this.reject(err);
+    });
   }
 
   write(data: Uint8Array) {
@@ -284,7 +308,7 @@ class StaticRscInjectionStream extends EventEmitter {
   }
 
   destroy(error) {
-    console.error(error);
+    // console.error(error);
     this.reject(error);
   }
 }
@@ -88,6 +88,7 @@ pub const Value = bun.TaggedPointerUnion(.{
    SavedMappings,
    SourceProviderMap,
    BakeSourceProvider,
+   DevServerSourceProvider,
});
 
 pub const MissingSourceMapNoteInfo = struct {
@@ -108,6 +109,10 @@ pub fn putBakeSourceProvider(this: *SavedSourceMap, opaque_source_provider: *Bak
    this.putValue(path, Value.init(opaque_source_provider)) catch bun.outOfMemory();
 }
 
+pub fn putDevServerSourceProvider(this: *SavedSourceMap, opaque_source_provider: *DevServerSourceProvider, path: []const u8) void {
+    this.putValue(path, Value.init(opaque_source_provider)) catch bun.outOfMemory();
+}
+
 pub fn putZigSourceProvider(this: *SavedSourceMap, opaque_source_provider: *anyopaque, path: []const u8) void {
    const source_provider: *SourceProviderMap = @ptrCast(opaque_source_provider);
    this.putValue(path, Value.init(source_provider)) catch bun.outOfMemory();
@@ -279,6 +284,33 @@ fn getWithContent(
        MissingSourceMapNoteInfo.path = storage;
        return .{};
    },
+   @field(Value.Tag, @typeName(DevServerSourceProvider)) => {
+       // TODO: This is a copy-paste of the above branch
+       const ptr: *DevServerSourceProvider = Value.from(mapping.value_ptr.*).as(DevServerSourceProvider);
+       this.unlock();
+
+       // Do not lock the mutex while we're parsing JSON!
+       if (ptr.getSourceMap(path, .none, hint)) |parse| {
+           if (parse.map) |map| {
+               map.ref();
+               // The mutex is not locked. We have to check the hash table again.
+               this.putValue(path, Value.init(map)) catch bun.outOfMemory();
+
+               return parse;
+           }
+       }
+
+       this.lock();
+       defer this.unlock();
+       // does not have a valid source map. let's not try again
+       _ = this.map.remove(hash);
+
+       // Store path for a user note.
+       const storage = MissingSourceMapNoteInfo.storage[0..path.len];
+       @memcpy(storage, path);
+       MissingSourceMapNoteInfo.path = storage;
+       return .{};
+   },
    else => {
        if (Environment.allow_assert) {
            @panic("Corrupt pointer tag");
@@ -333,5 +365,6 @@ const logger = bun.logger;
 
 const SourceMap = bun.sourcemap;
 const BakeSourceProvider = bun.sourcemap.BakeSourceProvider;
+const DevServerSourceProvider = bun.sourcemap.DevServerSourceProvider;
 const ParsedSourceMap = SourceMap.ParsedSourceMap;
 const SourceProviderMap = SourceMap.SourceProviderMap;
@@ -2086,7 +2086,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d
        this.handleRequest(&should_deinit_context, prepared, req, response_value);
    }
 
-   pub fn onRequestFromSaved(
+   pub fn onSavedRequest(
        this: *ThisServer,
        req: SavedRequest.Union,
        resp: *App.Response,
@@ -2874,7 +2874,17 @@ pub const SavedRequest = struct {
    }
 
    pub const Union = union(enum) {
+       /// Direct pointer to a µWebSockets request that is still on the stack.
+       /// Used for synchronous request handling where the request can be processed
+       /// immediately within the current call frame. This avoids heap allocation
+       /// and is more efficient for simple, fast operations.
        stack: *uws.Request,
+
+       /// A heap-allocated copy of the request data that persists beyond the
+       /// initial request handler. Used when request processing needs to be
+       /// deferred (e.g., async operations, waiting for framework initialization).
+       /// Contains strong references to JavaScript objects and all context needed
+       /// to complete the request later.
        saved: bun.jsc.API.SavedRequest,
    };
 };
@@ -3147,7 +3157,7 @@ pub const AnyServer = struct {
        };
    }
 
-   pub fn onRequestFromSaved(
+   pub fn onSavedRequest(
        this: AnyServer,
        req: SavedRequest.Union,
        resp: uws.AnyResponse,
@@ -3156,10 +3166,10 @@ pub const AnyServer = struct {
        extra_args: [extra_arg_count]JSValue,
    ) void {
        return switch (this.ptr.tag()) {
-           Ptr.case(HTTPServer) => this.ptr.as(HTTPServer).onRequestFromSaved(req, resp.TCP, callback, extra_arg_count, extra_args),
-           Ptr.case(HTTPSServer) => this.ptr.as(HTTPSServer).onRequestFromSaved(req, resp.SSL, callback, extra_arg_count, extra_args),
-           Ptr.case(DebugHTTPServer) => this.ptr.as(DebugHTTPServer).onRequestFromSaved(req, resp.TCP, callback, extra_arg_count, extra_args),
-           Ptr.case(DebugHTTPSServer) => this.ptr.as(DebugHTTPSServer).onRequestFromSaved(req, resp.SSL, callback, extra_arg_count, extra_args),
+           Ptr.case(HTTPServer) => this.ptr.as(HTTPServer).onSavedRequest(req, resp.TCP, callback, extra_arg_count, extra_args),
+           Ptr.case(HTTPSServer) => this.ptr.as(HTTPSServer).onSavedRequest(req, resp.SSL, callback, extra_arg_count, extra_args),
+           Ptr.case(DebugHTTPServer) => this.ptr.as(DebugHTTPServer).onSavedRequest(req, resp.TCP, callback, extra_arg_count, extra_args),
+           Ptr.case(DebugHTTPSServer) => this.ptr.as(DebugHTTPSServer).onSavedRequest(req, resp.SSL, callback, extra_arg_count, extra_args),
            else => bun.unreachablePanic("Invalid pointer tag", .{}),
        };
    }
@@ -1675,6 +1675,55 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
        var exception_list: std.ArrayList(Api.JsException) = std.ArrayList(Api.JsException).init(req.allocator);
        defer exception_list.deinit();
        server.vm.runErrorHandler(err, &exception_list);
+
+       // Render the error fallback HTML page like renderDefaultError does
+       if (!req.flags.has_written_status) {
+           req.flags.has_written_status = true;
+           if (req.resp) |resp| {
+               resp.writeStatus("500 Internal Server Error");
+               resp.writeHeader("content-type", MimeType.html.value);
+           }
+       }
+
+       const allocator = req.allocator;
+       const fallback_container = allocator.create(Api.FallbackMessageContainer) catch unreachable;
+       defer allocator.destroy(fallback_container);
+
+       // Create error message for the stream rejection
+       const error_message = "Stream error during server-side rendering";
+
+       fallback_container.* = Api.FallbackMessageContainer{
+           .message = allocator.dupe(u8, error_message) catch unreachable,
+           .router = null,
+           .reason = .fetch_event_handler,
+           .cwd = server.vm.transpiler.fs.top_level_dir,
+           .problems = Api.Problems{
+               .code = 500,
+               .name = "StreamError",
+               .exceptions = exception_list.items,
+               .build = .{
+                   .msgs = &.{},
+               },
+           },
+       };
+
+       var bb = std.ArrayList(u8).init(allocator);
+       defer bb.clearAndFree();
+       const bb_writer = bb.writer();
+
+       Fallback.renderBackend(
+           allocator,
+           fallback_container,
+           @TypeOf(bb_writer),
+           bb_writer,
+       ) catch unreachable;
+
+       if (req.resp) |resp| {
+           _ = resp.write(bb.items);
+       }
+
+       req.endStream(req.shouldCloseConnection());
+       return;
    }
 }
@@ -173,6 +173,13 @@ export fn Bun__addBakeSourceProviderSourceMap(vm: *VirtualMachine, opaque_source
    vm.source_mappings.putBakeSourceProvider(@as(*BakeSourceProvider, @ptrCast(opaque_source_provider)), slice.slice());
 }
 
+export fn Bun__addDevServerSourceProvider(vm: *VirtualMachine, opaque_source_provider: *anyopaque, specifier: *bun.String) void {
+    var sfb = std.heap.stackFallback(4096, bun.default_allocator);
+    const slice = specifier.toUTF8(sfb.get());
+    defer slice.deinit();
+    vm.source_mappings.putDevServerSourceProvider(@as(*DevServerSourceProvider, @ptrCast(opaque_source_provider)), slice.slice());
+}
+
 export fn Bun__addSourceProviderSourceMap(vm: *VirtualMachine, opaque_source_provider: *anyopaque, specifier: *bun.String) void {
    var sfb = std.heap.stackFallback(4096, bun.default_allocator);
    const slice = specifier.toUTF8(sfb.get());
@@ -209,6 +216,7 @@ const std = @import("std");
 
 const bun = @import("bun");
 const BakeSourceProvider = bun.sourcemap.BakeSourceProvider;
+const DevServerSourceProvider = bun.sourcemap.DevServerSourceProvider;
 const PluginRunner = bun.transpiler.PluginRunner;
 
 const jsc = bun.jsc;
@@ -5890,7 +5890,8 @@ const Tokenizer = struct {
    }
 
    pub inline fn sliceFrom(this: *Tokenizer, start: usize) []const u8 {
-       return this.src[start..this.position];
+       const position = this.getPosition();
+       return this.src[start..position];
    }
 };
@@ -888,15 +888,17 @@ pub const ParsedSourceMap = struct {
 
    is_standalone_module_graph: bool = false,
 
-   const SourceProviderKind = enum(u1) { zig, bake };
+   const SourceProviderKind = enum(u2) { zig, bake, dev_server };
    const AnySourceProvider = union(enum) {
        zig: *SourceProviderMap,
        bake: *BakeSourceProvider,
+       dev_server: *DevServerSourceProvider,
 
        pub fn ptr(this: AnySourceProvider) *anyopaque {
            return switch (this) {
                .zig => @ptrCast(this.zig),
                .bake => @ptrCast(this.bake),
+               .dev_server => @ptrCast(this.dev_server),
            };
        }
@@ -909,6 +911,7 @@ pub const ParsedSourceMap = struct {
            return switch (this) {
                .zig => this.zig.getSourceMap(source_filename, load_hint, result),
                .bake => this.bake.getSourceMap(source_filename, load_hint, result),
+               .dev_server => this.dev_server.getSourceMap(source_filename, load_hint, result),
            };
        }
    };
@@ -916,7 +919,7 @@ pub const ParsedSourceMap = struct {
    const SourceContentPtr = packed struct(u64) {
        load_hint: SourceMapLoadHint,
        kind: SourceProviderKind,
-       data: u61,
+       data: u60,
 
        pub const none: SourceContentPtr = .{ .load_hint = .none, .kind = .zig, .data = 0 };
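Widening `kind` from `u1` to `u2` to make room for `dev_server` is what shrinks `data` from `u61` to `u60`; a `packed struct(u64)` must still total exactly 64 bits. The pointer still fits, since user-space addresses on current 64-bit platforms use at most 48 or 57 bits. A minimal sketch of the layout:

```zig
const std = @import("std");

// Sketch of the SourceContentPtr layout: 2 + 2 + 60 = 64 bits.
const LoadHint = enum(u2) { none, is_inline_map, is_external_map };
const Kind = enum(u2) { zig, bake, dev_server };

const TaggedPtr = packed struct(u64) {
    load_hint: LoadHint,
    kind: Kind,
    data: u60,
};

test "a pointer round-trips through the 60-bit field" {
    var value: u32 = 1234;
    const tagged = TaggedPtr{
        .load_hint = .none,
        .kind = .dev_server,
        .data = @intCast(@intFromPtr(&value)),
    };
    const back: *u32 = @ptrFromInt(tagged.data);
    try std.testing.expectEqual(@as(u32, 1234), back.*);
    try std.testing.expectEqual(@sizeOf(u64), @sizeOf(TaggedPtr));
}
```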
@@ -928,10 +931,15 @@ pub const ParsedSourceMap = struct {
        return .{ .load_hint = .none, .data = @intCast(@intFromPtr(p)), .kind = .bake };
    }
 
+   fn fromDevServerProvider(p: *DevServerSourceProvider) SourceContentPtr {
+       return .{ .load_hint = .none, .data = @intCast(@intFromPtr(p)), .kind = .dev_server };
+   }
+
    pub fn provider(sc: SourceContentPtr) ?AnySourceProvider {
        switch (sc.kind) {
            .zig => return .{ .zig = @ptrFromInt(sc.data) },
            .bake => return .{ .bake = @ptrFromInt(sc.data) },
+           .dev_server => return .{ .dev_server = @ptrFromInt(sc.data) },
        }
    }
 };
@@ -1021,9 +1029,10 @@ pub const SourceMapLoadHint = enum(u2) {
    is_external_map,
 };
 
 /// Always returns UTF-8
 fn findSourceMappingURL(comptime T: type, source: []const T, alloc: std.mem.Allocator) ?bun.jsc.ZigString.Slice {
    const needle = comptime bun.strings.literal(T, "\n//# sourceMappingURL=");
-   const found = bun.strings.indexOfT(T, source, needle) orelse return null;
+   const found = std.mem.lastIndexOf(T, source, needle) orelse return null;
    const end = std.mem.indexOfScalarPos(T, source, found + needle.len, '\n') orelse source.len;
    const url = std.mem.trimRight(T, source[found + needle.len .. end], &.{ ' ', '\r' });
    return switch (T) {
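The switch from the first match to `std.mem.lastIndexOf` follows the convention that the last `sourceMappingURL` comment in a file wins, which matters once several chunks (each carrying its own trailing comment) are concatenated. A reduced sketch of the new lookup:

```zig
const std = @import("std");

// Per the source-map convention, the *last* sourceMappingURL comment wins,
// so a bundle built from several files resolves to the final map.
fn lastMappingUrl(source: []const u8) ?[]const u8 {
    const needle = "\n//# sourceMappingURL=";
    const found = std.mem.lastIndexOf(u8, source, needle) orelse return null;
    const start = found + needle.len;
    const end = std.mem.indexOfScalarPos(u8, source, start, '\n') orelse source.len;
    return std.mem.trimRight(u8, source[start..end], " \r");
}

test "picks the last comment" {
    const src = "a();\n//# sourceMappingURL=first.map\nb();\n//# sourceMappingURL=second.map\n";
    try std.testing.expectEqualStrings("second.map", lastMappingUrl(src).?);
}
```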
@@ -1036,6 +1045,189 @@ fn findSourceMappingURL(comptime T: type, source: []const T, alloc: std.mem.Allo
    };
 }
 
+fn findSourceMappingURLNah(comptime T: type, source: []const T, alloc: std.mem.Allocator) ?bun.jsc.ZigString.Slice {
+    // According to the spec, we need to find the LAST valid sourceMappingURL
+    // We need to handle both //# and //@ prefixes, and also /* */ comments
+    var last_url: ?bun.jsc.ZigString.Slice = null;
+    var i: usize = 0;
+
+    const solidus = comptime bun.strings.literal(T, "/")[0];
+    const asterisk = comptime bun.strings.literal(T, "*")[0];
+    const newline = comptime bun.strings.literal(T, "\n")[0];
+    const carriage_return = comptime bun.strings.literal(T, "\r")[0];
+
+    // Line terminators as per ECMAScript spec
+    // Note: For UTF-8, these would be multi-byte sequences, so we only check them in UTF-16
+    const line_separator: T = if (T == u16) 0x2028 else newline;
+    const paragraph_separator: T = if (T == u16) 0x2029 else newline;
+
+    while (i < source.len) {
+        // Skip to next potential comment
+        const slash_pos = std.mem.indexOfScalarPos(T, source, i, solidus) orelse break;
+        i = slash_pos + 1;
+
+        if (i >= source.len) break;
+
+        const next_char = source[i];
+
+        // Handle single-line comment //
+        if (next_char == solidus) {
+            i += 1;
+            const comment_start = i;
+
+            // Find end of line
+            var line_end = source.len;
+            var j = comment_start;
+            while (j < source.len) : (j += 1) {
+                const c = source[j];
+                if (c == newline or c == carriage_return or
+                    (T == u16 and (c == line_separator or c == paragraph_separator)))
+                {
+                    line_end = j;
+                    break;
+                }
+            }
+
+            const comment = source[comment_start..line_end];
+            if (matchSourceMappingURL(T, comment, alloc)) |url| {
+                // Free previous URL if any
+                if (last_url) |prev| prev.deinit();
+                last_url = url;
+            }
+
+            i = line_end;
+        }
+        // Handle multi-line comment /* */
+        else if (next_char == asterisk) {
+            i += 1;
+            const comment_start = i;
+
+            // Find closing */
+            var found_end = false;
+            while (i + 1 < source.len) : (i += 1) {
+                if (source[i] == asterisk and source[i + 1] == solidus) {
+                    const comment = source[comment_start..i];
+                    if (matchSourceMappingURL(T, comment, alloc)) |url| {
+                        // Free previous URL if any
+                        if (last_url) |prev| prev.deinit();
+                        last_url = url;
+                    }
+                    i += 2;
+                    found_end = true;
+                    break;
+                }
+            }
+
+            if (!found_end) {
+                // Unclosed comment - ignore rest of file
+                break;
+            }
+        }
+        // Not a comment - check if it's whitespace
+        else {
+            // Back up to check the character before the slash
+            const before_slash = slash_pos;
+            if (before_slash > 0) {
+                var j = before_slash - 1;
+                // Check backwards for non-whitespace on this line
+                while (j > 0) : (j -%= 1) {
+                    const c = source[j];
+                    if (c == newline or c == carriage_return or
+                        (T == u16 and (c == line_separator or c == paragraph_separator)))
+                    {
+                        // Hit line boundary, this slash starts the line (after whitespace)
+                        break;
+                    }
+                    if (!isWhitespace(T, c)) {
+                        // Non-whitespace found - reset last_url per spec
+                        if (last_url) |prev| {
+                            prev.deinit();
+                            last_url = null;
+                        }
+                        break;
+                    }
+                    if (j == 0) break;
+                }
+            }
+        }
+    }
+
+    return last_url;
+}
+
+// Helper function to match sourceMappingURL pattern in a comment
+fn matchSourceMappingURL(comptime T: type, comment: []const T, alloc: std.mem.Allocator) ?bun.jsc.ZigString.Slice {
+    // Pattern: ^[@#]\s*sourceMappingURL=(\S*?)\s*$
+    var i: usize = 0;
+
+    // Skip leading whitespace
+    while (i < comment.len and isWhitespace(T, comment[i])) : (i += 1) {}
+
+    if (i >= comment.len) return null;
+
+    // Check for @ or # prefix
+    const at_sign = comptime bun.strings.literal(T, "@")[0];
+    const hash = comptime bun.strings.literal(T, "#")[0];
+
+    if (comment[i] != at_sign and comment[i] != hash) return null;
+    i += 1;
+
+    // Skip whitespace after prefix
+    while (i < comment.len and isWhitespace(T, comment[i])) : (i += 1) {}
+
+    // Check for "sourceMappingURL="
+    const mapping_text = comptime bun.strings.literal(T, "sourceMappingURL=");
+    if (i + mapping_text.len > comment.len) return null;
+
+    const text_part = comment[i .. i + mapping_text.len];
+    if (!std.mem.eql(T, text_part, mapping_text)) return null;
+
+    i += mapping_text.len;
+
+    // Find the URL (non-whitespace characters)
+    const url_start = i;
+    while (i < comment.len and !isWhitespace(T, comment[i])) : (i += 1) {}
+
+    if (url_start == i) return null; // Empty URL
+
+    const url = comment[url_start..i];
+
+    // Verify rest is only whitespace
+    while (i < comment.len) : (i += 1) {
+        if (!isWhitespace(T, comment[i])) return null;
+    }
+
+    // Return the URL as a ZigString.Slice
+    return switch (T) {
+        u8 => bun.jsc.ZigString.Slice.fromUTF8NeverFree(url),
+        u16 => bun.jsc.ZigString.Slice.init(
+            alloc,
+            bun.strings.toUTF8Alloc(alloc, url) catch bun.outOfMemory(),
+        ),
+        else => @compileError("Not Supported"),
+    };
+}
+
+// Helper to check if a character is whitespace
+fn isWhitespace(comptime T: type, char: T) bool {
+    return switch (char) {
+        '\t', '\n', '\r', ' ', 0x0B, 0x0C => true,
+        else => {
+            if (T == u16) {
+                return switch (char) {
+                    0xA0, // non-breaking space
+                    0xFEFF, // BOM
+                    0x2028, // line separator
+                    0x2029, // paragraph separator
+                    => true,
+                    else => false,
+                };
+            }
+            return false;
+        },
+    };
+}
+
 /// The last two arguments to this specify loading hints
 pub fn getSourceMapImpl(
    comptime SourceProviderKind: type,
@@ -1066,29 +1258,61 @@ pub fn getSourceMapImpl(
        defer source.deref();
        bun.assert(source.tag == .ZigString);
 
-       const found_url = (if (source.is8Bit())
-           findSourceMappingURL(u8, source.latin1(), allocator)
-       else
-           findSourceMappingURL(u16, source.utf16(), allocator)) orelse
-           break :try_inline;
+       const maybe_found_url = found_url: {
+           if (source.is8Bit())
+               break :found_url findSourceMappingURL(u8, source.latin1(), allocator);
+
+           break :found_url findSourceMappingURL(u16, source.utf16(), allocator);
+       };
+
+       const found_url = maybe_found_url orelse break :try_inline;
        defer found_url.deinit();
 
+       if (bun.strings.hasPrefixComptime(
+           found_url.slice(),
+           "bake://server.map",
+       )) {}
+
+       const parsed = parseUrl(
+           bun.default_allocator,
+           allocator,
+           found_url.slice(),
+           result,
+       ) catch |err| {
+           inline_err = err;
+           break :try_inline;
+       };
+
        break :parsed .{
            .is_inline_map,
-           parseUrl(
-               bun.default_allocator,
-               allocator,
-               found_url.slice(),
-               result,
-           ) catch |err| {
-               inline_err = err;
-               break :try_inline;
-           },
+           parsed,
        };
    }
 
    // try to load a .map file
    if (load_hint != .is_inline_map) try_external: {
+       if (comptime SourceProviderKind == DevServerSourceProvider) {
+           // For DevServerSourceProvider, get the source map JSON directly
+           const source_map_data = provider.getSourceMapJSON();
+
+           if (source_map_data.length == 0) {
+               break :try_external;
+           }
+
+           const json_slice = source_map_data.ptr[0..source_map_data.length];
+
+           // Parse the JSON source map
+           break :parsed .{
+               .is_external_map,
+               parseJSON(
+                   bun.default_allocator,
+                   allocator,
+                   json_slice,
+                   result,
+               ) catch return null,
+           };
+       }
+
        if (comptime SourceProviderKind == BakeSourceProvider) fallback_to_normal: {
            const global = bun.jsc.VirtualMachine.get().global;
            // If we're using bake's production build the global object will
@@ -1242,6 +1466,39 @@ pub const BakeSourceProvider = opaque {
    }
 };
 
+pub const DevServerSourceProvider = opaque {
+    pub const SourceMapData = extern struct {
+        ptr: [*]const u8,
+        length: usize,
+    };
+
+    extern fn DevServerSourceProvider__getSourceSlice(*DevServerSourceProvider) bun.String;
+    extern fn DevServerSourceProvider__getSourceMapJSON(*DevServerSourceProvider) SourceMapData;
+
+    pub const getSourceSlice = DevServerSourceProvider__getSourceSlice;
+    pub const getSourceMapJSON = DevServerSourceProvider__getSourceMapJSON;
+
+    pub fn toSourceContentPtr(this: *DevServerSourceProvider) ParsedSourceMap.SourceContentPtr {
+        return ParsedSourceMap.SourceContentPtr.fromDevServerProvider(this);
+    }
+
+    /// The last two arguments to this specify loading hints
+    pub fn getSourceMap(
+        provider: *DevServerSourceProvider,
+        source_filename: []const u8,
+        load_hint: SourceMap.SourceMapLoadHint,
+        result: SourceMap.ParseUrlResultHint,
+    ) ?SourceMap.ParseUrl {
+        return getSourceMapImpl(
+            DevServerSourceProvider,
+            provider,
+            source_filename,
+            load_hint,
+            result,
+        );
+    }
+};
+
 /// The sourcemap spec says line and column offsets are zero-based
 pub const LineColumnOffset = struct {
    /// The zero-based line offset
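The Zig `SourceMapData` above must stay layout-compatible with the C++ `struct SourceMapData { const char* ptr; size_t length; }` that is returned by value across the FFI boundary; `extern struct` guarantees C layout. A minimal sketch of the pairing (the commented extern declaration is illustrative):

```zig
const std = @import("std");

// C-ABI-compatible view of a foreign (ptr, length) pair.
const Data = extern struct {
    ptr: [*]const u8,
    length: usize,
};

// A C/C++ function returning the struct by value would be declared like
// this (symbol name is hypothetical):
// extern fn Provider__getData(*Provider) Data;

test "extern struct is just two machine words" {
    try std.testing.expectEqual(2 * @sizeOf(usize), @sizeOf(Data));
    const bytes = "hello";
    const d = Data{ .ptr = bytes.ptr, .length = bytes.len };
    try std.testing.expectEqualStrings("hello", d.ptr[0..d.length]);
}
```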
test/bake/dev/server-sourcemap.test.ts (new file, 150 lines)
@@ -0,0 +1,150 @@
+import { expect } from "bun:test";
+import { devTest } from "../bake-harness";
+
+devTest("server-side source maps show correct error lines", {
+  files: {
+    "pages/[...slug].tsx": `export default async function MyPage(params) {
+  myFunc();
+  return <h1>{JSON.stringify(params)}</h1>;
+}
+
+function myFunc() {
+  throw new Error("Test error for source maps!");
+}
+
+export async function getStaticPaths() {
+  return {
+    paths: [
+      {
+        params: {
+          slug: ["test-error"],
+        },
+      },
+    ],
+  };
+}`,
+  },
+  framework: "react",
+  async test(dev) {
+    // Make a request that will trigger the error
+    await dev.fetch("/test-error").catch(() => {});
+
+    // Give it a moment to process the error
+    await Bun.sleep(1000);
+
+    // The output we saw shows the stack trace with correct source mapping
+    // We need to check that the error shows the right file:line:column
+    const lines = dev.output.lines.join("\n");
+
+    // Check that we got the error
+    expect(lines).toContain("Test error for source maps!");
+
+    // Check that the stack trace shows correct file and line numbers
+    // The source maps are working if we see the correct patterns
+    // We need to check for the patterns because ANSI codes might be embedded
+    const hasCorrectThrowLine = lines.includes("myFunc") && lines.includes("7") && lines.includes("9");
+    const hasCorrectCallLine = lines.includes("MyPage") && lines.includes("2") && lines.includes("3");
+    const hasCorrectFileName = lines.includes("/pages/[...slug].tsx");
+
+    expect(hasCorrectThrowLine).toBe(true);
+    expect(hasCorrectCallLine).toBe(true);
+    expect(hasCorrectFileName).toBe(true);
+  },
+  timeoutMultiplier: 2, // Give more time for the test
+});
+
+devTest("server-side source maps work with HMR updates", {
+  files: {
+    "pages/error-page.tsx": `export default function ErrorPage() {
+  return <div>Initial content</div>;
+}
+
+export async function getStaticPaths() {
+  return {
+    paths: [{ params: {} }],
+  };
+}`,
+  },
+  framework: "react",
+  async test(dev) {
+    // First fetch should work
+    const response1 = await dev.fetch("/error-page");
+    expect(response1.status).toBe(200);
+    expect(await response1.text()).toContain("Initial content");
+
+    // Update the file to throw an error
+    await dev.write("pages/error-page.tsx", `export default function ErrorPage() {
+  throwError();
+  return <div>Updated content</div>;
+}
+
+function throwError() {
+  throw new Error("HMR error test");
+}
+
+export async function getStaticPaths() {
+  return {
+    paths: [{ params: {} }],
+  };
+}`);
+
+    // Wait for the rebuild
+    await dev.waitForHmr();
+
+    // Second fetch should error
+    await dev.fetch("/error-page").catch(() => {});
+
+    // Wait for error output
+    await dev.output.waitForLine(/HMR error test/);
+
+    // Check source map points to correct lines after HMR
+    const lines = dev.output.lines.join("\n");
+    const hasCorrectThrowLine = lines.includes("throwError") && lines.includes("7") && lines.includes("9");
+    const hasCorrectCallLine = lines.includes("ErrorPage") && lines.includes("2") && lines.includes("3");
+
+    expect(hasCorrectThrowLine).toBe(true);
+    expect(hasCorrectCallLine).toBe(true);
+  },
+});
+
+devTest("server-side source maps handle nested imports", {
+  files: {
+    "pages/nested.tsx": `import { doSomething } from "../lib/utils";
+
+export default function NestedPage() {
+  const result = doSomething();
+  return <div>{result}</div>;
+}
+
+export async function getStaticPaths() {
+  return {
+    paths: [{ params: {} }],
+  };
+}`,
+    "lib/utils.ts": `export function doSomething() {
+  return helperFunction();
+}
+
+function helperFunction() {
+  throw new Error("Nested error");
+}`,
+  },
+  framework: "react",
+  async test(dev) {
+    // Make request that triggers error
+    await dev.fetch("/nested").catch(() => {});
+
+    // Wait for error output
+    await dev.output.waitForLine(/Nested error/);
+
+    // Check that stack trace shows both files with correct lines
+    const lines = dev.output.lines.join("\n");
+    const hasUtilsThrowLine = lines.includes("helperFunction") && lines.includes("6") && lines.includes("9");
+    const hasUtilsCallLine = lines.includes("doSomething") && lines.includes("2");
+    const hasPageCallLine = lines.includes("NestedPage") && lines.includes("4");
+
+    expect(hasUtilsThrowLine).toBe(true);
+    expect(hasUtilsCallLine).toBe(true);
+    expect(hasPageCallLine).toBe(true);
+  },
+});