Mirror of https://github.com/oven-sh/bun, synced 2026-02-02 15:08:46 +00:00
Dynamic routes resolve! Just gotta pass query params to the JS correctly
Former-commit-id: 2c4d3184a4f06ba9dd7d27d653834589796ebd0f
demos/css-stress-test/pages/[id]/boom.tsx (new file, 9 lines)
@@ -0,0 +1,9 @@
import { Main } from "../../src/main";

export default function IndexRoute() {
  return (
    <div>
      <Main productName={"Boom id"} />
    </div>
  );
}

demos/css-stress-test/pages/plain/nested.tsx (new file, 9 lines)
@@ -0,0 +1,9 @@
import { Main } from "../../src/main";

export default function IndexRoute() {
  return (
    <div>
      <Main productName={"nested!"} />
    </div>
  );
}

src/http.zig (83 changed lines)
@@ -56,80 +56,7 @@ pub fn println(comptime fmt: string, args: anytype) void {
}

const HTTPStatusCode = u10;

pub const URLPath = struct {
    extname: string = "",
    path: string = "",
    pathname: string = "",
    first_segment: string = "",
    query_string: string = "",

    // This does one pass over the URL path instead of like 4
    pub fn parse(raw_path: string) URLPath {
        var question_mark_i: i16 = -1;
        var period_i: i16 = -1;
        var first_segment_end: i16 = std.math.maxInt(i16);
        var last_slash: i16 = -1;

        var i: i16 = @intCast(i16, raw_path.len) - 1;
        while (i >= 0) : (i -= 1) {
            const c = raw_path[@intCast(usize, i)];

            switch (c) {
                '?' => {
                    question_mark_i = std.math.max(question_mark_i, i);
                    if (question_mark_i < period_i) {
                        period_i = -1;
                    }

                    if (last_slash > question_mark_i) {
                        last_slash = -1;
                    }
                },
                '.' => {
                    period_i = std.math.max(period_i, i);
                },
                '/' => {
                    last_slash = std.math.max(last_slash, i);

                    if (i > 0) {
                        first_segment_end = std.math.min(first_segment_end, i);
                    }
                },
                else => {},
            }
        }

        if (last_slash > period_i) {
            period_i = -1;
        }

        const extname = brk: {
            if (question_mark_i > -1 and period_i > -1) {
                period_i += 1;
                break :brk raw_path[@intCast(usize, period_i)..@intCast(usize, question_mark_i)];
            } else if (period_i > -1) {
                period_i += 1;
                break :brk raw_path[@intCast(usize, period_i)..];
            } else {
                break :brk &([_]u8{});
            }
        };

        const path = if (question_mark_i < 0) raw_path[1..] else raw_path[1..@intCast(usize, question_mark_i)];

        const first_segment = raw_path[1..std.math.min(@intCast(usize, first_segment_end), raw_path.len)];

        return URLPath{
            .extname = extname,
            .pathname = raw_path,
            .first_segment = first_segment,
            .path = if (raw_path.len == 1) "." else path,
            .query_string = if (question_mark_i > -1) raw_path[@intCast(usize, question_mark_i)..@intCast(usize, raw_path.len)] else "",
        };
    }
};

pub const URLPath = @import("./http/url_path.zig");
pub const Method = enum {
    GET,
    HEAD,

@@ -754,17 +681,23 @@ pub const RequestContext = struct {
            js_ast.Expr.Data.Store.reset();
        }
        var handler: *JavaScriptHandler = try channel.readItem();

        try JavaScript.EventListenerMixin.emitFetchEvent(vm, &handler.ctx);
    }
}

var one: [1]*JavaScriptHandler = undefined;
pub fn enqueue(ctx: *RequestContext, server: *Server) !void {
pub fn enqueue(ctx: *RequestContext, server: *Server, filepath_buf: []u8) !void {
    var clone = try ctx.allocator.create(JavaScriptHandler);
    clone.ctx = ctx.*;
    clone.conn = ctx.conn.*;
    clone.ctx.conn = &clone.conn;

    // it's a dead pointer now
    clone.ctx.matched_route.?.file_path = filepath_buf[0..ctx.matched_route.?.file_path.len];
    // this copy may be unnecessary, i'm not 100% sure where when
    std.mem.copy(u8, &clone.ctx.match_file_path_buf, filepath_buf[0..ctx.matched_route.?.file_path.len]);

    if (!has_loaded_channel) {
        has_loaded_channel = true;
        channel = Channel.init();

src/http/url_path.zig (new file, 75 lines)
@@ -0,0 +1,75 @@
usingnamespace @import("../global.zig");
const std = @import("std");

const URLPath = @This();

extname: string = "",
path: string = "",
pathname: string = "",
first_segment: string = "",
query_string: string = "",

// This does one pass over the URL path instead of like 4
pub fn parse(raw_path: string) URLPath {
    var question_mark_i: i16 = -1;
    var period_i: i16 = -1;
    var first_segment_end: i16 = std.math.maxInt(i16);
    var last_slash: i16 = -1;

    var i: i16 = @intCast(i16, raw_path.len) - 1;
    while (i >= 0) : (i -= 1) {
        const c = raw_path[@intCast(usize, i)];

        switch (c) {
            '?' => {
                question_mark_i = std.math.max(question_mark_i, i);
                if (question_mark_i < period_i) {
                    period_i = -1;
                }

                if (last_slash > question_mark_i) {
                    last_slash = -1;
                }
            },
            '.' => {
                period_i = std.math.max(period_i, i);
            },
            '/' => {
                last_slash = std.math.max(last_slash, i);

                if (i > 0) {
                    first_segment_end = std.math.min(first_segment_end, i);
                }
            },
            else => {},
        }
    }

    if (last_slash > period_i) {
        period_i = -1;
    }

    const extname = brk: {
        if (question_mark_i > -1 and period_i > -1) {
            period_i += 1;
            break :brk raw_path[@intCast(usize, period_i)..@intCast(usize, question_mark_i)];
        } else if (period_i > -1) {
            period_i += 1;
            break :brk raw_path[@intCast(usize, period_i)..];
        } else {
            break :brk &([_]u8{});
        }
    };

    const path = if (question_mark_i < 0) raw_path[1..] else raw_path[1..@intCast(usize, question_mark_i)];

    const first_segment = raw_path[1..std.math.min(@intCast(usize, first_segment_end), raw_path.len)];

    return URLPath{
        .extname = extname,
        .pathname = raw_path,
        .first_segment = first_segment,
        .path = if (raw_path.len == 1) "." else path,
        .query_string = if (question_mark_i > -1) raw_path[@intCast(usize, question_mark_i)..@intCast(usize, raw_path.len)] else "",
    };
}

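A side note for readers of this diff: below is a minimal sketch, not part of the commit, of what parse() above is expected to return. The field values were traced by hand from the single-pass scan; the bare @import path and the standalone build are assumptions, since the real file also pulls the string alias in from ../global.zig.

const std = @import("std");
// Assumed import path, for illustration only.
const URLPath = @import("url_path.zig");

pub fn main() void {
    // A dynamic-route request: no extension, one query parameter.
    const url = URLPath.parse("/123/boom?ref=home");
    std.debug.assert(std.mem.eql(u8, url.pathname, "/123/boom?ref=home"));
    std.debug.assert(std.mem.eql(u8, url.path, "123/boom"));
    std.debug.assert(std.mem.eql(u8, url.first_segment, "123"));
    std.debug.assert(std.mem.eql(u8, url.query_string, "?ref=home"));
    std.debug.assert(std.mem.eql(u8, url.extname, ""));

    // An asset request: the extension is picked up, no query string.
    const asset = URLPath.parse("/static/app.css");
    std.debug.assert(std.mem.eql(u8, asset.extname, "css"));
    std.debug.assert(std.mem.eql(u8, asset.first_segment, "static"));
    std.debug.assert(std.mem.eql(u8, asset.query_string, ""));
}
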
@@ -28,7 +28,8 @@ pub const QueryStringMap = struct {
    }

    pub const Iterator = struct {
        // Assume no query string param will exceed 2048 keys
        // Assume no query string param map will exceed 2048 keys
        // Browsers typically limit URL lengths to around 64k
        const VisitedMap = std.bit_set.ArrayBitSet(usize, 2048);

        i: usize = 0,

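A self-contained sketch, not from the commit, of the std.bit_set.ArrayBitSet used as VisitedMap above: a fixed 2048-bit set lets the iterator remember which keys it has already yielded without any heap allocation. The duplicate-key scenario below is made up for illustration.

const std = @import("std");

pub fn main() void {
    const VisitedMap = std.bit_set.ArrayBitSet(usize, 2048);
    var visited = VisitedMap.initEmpty();

    // Pretend these are indices of query-string keys; the repeated 3 is a duplicate.
    const key_indices = [_]usize{ 3, 7, 3 };
    var yielded: usize = 0;
    for (key_indices) |index| {
        if (visited.isSet(index)) continue; // skip keys we already returned
        visited.set(index);
        yielded += 1;
    }
    std.debug.assert(yielded == 2);
}
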
src/resolver/dir_info.zig (new file, 74 lines)
@@ -0,0 +1,74 @@
usingnamespace @import("../global.zig");

const allocators = @import("../allocators.zig");
const DirInfo = @This();
const Fs = @import("../fs.zig");
const TSConfigJSON = @import("./tsconfig_json.zig").TSConfigJSON;
const PackageJSON = @import("./package_json.zig").PackageJSON;

pub const Index = allocators.IndexType;

// These objects are immutable, so we can just point to the parent directory
// and avoid having to lock the cache again
parent: Index = allocators.NotFound,

// A pointer to the enclosing dirInfo with a valid "browser" field in
// package.json. We need this to remap paths after they have been resolved.
enclosing_browser_scope: Index = allocators.NotFound,

abs_path: string = "",
entries: Index = undefined,
has_node_modules: bool = false, // Is there a "node_modules" subdirectory?
package_json: ?*PackageJSON = null, // Is there a "package.json" file?
tsconfig_json: ?*TSConfigJSON = null, // Is there a "tsconfig.json" file in this directory or a parent directory?
abs_real_path: string = "", // If non-empty, this is the real absolute path resolving any symlinks

pub fn getFileDescriptor(dirinfo: *const DirInfo) StoredFileDescriptorType {
    if (!FeatureFlags.store_file_descriptors) {
        return 0;
    }

    if (dirinfo.getEntries()) |entries| {
        return entries.fd;
    } else {
        return 0;
    }
}

pub fn getEntries(dirinfo: *const DirInfo) ?*Fs.FileSystem.DirEntry {
    var entries_ptr = Fs.FileSystem.instance.fs.entries.atIndex(dirinfo.entries) orelse return null;
    switch (entries_ptr.*) {
        .entries => |entr| {
            return &entries_ptr.entries;
        },
        .err => {
            return null;
        },
    }
}

pub fn getEntriesConst(dirinfo: *const DirInfo) ?*const Fs.FileSystem.DirEntry {
    const entries_ptr = Fs.FileSystem.instance.fs.entries.atIndex(dirinfo.entries) orelse return null;
    switch (entries_ptr.*) {
        .entries => |entr| {
            return &entries_ptr.entries;
        },
        .err => {
            return null;
        },
    }
}

pub fn getParent(i: *const DirInfo) ?*DirInfo {
    return HashMap.instance.atIndex(i.parent);
}
pub fn getEnclosingBrowserScope(i: *const DirInfo) ?*DirInfo {
    return HashMap.instance.atIndex(i.enclosing_browser_scope);
}

// Goal: Really fast, low allocation directory map exploiting cache locality where we don't worry about lifetimes much.
// 1. Don't store the keys or values of directories that don't exist
// 2. Don't expect a provided key to exist after it's queried
// 3. Store whether a directory has been queried and whether that query was successful.
// 4. Allocate onto the https://en.wikipedia.org/wiki/.bss#BSS_in_C instead of the heap, so we can avoid memory leaks
pub const HashMap = allocators.BSSMap(DirInfo, Fs.Preallocate.Counts.dir_entry, false, 128);

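A short hypothetical sketch, not part of the commit, of how the index-based fields above are meant to be used: instead of chasing raw parent pointers, callers resolve the stored indices through the shared BSS-backed map via getParent(). Only calls visible in this file are used; the import path is assumed.

// Assumed relative import; dir_info.zig itself also depends on ../global.zig.
const DirInfo = @import("dir_info.zig");

// Hypothetical helper: walk parent indices until the root (allocators.NotFound),
// resolving each one through HashMap.instance rather than a stored pointer.
fn countAncestors(start: *const DirInfo) usize {
    var depth: usize = 0;
    var current: ?*DirInfo = start.getParent();
    while (current) |dir| : (current = dir.getParent()) {
        depth += 1;
    }
    return depth;
}
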
@@ -664,7 +664,7 @@ pub fn joinStringBuf(buf: []u8, _parts: anytype, comptime _platform: Platform) [
    const platform = comptime _platform.resolve();

    for (_parts) |part| {
        if (part.len == 0 or (part.len == 1 and part[1] == '.')) {
        if (part.len == 0 or (part.len == 1 and part[0] == '.')) {
            continue;
        }

@@ -9,6 +9,7 @@ const sync = @import("../sync.zig");
const TSConfigJSON = @import("./tsconfig_json.zig").TSConfigJSON;
const PackageJSON = @import("./package_json.zig").PackageJSON;
usingnamespace @import("./data_url.zig");
pub const DirInfo = @import("./dir_info.zig");

const Wyhash = std.hash.Wyhash;

@@ -34,74 +35,6 @@ pub const SideEffectsData = struct {
    is_side_effects_array_in_json: bool = false,
};

pub const DirInfo = struct {
    pub const Index = allocators.IndexType;

    // These objects are immutable, so we can just point to the parent directory
    // and avoid having to lock the cache again
    parent: Index = allocators.NotFound,

    // A pointer to the enclosing dirInfo with a valid "browser" field in
    // package.json. We need this to remap paths after they have been resolved.
    enclosing_browser_scope: Index = allocators.NotFound,

    abs_path: string = "",
    entries: Index = undefined,
    has_node_modules: bool = false, // Is there a "node_modules" subdirectory?
    package_json: ?*PackageJSON = null, // Is there a "package.json" file?
    tsconfig_json: ?*TSConfigJSON = null, // Is there a "tsconfig.json" file in this directory or a parent directory?
    abs_real_path: string = "", // If non-empty, this is the real absolute path resolving any symlinks

    pub fn getFileDescriptor(dirinfo: *const DirInfo) StoredFileDescriptorType {
        if (!FeatureFlags.store_file_descriptors) {
            return 0;
        }

        if (dirinfo.getEntries()) |entries| {
            return entries.fd;
        } else {
            return 0;
        }
    }

    pub fn getEntries(dirinfo: *const DirInfo) ?*Fs.FileSystem.DirEntry {
        var entries_ptr = Fs.FileSystem.instance.fs.entries.atIndex(dirinfo.entries) orelse return null;
        switch (entries_ptr.*) {
            .entries => |entr| {
                return &entries_ptr.entries;
            },
            .err => {
                return null;
            },
        }
    }

    pub fn getEntriesConst(dirinfo: *const DirInfo) ?*const Fs.FileSystem.DirEntry {
        const entries_ptr = Fs.FileSystem.instance.fs.entries.atIndex(dirinfo.entries) orelse return null;
        switch (entries_ptr.*) {
            .entries => |entr| {
                return &entries_ptr.entries;
            },
            .err => {
                return null;
            },
        }
    }

    pub fn getParent(i: *const DirInfo) ?*DirInfo {
        return HashMap.instance.atIndex(i.parent);
    }
    pub fn getEnclosingBrowserScope(i: *const DirInfo) ?*DirInfo {
        return HashMap.instance.atIndex(i.enclosing_browser_scope);
    }

    // Goal: Really fast, low allocation directory map exploiting cache locality where we don't worry about lifetimes much.
    // 1. Don't store the keys or values of directories that don't exist
    // 2. Don't expect a provided key to exist after it's queried
    // 3. Store whether a directory has been queried and whether that query was successful.
    // 4. Allocate onto the https://en.wikipedia.org/wiki/.bss#BSS_in_C instead of the heap, so we can avoid memory leaks
    pub const HashMap = allocators.BSSMap(DirInfo, Fs.Preallocate.Counts.dir_entry, false, 128);
};
pub const TemporaryBuffer = struct {
    pub threadlocal var ExtensionPathBuf: [512]u8 = undefined;
    pub threadlocal var TSConfigMatchStarBuf: [512]u8 = undefined;

src/router.zig (111 changed lines)
@@ -6,12 +6,13 @@
const Router = @This();

const std = @import("std");
const DirInfo = @import("./resolver/resolver.zig").DirInfo;
usingnamespace @import("global.zig");

const DirInfo = @import("./resolver/dir_info.zig");
const Fs = @import("./fs.zig");
const Options = @import("./options.zig");
const allocators = @import("./allocators.zig");
const URLPath = @import("./http.zig").URLPath;
const URLPath = @import("./http/url_path.zig");

const index_route_hash = @truncate(u32, std.hash.Wyhash.hash(0, "index"));
const arbitrary_max_route = 4096;

@@ -118,13 +119,14 @@ pub fn loadRoutes(

            var route: Route = Route.parse(
                entry.base,
                entry.dir[this.config.dir.len..],
                Fs.PathName.init(entry.dir[this.config.dir.len..]).dirWithTrailingSlash(),
                "",
                entry_ptr.value,
            );

            route.parent = parent;
            route.children.offset = @truncate(u16, this.routes.routes.len);

            route.children.offset = @truncate(u16, this.routes.routes.len + 1);
            try this.routes.routes.append(this.allocator, route);

            // potential stack overflow!

@@ -136,7 +138,7 @@ pub fn loadRoutes(
                false,
            );

            this.routes.routes.items(.children)[route.children.offset].len = @truncate(u16, this.routes.routes.len) - route.children.offset;
            this.routes.routes.items(.children)[route.children.offset - 1].len = @truncate(u16, this.routes.routes.len) - route.children.offset;
        }
    },

@@ -149,7 +151,8 @@ pub fn loadRoutes(
            if (strings.eql(extname[1..], _extname)) {
                var route = Route.parse(
                    entry.base,
                    entry.dir[this.config.dir.len..],
                    // we extend the pointer length by one to get it's slash
                    entry.dir.ptr[this.config.dir.len..entry.dir.len],
                    extname,
                    entry_ptr.value,
                );

@@ -217,10 +220,14 @@ pub const Route = struct {
    pub const Ptr = TinyPtr;

    pub fn parse(base: string, dir: string, extname: string, entry_index: allocators.IndexType) Route {
        var parts = [_]string{ dir, base };
        const ensure_slash = if (dir.len > 0 and dir[dir.len - 1] != '/') "/" else "";

        var parts = [3]string{ dir, ensure_slash, base };
        // this isn't really absolute, it's relative to the pages dir
        const absolute = Fs.FileSystem.instance.abs(&parts);
        const absolute = Fs.FileSystem.instance.join(&parts);
        const name = base[0 .. base.len - extname.len];
        const start_index: usize = if (absolute[0] == '/') 1 else 0;
        var hash_path = absolute[start_index .. absolute.len - extname.len];

        return Route{
            .name = name,

@@ -237,7 +244,7 @@ pub const Route = struct {
                u32,
                std.hash.Wyhash.hash(
                    0,
                    absolute[0 .. absolute.len - extname.len],
                    hash_path,
                ),
            ),
            .part = RoutePart.parse(name),

@@ -326,8 +333,7 @@ pub const RouteMap = struct {
        redirect_path: ?string = "",
        url_path: URLPath,

        matched_route_name: PathBuilder = PathBuilder.init(),
        matched_route_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined,
        matched_route_buf: []u8 = undefined,

        file_path: string = "",

@@ -335,32 +341,19 @@ pub const RouteMap = struct {
            this: *MatchContext,
            head_i: u16,
            segment_i: u16,
        ) ?Match {
            if (this.segments.len == 0) return null;

            const _match = this._matchDynamicRoute(head_i, segment_i) orelse return null;
            this.matched_route_name.append("/");
            this.matched_route_name.append(_match.name);
            return _match;
        }

        fn _matchDynamicRoute(
            this: *MatchContext,
            head_i: u16,
            segment_i: u16,
        ) ?Match {
            const start_len = this.params.len;
            var head = this.map.routes.get(head_i);
            const segment: string = this.segments[segment_i];
            const remaining: []string = this.segments[segment_i..];
            const remaining: []string = this.segments[segment_i + 1 ..];

            if (remaining.len > 0 and head.children.len == 0) {
            if ((remaining.len > 0 and head.children.len == 0)) {
                return null;
            }

            switch (head.part.tag) {
                .exact => {
                    if (this.hashes[segment_i] != head.hash) {
                    // is it the end of an exact match?
                    if (!(this.hashes.len > segment_i and this.hashes[segment_i] == head.hash)) {
                        return null;
                    }
                },

@@ -391,16 +384,17 @@ pub const RouteMap = struct {
            } else {
                if (Fs.FileSystem.DirEntry.EntryStore.instance.at(head.entry_index)) |entry| {
                    var parts = [_]string{ entry.dir, entry.base };
                    const file_path = Fs.FileSystem.instance.absBuf(&parts, this.matched_route_buf);

                    match_result = Match{
                        .path = head.path,
                        .name = head.name,
                        .name = file_path,
                        .params = this.params,
                        .hash = head.full_hash,
                        .query_string = this.url_path.query_string,
                        .pathname = this.url_path.pathname,
                        .file_path = Fs.FileSystem.instance.absBuf(&parts, &this.matched_route_buf),
                        .basename = entry.base,
                        .file_path = file_path,
                    };

                    this.matched_route_buf[match_result.file_path.len] = 0;

@@ -414,7 +408,7 @@ pub const RouteMap = struct {
                        this.allocator,
                        Param{
                            .key = head.part.str(head.name),
                            .value = segment,
                            .value = this.segments[segment_i],
                            .kind = head.part.tag,
                        },
                    ) catch unreachable;

@@ -428,7 +422,7 @@ pub const RouteMap = struct {

        // This makes many passes over the list of routes
        // However, most of those passes are basically array.indexOf(number) and then smallerArray.indexOf(number)
        pub fn matchPage(this: *RouteMap, file_path_buf: []u8, url_path: URLPath, params: *Param.List) ?Match {
        pub fn matchPage(this: *RouteMap, routes_dir: string, file_path_buf: []u8, url_path: URLPath, params: *Param.List) ?Match {
            // Trim trailing slash
            var path = url_path.path;
            var redirect = false;

@@ -479,24 +473,25 @@ pub const RouteMap = struct {
            }

            const full_hash = @truncate(u32, std.hash.Wyhash.hash(0, path));
            const routes_slice = this.routes.slice();

            // Check for an exact match
            // These means there are no params.
            if (std.mem.indexOfScalar(u32, this.routes.items(.full_hash), full_hash)) |exact_match| {
            if (std.mem.indexOfScalar(u32, routes_slice.items(.full_hash), full_hash)) |exact_match| {
                const route = this.routes.get(exact_match);
                // It might be a folder with an index route
                // /bacon/index.js => /bacon
                if (route.children.len > 0) {
                    const children = this.routes.items(.hash)[route.children.offset .. route.children.offset + route.children.len];
                    const children = routes_slice.items(.hash)[route.children.offset .. route.children.offset + route.children.len];
                    for (children) |child_hash, i| {
                        if (child_hash == index_route_hash) {
                            const entry = Fs.FileSystem.DirEntry.EntryStore.instance.at(this.routes.items(.entry_index)[i + route.children.offset]).?;
                            const entry = Fs.FileSystem.DirEntry.EntryStore.instance.at(routes_slice.items(.entry_index)[i + route.children.offset]).?;
                            const parts = [_]string{ entry.dir, entry.base };

                            return Match{
                                .params = params,
                                .name = this.routes.items(.name)[i],
                                .path = this.routes.items(.path)[i],
                                .name = routes_slice.items(.name)[i],
                                .path = routes_slice.items(.path)[i],
                                .pathname = url_path.pathname,
                                .basename = entry.base,
                                .hash = child_hash,

@@ -528,22 +523,25 @@ pub const RouteMap = struct {
            var segments: []string = segments_buf[0..];
            var hashes: []u32 = segments_hash[0..];
            var segment_i: usize = 0;
            for (path) |i, c| {
                if (c == '/') {
                    // if the URL is /foo/./foo
                    // rewrite it as /foo/foo
                    segments[segment_i] = path[last_slash_i..i];
                    hashes[segment_i] = @truncate(u32, std.hash.Wyhash.hash(0, segments[segment_i]));

                    if (!(segments[segment_i].len == 1 and segments[segment_i][0] == '.')) {
                        segment_i += 1;
                    }

                    last_slash_i = i + 1;
                }
            var splitter = std.mem.tokenize(path, "/");
            while (splitter.next()) |part| {
                if (part.len == 0 or (part.len == 1 and part[0] == '.')) continue;
                segments[segment_i] = part;
                hashes[segment_i] = @truncate(u32, std.hash.Wyhash.hash(0, part));
                segment_i += 1;
            }
            segments = segments[0..segment_i];
            hashes = hashes[0..segment_i];

            // Now, we've established that there is no exact match.
            // Something will be dynamic
            // There are three tricky things about this.
            // 1. It's possible that the correct route is a catch-all route or an optional catch-all route.
            // 2. Given routes like this:
            //      * [name]/[id]
            //      * foo/[id]
            //    If the URL is /foo/123
            //    Then the correct route is foo/[id]
            var ctx = MatchContext{
                .params = params,
                .segments = segments,
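The hunk above replaces a hand-rolled slash scanner with std.mem.tokenize plus an explicit filter for "." segments. A standalone sketch of that splitting behavior, not from the commit, written against the two-argument tokenize signature this diff uses (later Zig releases renamed it to tokenizeScalar/tokenizeAny):

const std = @import("std");

pub fn main() void {
    // tokenize already skips the empty segments from leading, trailing, or
    // doubled slashes; "." segments are dropped explicitly, as in matchPage.
    var segments_buf: [16][]const u8 = undefined;
    var segment_i: usize = 0;

    var splitter = std.mem.tokenize("/foo/./123/", "/");
    while (splitter.next()) |part| {
        if (part.len == 0 or (part.len == 1 and part[0] == '.')) continue;
        segments_buf[segment_i] = part;
        segment_i += 1;
    }

    const segments = segments_buf[0..segment_i];
    std.debug.assert(segments.len == 2);
    std.debug.assert(std.mem.eql(u8, segments[0], "foo"));
    std.debug.assert(std.mem.eql(u8, segments[1], "123"));
}
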
@@ -552,11 +550,17 @@ pub const RouteMap = struct {
                .redirect_path = if (redirect) path else null,
                .allocator = this.allocator,
                .url_path = url_path,
                .matched_route_buf = file_path_buf,
            };

            if (ctx.matchDynamicRoute(0, 0)) |_dynamic_route| {
                // route name == the filesystem path relative to the pages dir excluding the file extension
                var dynamic_route = _dynamic_route;
                dynamic_route.name = ctx.matched_route_name.str();
                dynamic_route.name = dynamic_route.name[this.config.dir.len..];
                dynamic_route.name = dynamic_route.name[0 .. dynamic_route.name.len - std.fs.path.extension(dynamic_route.file_path).len];
                std.debug.assert(dynamic_route.name.len > 0);
                if (dynamic_route.name[0] == '/') dynamic_route.name = dynamic_route.name[1..];

                return dynamic_route;
            }

@@ -649,7 +653,8 @@ pub fn match(app: *Router, server: anytype, comptime RequestContextType: type, c
    }

    params_list.shrinkRetainingCapacity(0);
    if (app.routes.matchPage(&ctx.match_file_path_buf, ctx.url, &params_list)) |route| {
    var filepath_buf = std.mem.span(&ctx.match_file_path_buf);
    if (app.routes.matchPage(app.config.dir, filepath_buf, ctx.url, &params_list)) |route| {
        if (route.redirect_path) |redirect| {
            try ctx.handleRedirect(redirect);
            return;

@@ -664,7 +669,7 @@ pub fn match(app: *Router, server: anytype, comptime RequestContextType: type, c
    }

    ctx.matched_route = route;
    RequestContextType.JavaScriptHandler.enqueue(ctx, server) catch {
    RequestContextType.JavaScriptHandler.enqueue(ctx, server, filepath_buf) catch {
        server.javascript_enabled = false;
    };
}

@@ -12,16 +12,14 @@ pub const eql = std.meta.eql;
pub fn NewStringBuilder(comptime size: usize) type {
    return struct {
        const This = @This();
        buffer: [size + 1]u8 = undefined,
        buffer: [*]u8 = undefined,
        remain: []u8 = undefined,

        pub fn init() This {
            var instance = This{};
            instance.load();
            return instance;
            return This{};
        }

        fn load(this: *This) void {
        pub fn load(this: *This) void {
            this.remain = (&this.buffer)[0..size];
        }
