mirror of https://github.com/oven-sh/bun

Add FileRoute for serving files (#20198)

Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
Co-authored-by: graphite-app[bot] <96075541+graphite-app[bot]@users.noreply.github.com>
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
@@ -56,6 +56,7 @@ src/bun.js/api/JSBundler.zig
 src/bun.js/api/JSTranspiler.zig
 src/bun.js/api/server.zig
 src/bun.js/api/server/AnyRequestContext.zig
+src/bun.js/api/server/FileRoute.zig
 src/bun.js/api/server/HTMLBundle.zig
 src/bun.js/api/server/HTTPStatusText.zig
 src/bun.js/api/server/InspectorBunFrontendDevServerAgent.zig
@@ -473,6 +474,7 @@ src/fd.zig
 src/feature_flags.zig
 src/fmt.zig
 src/fs.zig
+src/fs/stat_hash.zig
 src/futex.zig
 src/generated_perf_trace_events.zig
 src/generated_versions_list.zig
@@ -260,7 +260,7 @@ public:
     * since written < buffer_len is very likely to be true
     */
    if (written < max_flush_len) {
        [[likely]]
        /* Cannot write more at this time, return what we've written so far */
        return total_written;
    }
@@ -456,10 +456,9 @@ private:
     size_t bufferedAmount = asyncSocket->getBufferedAmount();
     if (bufferedAmount > 0) {
         /* Try to flush pending data from the socket's buffer to the network */
-        bufferedAmount -= asyncSocket->flush();
+        asyncSocket->flush();
         /* Check if there's still data waiting to be sent after flush attempt */
-        if (bufferedAmount > 0) {
+        if (asyncSocket->getBufferedAmount() > 0) {
             /* Socket buffer is not completely empty yet
              * - Reset the timeout to prevent premature connection closure
              * - This allows time for another writable event or new request
@@ -498,6 +497,7 @@ private:
    if (httpResponseData->state & HttpResponseData<SSL>::HTTP_CONNECTION_CLOSE) {
        if ((httpResponseData->state & HttpResponseData<SSL>::HTTP_RESPONSE_PENDING) == 0) {
            if (asyncSocket->getBufferedAmount() == 0) {
                asyncSocket->shutdown();
                /* We need to force close after sending FIN since we want to hinder
                 * clients from keeping to send their huge data */
@@ -112,7 +112,7 @@ public:
      * one party must tell the other one so.
      *
      * This check also serves to limit writing the header only once. */
-    if ((httpResponseData->state & HttpResponseData<SSL>::HTTP_CONNECTION_CLOSE) == 0) {
+    if ((httpResponseData->state & HttpResponseData<SSL>::HTTP_CONNECTION_CLOSE) == 0 && !(httpResponseData->state & (HttpResponseData<SSL>::HTTP_WRITE_CALLED))) {
         writeHeader("Connection", "close");
     }
@@ -132,7 +132,6 @@ public:
    /* Terminating 0 chunk */
    Super::write("0\r\n\r\n", 5);

    httpResponseData->markDone();

    /* We need to check if we should close this socket here now */
@@ -586,7 +585,6 @@ public:
    if (writtenPtr) {
        *writtenPtr = total_written;
    }

    /* If we did not fail the write, accept more */
    return !has_failed;
}
@@ -118,7 +118,6 @@ public:
    time_t now = time(0);
    struct tm tstruct = {};
#ifdef _WIN32
    /* Micro, fucking soft never follows spec. */
    gmtime_s(&tstruct, &now);
#else
    gmtime_r(&now, &tstruct);
@@ -61,6 +61,7 @@ pub fn writeStatus(comptime ssl: bool, resp_ptr: ?*uws.NewApp(ssl).Response, sta

 // TODO: rename to StaticBlobRoute? the html bundle is sometimes a static route
 pub const StaticRoute = @import("./server/StaticRoute.zig");
+pub const FileRoute = @import("./server/FileRoute.zig");

 const HTMLBundle = JSC.API.HTMLBundle;
@@ -68,6 +69,8 @@ pub const AnyRoute = union(enum) {
     /// Serve a static file
     /// "/robots.txt": new Response(...),
     static: *StaticRoute,
+    /// Serve a file from disk
+    file: *FileRoute,
     /// Bundle an HTML import
     /// import html from "./index.html";
     /// "/": html,
@@ -82,6 +85,7 @@ pub const AnyRoute = union(enum) {
     pub fn memoryCost(this: AnyRoute) usize {
         return switch (this) {
             .static => |static_route| static_route.memoryCost(),
+            .file => |file_route| file_route.memoryCost(),
             .html => |html_bundle_route| html_bundle_route.data.memoryCost(),
             .framework_router => @sizeOf(bun.bake.Framework.FileSystemRouterType),
         };
@@ -90,6 +94,7 @@ pub const AnyRoute = union(enum) {
     pub fn setServer(this: AnyRoute, server: ?AnyServer) void {
         switch (this) {
             .static => |static_route| static_route.server = server,
+            .file => |file_route| file_route.server = server,
             .html => |html_bundle_route| html_bundle_route.server = server,
             .framework_router => {}, // DevServer contains .server field
         }
@@ -98,6 +103,7 @@ pub const AnyRoute = union(enum) {
     pub fn deref(this: AnyRoute) void {
         switch (this) {
             .static => |static_route| static_route.deref(),
+            .file => |file_route| file_route.deref(),
             .html => |html_bundle_route| html_bundle_route.deref(),
             .framework_router => {}, // not reference counted
         }
@@ -106,6 +112,7 @@ pub const AnyRoute = union(enum) {
     pub fn ref(this: AnyRoute) void {
         switch (this) {
             .static => |static_route| static_route.ref(),
+            .file => |file_route| file_route.ref(),
             .html => |html_bundle_route| html_bundle_route.ref(),
             .framework_router => {}, // not reference counted
         }
@@ -182,6 +189,9 @@ pub const AnyRoute = union(enum) {
             }
         }

+        if (try FileRoute.fromJS(global, argument)) |file_route| {
+            return .{ .file = file_route };
+        }
         return .{ .static = try StaticRoute.fromJS(global, argument) orelse return null };
     }
 };
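With this dispatch in place, any route value whose body is a file on disk becomes a FileRoute; everything else still goes through StaticRoute. A minimal sketch of the user-facing difference, using the same route syntax as the test file at the bottom of this diff:

```ts
import { serve, file } from "bun";

const server = serve({
  port: 0,
  routes: {
    // File-backed body -> FileRoute: opened, fstat'ed, and streamed per request.
    "/hello.txt": new Response(file("./hello.txt")),
    // In-memory body -> StaticRoute: buffered once and served from memory.
    "/robots.txt": new Response("User-agent: *\nDisallow:"),
  },
  fetch: () => new Response("fallback"),
});
console.log(`listening on ${server.url}`);
```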
@@ -2511,6 +2521,9 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d
             .static => |static_route| {
                 ServerConfig.applyStaticRoute(any_server, ssl_enabled, app, *StaticRoute, static_route, entry.path, entry.method);
             },
+            .file => |file_route| {
+                ServerConfig.applyStaticRoute(any_server, ssl_enabled, app, *FileRoute, file_route, entry.path, entry.method);
+            },
             .html => |html_bundle_route| {
                 ServerConfig.applyStaticRoute(any_server, ssl_enabled, app, *HTMLBundle.Route, html_bundle_route.data, entry.path, entry.method);
                 if (dev_server) |dev| {
src/bun.js/api/server/FileRoute.zig (new file, 586 lines)
@@ -0,0 +1,586 @@
const FileRoute = @This();

ref_count: RefCount,
server: ?AnyServer = null,
blob: Blob,
headers: Headers = .{ .allocator = bun.default_allocator },
status_code: u16,
stat_hash: bun.fs.StatHash = .{},
has_last_modified_header: bool,
has_content_length_header: bool,

pub const InitOptions = struct {
    server: ?AnyServer,
    status_code: u16 = 200,
};
pub fn lastModifiedDate(this: *const FileRoute) ?u64 {
    if (this.has_last_modified_header) {
        if (this.headers.get("last-modified")) |last_modified| {
            var string = bun.String.init(last_modified);
            defer string.deref();
            const date_f64 = bun.String.parseDate(&string, bun.JSC.VirtualMachine.get().global);
            if (!std.math.isNan(date_f64) and std.math.isFinite(date_f64)) {
                return @intFromFloat(date_f64);
            }
        }
    }

    if (this.stat_hash.last_modified_u64 > 0) {
        return this.stat_hash.last_modified_u64;
    }

    return null;
}
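lastModifiedDate() gives an explicit Last-Modified header priority over the cached stat mtime, so conditional requests are validated against whichever date the route actually advertises. Observable behavior, assuming a server configured like the test suite below (which registers /custom-last-modified.txt with a fixed header):

```ts
// The route sets "Last-Modified: Wed, 21 Oct 2015 07:28:00 GMT" explicitly,
// so a later If-Modified-Since date yields 304 regardless of the real file mtime.
const res = await fetch(new URL("/custom-last-modified.txt", server.url), {
  headers: { "If-Modified-Since": "Thu, 22 Oct 2015 07:28:00 GMT" },
});
console.log(res.status); // 304
```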
pub fn initFromBlob(blob: Blob, opts: InitOptions) *FileRoute {
    const headers = Headers.from(null, bun.default_allocator, .{ .body = &.{ .Blob = blob } }) catch bun.outOfMemory();
    return bun.new(FileRoute, .{
        .ref_count = .init(),
        .server = opts.server,
        .blob = blob,
        .headers = headers,
        .status_code = opts.status_code,
    });
}

fn deinit(this: *FileRoute) void {
    this.blob.deinit();
    this.headers.deinit();
    bun.destroy(this);
}

pub fn memoryCost(this: *const FileRoute) usize {
    return @sizeOf(FileRoute) + this.headers.memoryCost() + this.blob.reported_estimated_size;
}
pub fn fromJS(globalThis: *JSC.JSGlobalObject, argument: JSC.JSValue) bun.JSError!?*FileRoute {
    if (argument.as(JSC.WebCore.Response)) |response| {
        response.body.value.toBlobIfPossible();
        if (response.body.value == .Blob and response.body.value.Blob.needsToReadFile()) {
            if (response.body.value.Blob.store.?.data.file.pathlike == .fd) {
                return globalThis.throwTODO("Support serving files from a file descriptor. Please pass a path instead.");
            }

            var blob = response.body.value.use();

            blob.globalThis = globalThis;
            blob.allocator = null;
            response.body.value = .{ .Blob = blob.dupe() };
            const headers = Headers.from(response.init.headers, bun.default_allocator, .{ .body = &.{ .Blob = blob } }) catch bun.outOfMemory();

            return bun.new(FileRoute, .{
                .ref_count = .init(),
                .server = null,
                .blob = blob,
                .headers = headers,
                .has_last_modified_header = headers.get("last-modified") != null,
                .has_content_length_header = headers.get("content-length") != null,
                .status_code = response.statusCode(),
            });
        }
    }
    if (argument.as(Blob)) |blob| {
        if (blob.needsToReadFile()) {
            var b = blob.dupe();
            b.globalThis = globalThis;
            b.allocator = null;
            return bun.new(FileRoute, .{
                .ref_count = .init(),
                .server = null,
                .blob = b,
                .headers = Headers.from(null, bun.default_allocator, .{ .body = &.{ .Blob = b } }) catch bun.outOfMemory(),
                .has_content_length_header = false,
                .has_last_modified_header = false,
                .status_code = 200,
            });
        }
    }
    return null;
}
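fromJS only accepts path-backed file blobs: a Response or Blob that doesn't need to read a file returns null (falling through to StaticRoute), and a file-descriptor-backed body is rejected outright via the throwTODO above. A sketch of both outcomes — the fd case uses Bun.file(fd), and the error wording follows the throwTODO call:

```ts
import { serve, file } from "bun";
import { openSync } from "node:fs";

// Path-backed: accepted and served by FileRoute.
serve({ port: 0, routes: { "/ok.txt": new Response(file("./ok.txt")) } }).stop(true);

// fd-backed: hits the `pathlike == .fd` branch and throws while setting up routes.
const fd = openSync("./ok.txt", "r");
try {
  serve({ port: 0, routes: { "/fd.txt": new Response(file(fd)) } });
} catch (err) {
  console.log((err as Error).message); // "Support serving files from a file descriptor. Please pass a path instead."
}
```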
fn writeHeaders(this: *FileRoute, resp: AnyResponse) void {
    const entries = this.headers.entries.slice();
    const names = entries.items(.name);
    const values = entries.items(.value);
    const buf = this.headers.buf.items;

    switch (resp) {
        inline .SSL, .TCP => |s| {
            for (names, values) |name, value| {
                s.writeHeader(name.slice(buf), value.slice(buf));
            }
        },
    }

    if (!this.has_last_modified_header) {
        if (this.stat_hash.lastModified()) |last_modified| {
            resp.writeHeader("last-modified", last_modified);
        }
    }

    if (this.has_content_length_header) {
        resp.markWroteContentLengthHeader();
    }
}

fn writeStatusCode(_: *FileRoute, status: u16, resp: AnyResponse) void {
    switch (resp) {
        .SSL => |r| writeStatus(true, r, status),
        .TCP => |r| writeStatus(false, r, status),
    }
}
pub fn onHEADRequest(this: *FileRoute, req: *uws.Request, resp: AnyResponse) void {
    bun.debugAssert(this.server != null);

    this.on(req, resp, .HEAD);
}

pub fn onRequest(this: *FileRoute, req: *uws.Request, resp: AnyResponse) void {
    this.on(req, resp, bun.http.Method.find(req.method()) orelse .GET);
}

pub fn on(this: *FileRoute, req: *uws.Request, resp: AnyResponse, method: bun.http.Method) void {
    bun.debugAssert(this.server != null);
    this.ref();
    if (this.server) |server| {
        server.onPendingRequest();
        resp.timeout(server.config().idleTimeout);
    }
    const path = this.blob.store.?.getPath() orelse {
        req.setYield(true);
        this.deref();
        return;
    };

    const open_flags = bun.O.RDONLY | bun.O.CLOEXEC | bun.O.NONBLOCK;

    const fd_result = brk: {
        if (bun.Environment.isWindows) {
            var path_buffer: bun.PathBuffer = undefined;
            @memcpy(path_buffer[0..path.len], path);
            path_buffer[path.len] = 0;
            break :brk bun.sys.open(
                path_buffer[0..path.len :0],
                open_flags,
                0,
            );
        }
        break :brk bun.sys.openA(
            path,
            open_flags,
            0,
        );
    };
    if (fd_result == .err) {
        req.setYield(true);
        this.deref();
        return;
    }

    const fd = fd_result.result;

    const input_if_modified_since_date: ?u64 = req.dateForHeader("if-modified-since");

    const can_serve_file: bool, const size: u64, const file_type: bun.io.FileType, const pollable: bool = brk: {
        const stat = switch (bun.sys.fstat(fd)) {
            .result => |s| s,
            .err => break :brk .{ false, 0, undefined, false },
        };

        const stat_size: u64 = @intCast(@max(stat.size, 0));
        const _size: u64 = @min(stat_size, @as(u64, this.blob.size));

        if (bun.S.ISDIR(@intCast(stat.mode))) {
            break :brk .{ false, 0, undefined, false };
        }

        this.stat_hash.hash(stat, path);

        if (bun.S.ISFIFO(@intCast(stat.mode)) or bun.S.ISCHR(@intCast(stat.mode))) {
            break :brk .{ true, _size, .pipe, true };
        }

        if (bun.S.ISSOCK(@intCast(stat.mode))) {
            break :brk .{ true, _size, .socket, true };
        }

        break :brk .{ true, _size, .file, false };
    };

    if (!can_serve_file) {
        bun.Async.Closer.close(fd, if (bun.Environment.isWindows) bun.windows.libuv.Loop.get());
        req.setYield(true);
        this.deref();
        return;
    }

    const status_code: u16 = brk: {
        // Unlike If-Unmodified-Since, If-Modified-Since can only be used with a
        // GET or HEAD. When used in combination with If-None-Match, it is
        // ignored, unless the server doesn't support If-None-Match.
        if (input_if_modified_since_date) |requested_if_modified_since| {
            if (method == .HEAD or method == .GET) {
                if (this.lastModifiedDate()) |actual_last_modified_at| {
                    if (actual_last_modified_at <= requested_if_modified_since) {
                        break :brk 304;
                    }
                }
            }
        }

        if (size == 0 and file_type == .file and this.status_code == 200) {
            break :brk 204;
        }

        break :brk this.status_code;
    };

    req.setYield(false);

    this.writeStatusCode(status_code, resp);
    resp.writeMark();
    this.writeHeaders(resp);

    switch (status_code) {
        204, 205, 304, 307, 308 => {
            resp.endWithoutBody(resp.shouldCloseConnection());
            this.deref();
            return;
        },
        else => {},
    }

    if (file_type == .file and !resp.state().hasWrittenContentLengthHeader()) {
        resp.writeHeaderInt("content-length", size);
        resp.markWroteContentLengthHeader();
    }

    if (method == .HEAD) {
        resp.endWithoutBody(resp.shouldCloseConnection());
        this.deref();
        return;
    }

    const transfer = StreamTransfer.create(fd, resp, this, pollable, file_type != .file, file_type);
    transfer.start(
        if (file_type == .file) this.blob.offset else 0,
        if (file_type == .file and this.blob.size > 0) @intCast(size) else null,
    );
}
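The status override above is visible from the client: GET/HEAD with a fresh If-Modified-Since short-circuits to 304 before any body work, and a zero-byte regular file with the default 200 collapses to 204 (no body, no Content-Length). Matching the tests at the end of this diff:

```ts
// 304: the file hasn't changed since the supplied date (GET/HEAD only).
const first = await fetch(new URL("/hello.txt", server.url));
const since = new Date(Date.parse(first.headers.get("Last-Modified")!) + 10_000);
const conditional = await fetch(new URL("/hello.txt", server.url), {
  headers: { "If-Modified-Since": since.toUTCString() },
});
console.log(conditional.status); // 304

// 204: an empty file with the default status is rewritten to No Content.
const empty = await fetch(new URL("/empty.txt", server.url));
console.log(empty.status, empty.headers.get("Content-Length")); // 204 null
```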
fn onResponseComplete(this: *FileRoute, resp: AnyResponse) void {
    resp.clearAborted();
    resp.clearOnWritable();
    resp.clearTimeout();
    if (this.server) |server| {
        server.onStaticRequestComplete();
    }
    this.deref();
}
const std = @import("std");
|
||||
const bun = @import("bun");
|
||||
const JSC = bun.JSC;
|
||||
const uws = bun.uws;
|
||||
const Headers = bun.http.Headers;
|
||||
const AnyServer = JSC.API.AnyServer;
|
||||
const Blob = JSC.WebCore.Blob;
|
||||
const writeStatus = @import("../server.zig").writeStatus;
|
||||
const AnyResponse = uws.AnyResponse;
|
||||
const Async = bun.Async;
|
||||
const FileType = bun.io.FileType;
|
||||
const Output = bun.Output;
|
||||
|
||||
const StreamTransfer = struct {
    reader: bun.io.BufferedReader = bun.io.BufferedReader.init(StreamTransfer),
    fd: bun.FileDescriptor,
    resp: AnyResponse,
    route: *FileRoute,

    defer_deinit: ?*bool = null,
    max_size: ?u64 = null,

    state: packed struct(u8) {
        waiting_for_readable: bool = false,
        waiting_for_writable: bool = false,
        has_ended_response: bool = false,
        has_reader_closed: bool = false,
        _: u4 = 0,
    } = .{},

    const log = Output.scoped(.StreamTransfer, false);
    pub fn create(
        fd: bun.FileDescriptor,
        resp: AnyResponse,
        route: *FileRoute,
        pollable: bool,
        nonblocking: bool,
        file_type: FileType,
    ) *StreamTransfer {
        var t = bun.new(StreamTransfer, .{
            .fd = fd,
            .resp = resp,
            .route = route,
        });
        t.reader.flags.close_handle = true;
        t.reader.flags.pollable = pollable;
        t.reader.flags.nonblocking = nonblocking;
        if (comptime bun.Environment.isPosix) {
            if (file_type == .socket) {
                t.reader.flags.socket = true;
            }
        }
        t.reader.setParent(t);
        return t;
    }
    fn start(this: *StreamTransfer, start_offset: usize, size: ?usize) void {
        log("start", .{});

        var scope: DeinitScope = undefined;
        scope.enter(this);
        defer scope.exit();

        this.state.waiting_for_readable = true;
        this.state.waiting_for_writable = true;
        this.max_size = size;

        switch (if (start_offset > 0)
            this.reader.startFileOffset(this.fd, this.reader.flags.pollable, start_offset)
        else
            this.reader.start(this.fd, this.reader.flags.pollable)) {
            .err => {
                this.finish();
                return;
            },
            .result => {},
        }

        this.reader.updateRef(true);

        if (bun.Environment.isPosix) {
            if (this.reader.handle.getPoll()) |poll| {
                if (this.reader.flags.nonblocking) {
                    poll.flags.insert(.nonblocking);
                }

                switch (this.reader.getFileType()) {
                    .socket => poll.flags.insert(.socket),
                    .nonblocking_pipe, .pipe => poll.flags.insert(.fifo),
                    .file => {},
                }
            }
        }
        // the socket may have been open for some time before this, so reset the timeout here
        if (this.route.server) |server| {
            this.resp.timeout(server.config().idleTimeout);
        }
        this.reader.read();

        if (!scope.deinit_called) {
            // This clones some data so we could avoid that if we're already done.
            this.resp.onAborted(*StreamTransfer, onAborted, this);
        }
    }
    pub fn onReadChunk(this: *StreamTransfer, chunk_: []const u8, state_: bun.io.ReadState) bool {
        log("onReadChunk", .{});

        var scope: DeinitScope = undefined;
        scope.enter(this);
        defer scope.exit();

        if (this.state.has_ended_response) {
            this.state.waiting_for_readable = false;
            return false;
        }

        const chunk, const state = brk: {
            if (this.max_size) |*max_size| {
                const chunk = chunk_[0..@min(chunk_.len, max_size.*)];
                max_size.* -|= chunk.len;
                if (state_ != .eof and max_size.* == 0) {
                    break :brk .{ chunk, .eof };
                }

                break :brk .{ chunk_, state_ };
            }

            break :brk .{ chunk_, state_ };
        };

        if (state == .eof and !this.state.waiting_for_writable) {
            this.state.waiting_for_readable = false;
            this.state.has_ended_response = true;
            const resp = this.resp;
            const route = this.route;
            route.onResponseComplete(resp);
            resp.end(chunk, resp.shouldCloseConnection());
            log("end: {}", .{chunk.len});
            return false;
        }

        if (this.route.server) |server| {
            this.resp.timeout(server.config().idleTimeout);
        }

        switch (this.resp.write(chunk)) {
            .backpressure => {
                this.resp.onWritable(*StreamTransfer, onWritable, this);
                this.reader.pause();
                this.resp.markNeedsMore();
                this.state.waiting_for_writable = true;
                this.state.waiting_for_readable = false;
                return false;
            },
            .want_more => {
                this.state.waiting_for_readable = true;
                this.state.waiting_for_writable = false;

                if (state == .eof) {
                    this.state.waiting_for_readable = false;
                    return false;
                }

                if (bun.Environment.isWindows)
                    this.reader.unpause();

                return true;
            },
        }
    }
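onReadChunk is the heart of the transfer: a write that reports backpressure pauses the reader and arms onWritable, a write that wants more keeps the read loop going, and EOF ends the response on whichever side finishes last. A hypothetical TypeScript sketch of that handshake — the Reader/Resp interfaces and the "backpressure" | "want_more" result are stand-ins for the Zig BufferedReader and uws.AnyResponse, not real Bun APIs:

```ts
type WriteResult = "backpressure" | "want_more";

// Stand-ins for the Zig reader/response pair; invented for illustration.
interface Reader { pause(): void; read(): void; }
interface Resp { write(chunk: Uint8Array): WriteResult; onWritable(cb: () => void): void; end(): void; }

// Returns true if the reader should keep delivering chunks.
function onReadChunk(reader: Reader, resp: Resp, chunk: Uint8Array, eof: boolean): boolean {
  switch (resp.write(chunk)) {
    case "backpressure":
      // Socket buffer is full: stop reading until it drains, then resume.
      resp.onWritable(() => reader.read());
      reader.pause();
      return false;
    case "want_more":
      if (eof) {
        resp.end(); // the last chunk has already been written
        return false;
      }
      return true; // keep reading
  }
}
```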
    pub fn onReaderDone(this: *StreamTransfer) void {
        log("onReaderDone", .{});
        this.state.waiting_for_readable = false;
        this.state.has_reader_closed = true;

        var scope: DeinitScope = undefined;
        scope.enter(this);
        defer scope.exit();

        this.finish();
    }

    pub fn onReaderError(this: *StreamTransfer, err: bun.sys.Error) void {
        log("onReaderError {any}", .{err});
        this.state.waiting_for_readable = false;

        var scope: DeinitScope = undefined;
        scope.enter(this);
        defer scope.exit();

        this.finish();
    }

    pub fn eventLoop(this: *StreamTransfer) JSC.EventLoopHandle {
        return JSC.EventLoopHandle.init(this.route.server.?.vm().eventLoop());
    }

    pub fn loop(this: *StreamTransfer) *Async.Loop {
        return this.eventLoop().loop();
    }
    fn onWritable(this: *StreamTransfer, _: u64, _: AnyResponse) bool {
        log("onWritable", .{});

        var scope: DeinitScope = undefined;
        scope.enter(this);
        defer scope.exit();

        if (this.reader.isDone()) {
            @branchHint(.unlikely);
            log("finish inside onWritable", .{});
            this.finish();
            return true;
        }

        // reset the socket timeout before reading more data
        if (this.route.server) |server| {
            this.resp.timeout(server.config().idleTimeout);
        }

        this.state.waiting_for_writable = false;
        this.state.waiting_for_readable = true;
        this.reader.read();
        return true;
    }
    fn finish(this: *StreamTransfer) void {
        log("finish", .{});
        this.resp.clearOnWritable();
        this.resp.clearAborted();
        this.resp.clearTimeout();

        if (!this.state.has_ended_response) {
            this.state.has_ended_response = true;
            this.state.waiting_for_writable = false;
            const resp = this.resp;
            const route = this.route;
            route.onResponseComplete(resp);
            log("endWithoutBody", .{});
            resp.endWithoutBody(resp.shouldCloseConnection());
        }

        if (!this.state.has_reader_closed) {
            this.reader.close();
            return;
        }

        this.deinit();
    }
    fn onAborted(this: *StreamTransfer, _: AnyResponse) void {
        log("onAborted", .{});
        var scope: DeinitScope = undefined;
        scope.enter(this);
        defer scope.exit();

        this.finish();
    }

    fn deinit(this: *StreamTransfer) void {
        if (this.defer_deinit) |defer_deinit| {
            defer_deinit.* = true;
            log("deinit deferred", .{});
            return;
        }

        log("deinit", .{});
        this.reader.deinit();
        bun.destroy(this);
    }
};
const DeinitScope = struct {
    stream: *StreamTransfer,
    prev_defer_deinit: ?*bool,
    deinit_called: bool = false,

    /// This has to be an instance method to avoid a use-after-stack.
    pub fn enter(this: *DeinitScope, stream: *StreamTransfer) void {
        this.stream = stream;
        this.deinit_called = false;
        this.prev_defer_deinit = this.stream.defer_deinit;
        if (this.prev_defer_deinit == null) {
            this.stream.defer_deinit = &this.deinit_called;
        }
    }

    pub fn exit(this: *DeinitScope) void {
        if (this.prev_defer_deinit == null and &this.deinit_called == this.stream.defer_deinit) {
            this.stream.defer_deinit = this.prev_defer_deinit;

            if (this.deinit_called) {
                this.stream.deinit();
            }
        }
    }
};
const RefCount = bun.ptr.RefCount(@This(), "ref_count", deinit, .{});
pub const ref = RefCount.ref;
pub const deref = RefCount.deref;
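DeinitScope exists because finish() and deinit() can be reached re-entrantly from inside reader and response callbacks: while any scope is on the stack, deinit() merely records the request, and the outermost scope performs the real teardown on exit. A hypothetical TypeScript rendering of the same guard (class and method names invented for illustration):

```ts
// Hypothetical sketch of the defer_deinit reentrancy guard used above.
class Transfer {
  private deferDeinit: { called: boolean } | null = null;

  deinit(): void {
    if (this.deferDeinit) {
      this.deferDeinit.called = true; // a scope is active: defer teardown
      return;
    }
    /* ...free the reader and destroy self here... */
  }

  // Equivalent of scope.enter()/scope.exit() wrapped around a callback.
  withScope(cb: () => void): void {
    const outermost = this.deferDeinit === null;
    const flag = this.deferDeinit ?? { called: false };
    if (outermost) this.deferDeinit = flag;
    try {
      cb();
    } finally {
      if (outermost) {
        this.deferDeinit = null;
        if (flag.called) this.deinit(); // run the deferred teardown exactly once
      }
    }
  }
}
```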
@@ -17,4 +17,19 @@ pub const WTF = struct {
         return error.InvalidCharacter;
     return res;
 }

+extern fn Bun__writeHTTPDate(buffer: *[32]u8, length: usize, timestampMs: u64) c_int;
+
+pub fn writeHTTPDate(buffer: *[32]u8, timestampMs: u64) []u8 {
+    if (timestampMs == 0) {
+        return buffer[0..0];
+    }
+
+    const res = Bun__writeHTTPDate(buffer, 32, timestampMs);
+    if (res < 1) {
+        return buffer[0..0];
+    }
+
+    return buffer[0..@intCast(res)];
+}
 };
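The buffer is 32 bytes because an IMF-fixdate is always 29 characters; the format matches what JavaScript's Date#toUTCString produces, which is also what the Last-Modified tests below assert against:

```ts
const stamp = new Date(1445412480000).toUTCString();
console.log(stamp); // "Wed, 21 Oct 2015 07:28:00 GMT" — 29 characters
console.log(/^[A-Za-z]{3}, \d{2} [A-Za-z]{3} \d{4} \d{2}:\d{2}:\d{2} GMT$/.test(stamp)); // true
```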
@@ -987,4 +987,34 @@ CrossOriginResourcePolicy parseCrossOriginResourcePolicyHeader(StringView header
     return CrossOriginResourcePolicy::Invalid;
 }

+extern "C" int Bun__writeHTTPDate(char* buffer, size_t length, uint64_t timestampMs)
+{
+    if (timestampMs == 0) {
+        return 0;
+    }
+
+    time_t timestamp = timestampMs / 1000;
+    struct tm tstruct = {};
+#ifdef _WIN32
+    gmtime_s(&tstruct, &timestamp);
+#else
+    gmtime_r(&timestamp, &tstruct);
+#endif
+    static const char wday_name[][4] = {
+        "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"
+    };
+    static const char mon_name[][4] = {
+        "Jan", "Feb", "Mar", "Apr", "May", "Jun",
+        "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"
+    };
+    return snprintf(buffer, length, "%.3s, %.2u %.3s %.4u %.2u:%.2u:%.2u GMT",
+        wday_name[tstruct.tm_wday],
+        tstruct.tm_mday % 99,
+        mon_name[tstruct.tm_mon],
+        (1900 + tstruct.tm_year) % 9999,
+        tstruct.tm_hour % 99,
+        tstruct.tm_min % 99,
+        tstruct.tm_sec % 99);
+}

 }
@@ -1212,6 +1212,26 @@ extern "C"
     }
 }

+void uws_res_mark_wrote_content_length_header(int ssl, uws_res_r res) {
+    if (ssl) {
+        uWS::HttpResponse<true> *uwsRes = (uWS::HttpResponse<true> *)res;
+        uwsRes->getHttpResponseData()->state |= uWS::HttpResponseData<true>::HTTP_WROTE_CONTENT_LENGTH_HEADER;
+    } else {
+        uWS::HttpResponse<false> *uwsRes = (uWS::HttpResponse<false> *)res;
+        uwsRes->getHttpResponseData()->state |= uWS::HttpResponseData<false>::HTTP_WROTE_CONTENT_LENGTH_HEADER;
+    }
+}
+
+void uws_res_write_mark(int ssl, uws_res_r res) {
+    if (ssl) {
+        uWS::HttpResponse<true> *uwsRes = (uWS::HttpResponse<true> *)res;
+        uwsRes->writeMark();
+    } else {
+        uWS::HttpResponse<false> *uwsRes = (uWS::HttpResponse<false> *)res;
+        uwsRes->writeMark();
+    }
+}
+
 void uws_res_write_header(int ssl, uws_res_r res, const char *key,
                           size_t key_length, const char *value,
                           size_t value_length)
@@ -25,6 +25,17 @@ pub const Request = opaque {
         if (len == 0) return null;
         return ptr[0..len];
     }
+    pub fn dateForHeader(req: *Request, name: []const u8) ?u64 {
+        const value = header(req, name);
+        if (value == null) return null;
+        var string = bun.String.init(value.?);
+        defer string.deref();
+        const date_f64 = bun.String.parseDate(&string, bun.JSC.VirtualMachine.get().global);
+        if (!std.math.isNan(date_f64) and std.math.isFinite(date_f64)) {
+            return @intFromFloat(date_f64);
+        }
+        return null;
+    }
     pub fn query(req: *Request, name: []const u8) []const u8 {
         var ptr: [*]const u8 = undefined;
         return ptr[0..c.uws_req_get_query(req, name.ptr, name.len, &ptr)];
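dateForHeader reuses the engine's date parser, so If-Modified-Since accepts anything Date.parse does, including the ISO strings the tests send; unparseable values come back as null and the header is ignored. The JS-side equivalent of the validity check:

```ts
console.log(Date.parse("Wed, 21 Oct 2015 07:28:00 GMT")); // 1445412480000
console.log(Date.parse("2015-10-21T07:28:00.000Z")); // 1445412480000 — the ISO form parses to the same instant
console.log(Number.isNaN(Date.parse("not a date"))); // true -> treated as absent
```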
@@ -104,6 +104,14 @@ pub fn NewResponse(ssl_flag: i32) type {
             return c.uws_res_has_responded(ssl_flag, res.downcast());
         }

+        pub fn markWroteContentLengthHeader(res: *Response) void {
+            c.uws_res_mark_wrote_content_length_header(ssl_flag, res.downcast());
+        }
+
+        pub fn writeMark(res: *Response) void {
+            c.uws_res_write_mark(ssl_flag, res.downcast());
+        }
+
         pub fn getNativeHandle(res: *Response) bun.FileDescriptor {
             if (comptime Environment.isWindows) {
                 // on windows uSockets exposes SOCKET
@@ -306,6 +314,30 @@ pub const AnyResponse = union(enum) {
     SSL: *uws.NewApp(true).Response,
     TCP: *uws.NewApp(false).Response,

+    pub fn markNeedsMore(this: AnyResponse) void {
+        return switch (this) {
+            inline else => |resp| resp.markNeedsMore(),
+        };
+    }
+
+    pub fn markWroteContentLengthHeader(this: AnyResponse) void {
+        return switch (this) {
+            inline else => |resp| resp.markWroteContentLengthHeader(),
+        };
+    }
+
+    pub fn writeMark(this: AnyResponse) void {
+        return switch (this) {
+            inline else => |resp| resp.writeMark(),
+        };
+    }
+
+    pub fn endSendFile(this: AnyResponse, write_offset: u64, close_connection: bool) void {
+        return switch (this) {
+            inline else => |resp| resp.endSendFile(write_offset, close_connection),
+        };
+    }
+
     pub fn socket(this: AnyResponse) *c.uws_res {
         return switch (this) {
             inline else => |resp| resp.downcast(),
@@ -576,6 +608,8 @@ pub const uws_res = c.uws_res;

 const c = struct {
     pub const uws_res = opaque {};
+    pub extern fn uws_res_mark_wrote_content_length_header(ssl: i32, res: *c.uws_res) void;
+    pub extern fn uws_res_write_mark(ssl: i32, res: *c.uws_res) void;
     pub extern fn us_socket_mark_needs_more_not_ssl(socket: ?*c.uws_res) void;
     pub extern fn uws_res_state(ssl: c_int, res: *const c.uws_res) State;
     pub extern fn uws_res_get_remote_address_info(res: *c.uws_res, dest: *[*]const u8, port: *i32, is_ipv6: *bool) usize;
@@ -1974,3 +1974,4 @@ pub const Path = struct {
 // defer std.posix.close(opened);

 // }
+pub const StatHash = @import("./fs/stat_hash.zig");
src/fs/stat_hash.zig (new file, 49 lines)
@@ -0,0 +1,49 @@
value: u64 = 0,

last_modified_u64: u64 = 0,
last_modified_buffer: [32]u8 = undefined,
last_modified_buffer_len: u8 = 0,

// TODO: add etag support here!

pub fn hash(this: *@This(), stat: bun.Stat, path: []const u8) void {
    var stat_hasher = std.hash.XxHash64.init(42);
    stat_hasher.update(std.mem.asBytes(&stat.size));
    stat_hasher.update(std.mem.asBytes(&stat.mode));
    stat_hasher.update(std.mem.asBytes(&stat.mtime()));
    stat_hasher.update(std.mem.asBytes(&stat.ino));
    stat_hasher.update(path);

    const prev = this.value;
    this.value = stat_hasher.final();

    if (prev != this.value and bun.S.ISREG(@intCast(stat.mode))) {
        const mtime_timespec = stat.mtime();
        // Clamp negative values to 0 to avoid timestamp overflow issues on Windows
        const mtime = bun.timespec{
            .nsec = @intCast(@max(mtime_timespec.nsec, 0)),
            .sec = @intCast(@max(mtime_timespec.sec, 0)),
        };
        if (mtime.ms() > 0) {
            this.last_modified_buffer_len = @intCast(bun.JSC.wtf.writeHTTPDate(&this.last_modified_buffer, mtime.msUnsigned()).len);
            this.last_modified_u64 = mtime.msUnsigned();
        } else {
            this.last_modified_buffer_len = 0;
            this.last_modified_u64 = 0;
        }
    } else if (!bun.S.ISREG(@intCast(stat.mode))) {
        this.last_modified_buffer_len = 0;
        this.last_modified_u64 = 0;
    }
}

pub fn lastModified(this: *const @This()) ?[]const u8 {
    if (this.last_modified_buffer_len == 0) {
        return null;
    }

    return this.last_modified_buffer[0..this.last_modified_buffer_len];
}

const bun = @import("bun");
const std = @import("std");
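StatHash folds size, mode, mtime, inode, and path into one change-detection value and only reformats the cached Last-Modified string when that value changes. A rough TypeScript analog of what gets cached per file (using node:fs, not Bun internals):

```ts
import { statSync } from "node:fs";

function statSignature(path: string) {
  const st = statSync(path);
  // The Zig hash inputs: size, mode, mtime, inode, and the path itself.
  const key = `${st.size}:${st.mode}:${st.mtimeMs}:${st.ino}:${path}`;
  // Regular files with a positive mtime also cache a preformatted HTTP date.
  const lastModified =
    st.isFile() && st.mtimeMs > 0 ? new Date(st.mtimeMs).toUTCString() : null;
  return { key, lastModified };
}

console.log(statSignature("package.json"));
```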
src/http.zig
@@ -4794,6 +4794,19 @@ pub const Headers = struct {
         };
     }

+    pub fn get(this: *const Headers, name: []const u8) ?[]const u8 {
+        const entries = this.entries.slice();
+        const names = entries.items(.name);
+        const values = entries.items(.value);
+        for (names, 0..) |name_ptr, i| {
+            if (bun.strings.eqlCaseInsensitiveASCII(this.asStr(name_ptr), name, true)) {
+                return this.asStr(values[i]);
+            }
+        }
+
+        return null;
+    }
+
     pub fn append(this: *Headers, name: []const u8, value: []const u8) !void {
         var offset: u32 = @truncate(this.buf.items.len);
         try this.buf.ensureUnusedCapacity(this.allocator, name.len + value.len);
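Lookups are ASCII case-insensitive, mirroring how the web Headers class behaves; that is what lets FileRoute probe "last-modified" and "content-length" no matter how the user spelled them when constructing the Response:

```ts
const headers = new Headers({ "Last-Modified": "Wed, 21 Oct 2015 07:28:00 GMT" });
console.log(headers.get("last-modified")); // "Wed, 21 Oct 2015 07:28:00 GMT"
console.log(headers.get("LAST-MODIFIED")); // same value — name lookup ignores case
```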
@@ -147,7 +147,7 @@ const PosixBufferedReader = struct {
     this.handle = .{ .fd = fd };
 }

-fn getFileType(this: *const PosixBufferedReader) FileType {
+pub fn getFileType(this: *const PosixBufferedReader) FileType {
     const flags = this.flags;
     if (flags.socket) {
         return .socket;
@@ -183,7 +183,6 @@ const PosixBufferedReader = struct {
 // No-op on posix.
 pub fn pause(this: *PosixBufferedReader) void {
     _ = this; // autofix
 }

 pub fn takeBuffer(this: *PosixBufferedReader) std.ArrayList(u8) {
@@ -443,7 +442,8 @@ const PosixBufferedReader = struct {
 if (bytes_read == 0) {
     // EOF - finished and closed pipe
     parent.closeWithoutReporting();
-    parent.done();
+    if (!parent.flags.is_done)
+        parent.done();
     return;
 }
@@ -474,7 +474,8 @@ const PosixBufferedReader = struct {

 if (bytes_read == 0) {
     parent.closeWithoutReporting();
-    parent.done();
+    if (!parent.flags.is_done)
+        parent.done();
     return;
 }
@@ -531,7 +532,8 @@ const PosixBufferedReader = struct {
 parent.closeWithoutReporting();
 if (stack_buffer[0 .. stack_buffer.len - stack_buffer_head.len].len > 0)
     _ = parent.vtable.onReadChunk(stack_buffer[0 .. stack_buffer.len - stack_buffer_head.len], .eof);
-parent.done();
+if (!parent.flags.is_done)
+    parent.done();
 return;
 }
@@ -590,7 +592,8 @@ const PosixBufferedReader = struct {
 if (bytes_read == 0) {
     parent.closeWithoutReporting();
     _ = drainChunk(parent, resizable_buffer.items, .eof);
-    parent.done();
+    if (!parent.flags.is_done)
+        parent.done();
     return;
 }
 },
@@ -625,7 +628,8 @@ const PosixBufferedReader = struct {
 if (bytes_read == 0) {
     parent.closeWithoutReporting();
     _ = drainChunk(parent, resizable_buffer.items, .eof);
-    parent.done();
+    if (!parent.flags.is_done)
+        parent.done();
     return;
 }
@@ -891,11 +895,11 @@ pub const WindowsBufferedReader = struct {
 MaxBuf.removeFromPipereader(&this.maxbuf);
 this.buffer().deinit();
 const source = this.source orelse return;
+this.source = null;
 if (!source.isClosed()) {
     // closeImpl will take care of freeing the source
     this.closeImpl(false);
 }
-this.source = null;
 }

 pub fn setRawMode(this: *WindowsBufferedReader, value: bool) bun.JSC.Maybe(void) {
@@ -1056,9 +1060,9 @@ pub const WindowsBufferedReader = struct {
 switch (source) {
     .sync_file, .file => |file| {
         if (!this.flags.is_paused) {
+            this.flags.is_paused = true;
             // always cancel the current one
             file.fs.cancel();
-            this.flags.is_paused = true;
         }
         // always use close_fs here because we can have a operation in progress
         file.close_fs.data = file;
@@ -1066,6 +1070,7 @@ pub const WindowsBufferedReader = struct {
 },
 .pipe => |pipe| {
     pipe.data = pipe;
+    this.flags.is_paused = true;
     pipe.close(onPipeClose);
 },
 .tty => |tty| {
@@ -1075,6 +1080,7 @@ pub const WindowsBufferedReader = struct {
 }

 tty.data = tty;
+this.flags.is_paused = true;
 tty.close(onTTYClose);
 },
 }
@@ -32,7 +32,7 @@ const words: Record<string, { reason: string; limit?: number; regex?: boolean }>
 "== alloc.ptr": { reason: "The std.mem.Allocator context pointer can be undefined, which makes this comparison undefined behavior" },
 "!= alloc.ptr": { reason: "The std.mem.Allocator context pointer can be undefined, which makes this comparison undefined behavior" },

-[String.raw`: [a-zA-Z0-9_\.\*\?\[\]\(\)]+ = undefined,`]: { reason: "Do not default a struct field to undefined", limit: 241, regex: true },
+[String.raw`: [a-zA-Z0-9_\.\*\?\[\]\(\)]+ = undefined,`]: { reason: "Do not default a struct field to undefined", limit: 242, regex: true },
 "usingnamespace": { reason: "Zig 0.15 will remove `usingnamespace`" },
 "catch unreachable": { reason: "For out-of-memory, prefer 'catch bun.outOfMemory()'", limit: 1854 },
test/js/bun/http/bun-serve-file.test.ts (new file, 592 lines)
@@ -0,0 +1,592 @@
import type { Server } from "bun";
import { afterAll, beforeAll, describe, expect, it, mock, test } from "bun:test";
import { isWindows, rmScope, tempDirWithFiles } from "harness";
import { unlinkSync } from "node:fs";
import { join } from "node:path";

const LARGE_SIZE = 1024 * 1024 * 8;
const files = {
  "hello.txt": "Hello, World!",
  "empty.txt": "",
  "binary.bin": Buffer.from([0x00, 0x01, 0x02, 0x03, 0xff, 0xfe, 0xfd]),
  "large.txt": Buffer.alloc(LARGE_SIZE, "bun").toString(),
  "unicode.txt": "Hello 世界 🌍 émojis",
  "json.json": JSON.stringify({ message: "test", number: 42 }),
  "nested/file.txt": "nested content",
  "special chars & symbols.txt": "special file content",
  "will-be-deleted.txt": "will be deleted",
  "partial.txt": "0123456789ABCDEF",
};

describe("Bun.file in serve routes", () => {
  let server: Server;
  let tempDir: string;
  let handler = mock(req => {
    return new Response(`fallback: ${req.url}`, {
      headers: {
        "Content-Type": "text/plain",
      },
    });
  });

  beforeAll(async () => {
    tempDir = tempDirWithFiles("bun-serve-file-test-", files);

    const routes = {
      "/hello.txt": {
        GET: new Response(Bun.file(join(tempDir, "hello.txt"))),
        HEAD: new Response(Bun.file(join(tempDir, "hello.txt"))),
      },
      "/empty.txt": new Response(Bun.file(join(tempDir, "empty.txt"))),
      "/empty-400.txt": new Response(Bun.file(join(tempDir, "empty.txt")), {
        status: 400,
      }),
      "/binary.bin": new Response(Bun.file(join(tempDir, "binary.bin"))),
      "/large.txt": new Response(Bun.file(join(tempDir, "large.txt"))),
      "/unicode.txt": new Response(Bun.file(join(tempDir, "unicode.txt"))),
      "/json.json": new Response(Bun.file(join(tempDir, "json.json"))),
      "/nested/file.txt": new Response(Bun.file(join(tempDir, "nested", "file.txt"))),
      "/special-chars.txt": new Response(Bun.file(join(tempDir, "special chars & symbols.txt"))),
      "/nonexistent.txt": new Response(Bun.file(join(tempDir, "does-not-exist.txt"))),
      "/with-headers.txt": new Response(Bun.file(join(tempDir, "hello.txt")), {
        headers: {
          "X-Custom-Header": "custom-value",
          "Cache-Control": "max-age=3600",
        },
      }),
      "/with-status.txt": new Response(Bun.file(join(tempDir, "hello.txt")), {
        status: 201,
        statusText: "Created",
      }),
      "/will-be-deleted.txt": new Response(Bun.file(join(tempDir, "will-be-deleted.txt"))),
      "/custom-last-modified.txt": new Response(Bun.file(join(tempDir, "hello.txt")), {
        headers: {
          "Last-Modified": "Wed, 21 Oct 2015 07:28:00 GMT",
        },
      }),
      "/partial.txt": new Response(Bun.file(join(tempDir, "partial.txt"))),
      "/partial-slice.txt": new Response(Bun.file(join(tempDir, "partial.txt")).slice(5, 10)),
      "/fd-not-supported.txt": (() => {
        // This would test file descriptors, but they're not supported yet
        return new Response(Bun.file(join(tempDir, "hello.txt")));
      })(),
    } as const;

    server = Bun.serve({
      routes: routes,
      port: 0,
      fetch: handler,
    });
    server.unref();

    unlinkSync(join(tempDir, "will-be-deleted.txt"));
  });

  afterAll(() => {
    server?.stop(true);
    using _ = rmScope(tempDir);
  });
describe("Basic file serving", () => {
|
||||
it("serves text file", async () => {
|
||||
const res = await fetch(new URL(`/hello.txt`, server.url));
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe("Hello, World!");
|
||||
const headers = res.headers.toJSON();
|
||||
if (!new Date(headers["last-modified"]!).getTime()) {
|
||||
throw new Error("Last-Modified header is not a valid date");
|
||||
}
|
||||
|
||||
if (!new Date(headers["date"]!).getTime()) {
|
||||
throw new Error("Date header is not a valid date");
|
||||
}
|
||||
|
||||
delete headers.date;
|
||||
delete headers["last-modified"];
|
||||
|
||||
// Snapshot the headers so a test fails if we change the headers later.
|
||||
expect(headers).toMatchInlineSnapshot(`
|
||||
{
|
||||
"content-length": "13",
|
||||
"content-type": "text/plain;charset=utf-8",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
it("serves empty file", async () => {
|
||||
const res = await fetch(new URL(`/empty.txt`, server.url));
|
||||
expect(res.status).toBe(204);
|
||||
expect(await res.text()).toBe("");
|
||||
// A server MUST NOT send a Content-Length header field in any response
|
||||
// with a status code of 1xx (Informational) or 204 (No Content). A server
|
||||
// MUST NOT send a Content-Length header field in any 2xx (Successful)
|
||||
// response to a CONNECT request (Section 9.3.6).
|
||||
expect(res.headers.get("Content-Length")).toBeNull();
|
||||
|
||||
const headers = res.headers.toJSON();
|
||||
delete headers.date;
|
||||
delete headers["last-modified"];
|
||||
|
||||
expect(headers).toMatchInlineSnapshot(`
|
||||
{
|
||||
"content-type": "text/plain;charset=utf-8",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
it("serves empty file with custom status code", async () => {
|
||||
const res = await fetch(new URL(`/empty-400.txt`, server.url));
|
||||
expect(res.status).toBe(400);
|
||||
expect(await res.text()).toBe("");
|
||||
expect(res.headers.get("Content-Length")).toBe("0");
|
||||
});
|
||||
|
||||
it("serves binary file", async () => {
|
||||
const res = await fetch(new URL(`/binary.bin`, server.url));
|
||||
expect(res.status).toBe(200);
|
||||
const bytes = await res.bytes();
|
||||
expect(bytes).toEqual(new Uint8Array([0x00, 0x01, 0x02, 0x03, 0xff, 0xfe, 0xfd]));
|
||||
expect(res.headers.get("Content-Type")).toMatch(/application\/octet-stream/);
|
||||
});
|
||||
|
||||
it("serves large file", async () => {
|
||||
const res = await fetch(new URL(`/large.txt`, server.url));
|
||||
expect(res.status).toBe(200);
|
||||
const text = await res.text();
|
||||
expect(text).toHaveLength(LARGE_SIZE);
|
||||
|
||||
if (files["large.txt"] !== text) {
|
||||
console.log("Expected length:", files["large.txt"].length);
|
||||
console.log("Actual length:", text.length);
|
||||
console.log("First 100 chars expected:", files["large.txt"].slice(0, 100));
|
||||
console.log("First 100 chars actual:", text.slice(0, 100));
|
||||
console.log("Last 100 chars expected:", files["large.txt"].slice(-100));
|
||||
console.log("Last 100 chars actual:", text.slice(-100));
|
||||
|
||||
// Find first difference
|
||||
for (let i = 0; i < Math.min(files["large.txt"].length, text.length); i++) {
|
||||
if (files["large.txt"][i] !== text[i]) {
|
||||
console.log(`First difference at index ${i}:`);
|
||||
console.log(`Expected: "${files["large.txt"][i]}" (code: ${files["large.txt"].charCodeAt(i)})`);
|
||||
console.log(`Actual: "${text[i]}" (code: ${text.charCodeAt(i)})`);
|
||||
console.log(`Context around difference: "${files["large.txt"].slice(Math.max(0, i - 10), i + 10)}"`);
|
||||
console.log(`Actual context: "${text.slice(Math.max(0, i - 10), i + 10)}"`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
throw new Error("large.txt is not the same");
|
||||
}
|
||||
|
||||
expect(res.headers.get("Content-Length")).toBe(LARGE_SIZE.toString());
|
||||
|
||||
const headers = res.headers.toJSON();
|
||||
delete headers.date;
|
||||
delete headers["last-modified"];
|
||||
|
||||
expect(headers).toMatchInlineSnapshot(`
|
||||
{
|
||||
"content-length": "${LARGE_SIZE}",
|
||||
"content-type": "text/plain;charset=utf-8",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
it("serves unicode file", async () => {
|
||||
const res = await fetch(new URL(`/unicode.txt`, server.url));
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe("Hello 世界 🌍 émojis");
|
||||
|
||||
const headers = res.headers.toJSON();
|
||||
delete headers.date;
|
||||
delete headers["last-modified"];
|
||||
|
||||
expect(headers).toMatchInlineSnapshot(`
|
||||
{
|
||||
"content-length": "25",
|
||||
"content-type": "text/plain;charset=utf-8",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
it("serves JSON file with correct content type", async () => {
|
||||
const res = await fetch(new URL(`/json.json`, server.url));
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.json()).toEqual({ message: "test", number: 42 });
|
||||
expect(res.headers.get("Content-Type")).toMatch(/application\/json/);
|
||||
});
|
||||
|
||||
it("serves nested file", async () => {
|
||||
const res = await fetch(new URL(`/nested/file.txt`, server.url));
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe("nested content");
|
||||
});
|
||||
|
||||
it("serves file with special characters in name", async () => {
|
||||
const res = await fetch(new URL(`/special-chars.txt`, server.url));
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe("special file content");
|
||||
});
|
||||
});
|
||||
|
||||
describe("HTTP methods", () => {
|
||||
it("supports HEAD requests", async () => {
|
||||
const res = await fetch(new URL(`/hello.txt`, server.url), { method: "HEAD" });
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe("");
|
||||
expect(res.headers.get("Content-Length")).toBe("13"); // "Hello, World!" length
|
||||
expect(res.headers.get("Content-Type")).toMatch(/text\/plain/);
|
||||
});
|
||||
|
||||
it("supports GET requests", async () => {
|
||||
const res = await fetch(new URL(`/hello.txt`, server.url), { method: "GET" });
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe("Hello, World!");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Custom headers and status", () => {
|
||||
it("preserves custom headers", async () => {
|
||||
const res = await fetch(new URL(`/with-headers.txt`, server.url));
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe("Hello, World!");
|
||||
expect(res.headers.get("X-Custom-Header")).toBe("custom-value");
|
||||
expect(res.headers.get("Cache-Control")).toBe("max-age=3600");
|
||||
});
|
||||
|
||||
it("preserves custom status", async () => {
|
||||
const res = await fetch(new URL(`/with-status.txt`, server.url));
|
||||
expect(res.status).toBe(201);
|
||||
expect(res.statusText).toBe("Created");
|
||||
expect(await res.text()).toBe("Hello, World!");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Error handling", () => {
|
||||
it("handles nonexistent files gracefully", async () => {
|
||||
const previousCallCount = handler.mock.calls.length;
|
||||
const res = await fetch(new URL(`/nonexistent.txt`, server.url));
|
||||
|
||||
// Should fall back to the handler since file doesn't exist
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe(`fallback: ${server.url}nonexistent.txt`);
|
||||
expect(handler.mock.calls.length).toBe(previousCallCount + 1);
|
||||
});
|
||||
});
|
||||
|
||||
describe.todo("Range requests", () => {
|
||||
it("supports partial content requests", async () => {
|
||||
const res = await fetch(new URL(`/hello.txt`, server.url), {
|
||||
headers: {
|
||||
"Range": "bytes=0-4",
|
||||
},
|
||||
});
|
||||
|
||||
if (res.status === 206) {
|
||||
expect(await res.text()).toBe("Hello");
|
||||
expect(res.headers.get("Content-Range")).toMatch(/bytes 0-4\/13/);
|
||||
expect(res.headers.get("Accept-Ranges")).toBe("bytes");
|
||||
} else {
|
||||
// If range requests aren't supported, should return full content
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe("Hello, World!");
|
||||
}
|
||||
});
|
||||
|
||||
it("handles invalid range requests", async () => {
|
||||
const res = await fetch(new URL(`/hello.txt`, server.url), {
|
||||
headers: {
|
||||
"Range": "bytes=20-30", // Beyond file size
|
||||
},
|
||||
});
|
||||
|
||||
// Should either return 416 Range Not Satisfiable or 200 with full content
|
||||
expect([200, 416]).toContain(res.status);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Conditional requests", () => {
|
||||
describe.each(["GET", "HEAD"])("%s", method => {
|
||||
it(`handles If-Modified-Since with future date (304)`, async () => {
|
||||
// First request to get Last-Modified
|
||||
const res1 = await fetch(new URL(`/hello.txt`, server.url));
|
||||
const lastModified = res1.headers.get("Last-Modified");
|
||||
expect(lastModified).not.toBeEmpty();
|
||||
|
||||
// If-Modified-Since is AFTER the file's last modified date (future)
|
||||
// Should return 304 because file hasn't been modified since that future date
|
||||
const res2 = await fetch(new URL(`/hello.txt`, server.url), {
|
||||
method,
|
||||
headers: {
|
||||
"If-Modified-Since": new Date(Date.parse(lastModified!) + 10000).toISOString(),
|
||||
},
|
||||
});
|
||||
|
||||
expect(res2.status).toBe(304);
|
||||
expect(await res2.text()).toBe("");
|
||||
});
|
||||
|
||||
it(`handles If-Modified-Since with past date (200)`, async () => {
|
||||
// If-Modified-Since is way in the past
|
||||
// Should return 200 because file has been modified since then
|
||||
const res = await fetch(new URL(`/hello.txt`, server.url), {
|
||||
method,
|
||||
headers: {
|
||||
"If-Modified-Since": new Date(Date.now() - 1000000).toISOString(),
|
||||
},
|
||||
});
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
});
|
||||
});
|
||||
|
||||
it("ignores If-Modified-Since for non-GET/HEAD requests", async () => {
|
||||
const res1 = await fetch(new URL(`/hello.txt`, server.url));
|
||||
const lastModified = res1.headers.get("Last-Modified");
|
||||
|
||||
const res2 = await fetch(new URL(`/hello.txt`, server.url), {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"If-Modified-Since": new Date(Date.parse(lastModified!) + 10000).toISOString(),
|
||||
},
|
||||
});
|
||||
|
||||
// Should not return 304 for POST
|
||||
expect(res2.status).not.toBe(304);
|
||||
});
|
||||
|
||||
it.todo("handles ETag", async () => {
|
||||
const res1 = await fetch(new URL(`/hello.txt`, server.url));
|
||||
const etag = res1.headers.get("ETag");
|
||||
|
||||
const res2 = await fetch(new URL(`/hello.txt`, server.url), {
|
||||
headers: {
|
||||
"If-None-Match": etag!,
|
||||
},
|
||||
});
|
||||
|
||||
expect(res2.status).toBe(304);
|
||||
expect(await res2.text()).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Stress testing", () => {
|
||||
test.each(["hello.txt", "large.txt"])(
|
||||
"concurrent requests for %s",
|
||||
async filename => {
|
||||
const batchSize = isWindows ? 8 : 32;
|
||||
const iterations = isWindows ? 2 : 5;
|
||||
|
||||
async function iterate() {
|
||||
const promises = Array.from({ length: batchSize }, () =>
|
||||
fetch(`${server.url}${filename}`).then(res => {
|
||||
expect(res.status).toBe(200);
|
||||
return res.text();
|
||||
}),
|
||||
);
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
|
||||
// Verify all responses are identical
|
||||
const expected = results[0];
|
||||
results.forEach(result => {
|
||||
expect(result).toBe(expected);
|
||||
});
|
||||
}
|
||||
|
||||
for (let i = 0; i < iterations; i++) {
|
||||
await iterate();
|
||||
Bun.gc();
|
||||
}
|
||||
},
|
||||
30000,
|
||||
);
|
||||
|
||||
it("memory usage stays reasonable", async () => {
|
||||
Bun.gc(true);
|
||||
const baseline = (process.memoryUsage.rss() / 1024 / 1024) | 0;
|
||||
|
||||
// Make many requests to large file
|
||||
for (let i = 0; i < 50; i++) {
|
||||
const res = await fetch(new URL(`/large.txt`, server.url));
|
||||
expect(res.status).toBe(200);
|
||||
await res.text(); // Consume the response
|
||||
}
|
||||
|
||||
Bun.gc(true);
|
||||
const final = (process.memoryUsage.rss() / 1024 / 1024) | 0;
|
||||
const delta = final - baseline;
|
||||
|
||||
expect(delta).toBeLessThan(100); // Should not leak significant memory
|
||||
}, 30000);
|
||||
|
||||
it("deleted file goes to handler", async () => {
|
||||
const previousCallCount = handler.mock.calls.length;
|
||||
const res = await fetch(new URL(`/will-be-deleted.txt`, server.url));
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe(`fallback: ${server.url}will-be-deleted.txt`);
|
||||
expect(handler.mock.calls.length).toBe(previousCallCount + 1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Handler fallback", () => {
|
||||
it("falls back to handler for unmatched routes", async () => {
|
||||
const previousCallCount = handler.mock.calls.length;
|
||||
const res = await fetch(new URL(`/not-in-routes.txt`, server.url));
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe(`fallback: ${server.url}not-in-routes.txt`);
|
||||
expect(handler.mock.calls.length).toBe(previousCallCount + 1);
|
||||
});
|
||||
|
||||
it("does not call handler for matched file routes", async () => {
|
||||
const previousCallCount = handler.mock.calls.length;
|
||||
const res = await fetch(new URL(`/hello.txt`, server.url));
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe("Hello, World!");
|
||||
expect(handler.mock.calls.length).toBe(previousCallCount);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Last-Modified header handling", () => {
|
||||
it("automatically adds Last-Modified header", async () => {
|
||||
const res = await fetch(new URL(`/hello.txt`, server.url));
|
||||
const lastModified = res.headers.get("Last-Modified");
|
||||
expect(lastModified).not.toBeNull();
|
||||
expect(lastModified).toMatch(/^[A-Za-z]{3}, \d{2} [A-Za-z]{3} \d{4} \d{2}:\d{2}:\d{2} GMT$/);
|
||||
});
|
||||
|
||||
it("respects custom Last-Modified header", async () => {
|
||||
const res = await fetch(new URL(`/custom-last-modified.txt`, server.url));
|
||||
expect(res.headers.get("Last-Modified")).toBe("Wed, 21 Oct 2015 07:28:00 GMT");
|
||||
});
|
||||
|
||||
it("uses custom Last-Modified for If-Modified-Since checks", async () => {
|
||||
// Request with If-Modified-Since after custom date
|
||||
const res1 = await fetch(new URL(`/custom-last-modified.txt`, server.url), {
|
||||
headers: {
|
||||
"If-Modified-Since": "Thu, 22 Oct 2015 07:28:00 GMT",
|
||||
},
|
||||
});
|
||||
expect(res1.status).toBe(304);
|
||||
|
||||
// Request with If-Modified-Since before custom date
|
||||
const res2 = await fetch(new URL(`/custom-last-modified.txt`, server.url), {
|
||||
headers: {
|
||||
"If-Modified-Since": "Tue, 20 Oct 2015 07:28:00 GMT",
|
||||
},
|
||||
});
|
||||
expect(res2.status).toBe(200);
|
||||
});
|
||||
});
|
||||
|
||||
describe("File slicing", () => {
|
||||
it("serves complete file", async () => {
|
||||
const res = await fetch(new URL(`/partial.txt`, server.url));
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe("0123456789ABCDEF");
|
||||
expect(res.headers.get("Content-Length")).toBe("16");
|
||||
});
|
||||
|
||||
it("serves sliced file", async () => {
|
||||
const res = await fetch(new URL(`/partial-slice.txt`, server.url));
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe("56789");
|
||||
expect(res.headers.get("Content-Length")).toBe("5");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Special status codes", () => {
|
||||
it("returns 204 for empty files with 200 status", async () => {
|
||||
const res = await fetch(new URL(`/empty.txt`, server.url));
|
||||
expect(res.status).toBe(204);
|
||||
expect(await res.text()).toBe("");
|
||||
});
|
||||
|
||||
it("preserves custom status for empty files", async () => {
|
||||
const res = await fetch(new URL(`/empty-400.txt`, server.url));
|
||||
expect(res.status).toBe(400);
|
||||
expect(await res.text()).toBe("");
|
||||
});
|
||||
|
||||
it("returns appropriate status for 304 responses", async () => {
|
||||
const res1 = await fetch(new URL(`/hello.txt`, server.url));
|
||||
const lastModified = res1.headers.get("Last-Modified");
|
||||
|
||||
const res2 = await fetch(new URL(`/hello.txt`, server.url), {
|
||||
headers: {
|
||||
"If-Modified-Since": new Date(Date.parse(lastModified!) + 10000).toISOString(),
|
||||
},
|
||||
});
|
||||
|
||||
expect(res2.status).toBe(304);
|
||||
expect(res2.headers.get("Content-Length")).toBeNull();
|
||||
expect(await res2.text()).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Streaming and file types", () => {
|
||||
it("sets Content-Length for regular files", async () => {
|
||||
const res = await fetch(new URL(`/hello.txt`, server.url));
|
||||
expect(res.headers.get("Content-Length")).toBe("13");
|
||||
});
|
||||
|
||||
it("handles HEAD requests with proper headers", async () => {
|
||||
const res = await fetch(new URL(`/hello.txt`, server.url), { method: "HEAD" });
|
||||
expect(res.status).toBe(200);
|
||||
expect(res.headers.get("Content-Length")).toBe("13");
|
||||
expect(res.headers.get("Content-Type")).toMatch(/text\/plain/);
|
||||
expect(res.headers.get("Last-Modified")).not.toBeNull();
|
||||
expect(await res.text()).toBe("");
|
||||
});
|
||||
|
||||
it("handles abort/cancellation gracefully", async () => {
|
||||
const controller = new AbortController();
|
||||
const promise = fetch(new URL(`/large.txt`, server.url), {
|
||||
signal: controller.signal,
|
||||
});
|
||||
|
||||
// Abort immediately
|
||||
controller.abort();
|
||||
|
||||
await expect(promise).rejects.toThrow(/abort/i);
|
||||
});
|
||||
});
|
||||
|
||||
describe("File not found handling", () => {
|
||||
it("falls back to handler when file doesn't exist", async () => {
|
||||
const previousCallCount = handler.mock.calls.length;
|
||||
const res = await fetch(new URL(`/nonexistent.txt`, server.url));
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe(`fallback: ${server.url}nonexistent.txt`);
|
||||
expect(handler.mock.calls.length).toBe(previousCallCount + 1);
|
||||
});
|
||||
|
||||
it("falls back to handler when file is deleted after route creation", async () => {
|
||||
const previousCallCount = handler.mock.calls.length;
|
||||
const res = await fetch(new URL(`/will-be-deleted.txt`, server.url));
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
expect(await res.text()).toBe(`fallback: ${server.url}will-be-deleted.txt`);
|
||||
expect(handler.mock.calls.length).toBe(previousCallCount + 1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Content-Type detection", () => {
|
||||
it("detects text/plain for .txt files", async () => {
|
||||
const res = await fetch(new URL(`/hello.txt`, server.url));
|
||||
expect(res.headers.get("Content-Type")).toMatch(/text\/plain/);
|
||||
});
|
||||
|
||||
it("detects application/json for .json files", async () => {
|
||||
const res = await fetch(new URL(`/json.json`, server.url));
|
||||
expect(res.headers.get("Content-Type")).toMatch(/application\/json/);
|
||||
});
|
||||
|
||||
it("detects application/octet-stream for binary files", async () => {
|
||||
const res = await fetch(new URL(`/binary.bin`, server.url));
|
||||
expect(res.headers.get("Content-Type")).toMatch(/application\/octet-stream/);
|
||||
});
|
||||
});
|
||||
});
|
||||