mirror of https://github.com/oven-sh/bun, synced 2026-02-16 22:01:47 +00:00
Merge branch 'main' of github.com:oven-sh/bun into ali/piscina
@@ -2025,7 +2025,7 @@ describe("bundler", () => {
   itBundled("edgecase/NoOutWithTwoFiles", {
     files: {
       "/entry.ts": `
-        import index from './index.html'
+        import index from './index.html' with { type: 'file' }
         console.log(index);
       `,
       "/index.html": `
@@ -2051,7 +2051,7 @@ describe("bundler", () => {
   itBundled("edgecase/OutWithTwoFiles", {
     files: {
       "/entry.ts": `
-        import index from './index.html'
+        import index from './index.html' with { type: 'file' }
         console.log(index);
       `,
       "/index.html": `

@@ -485,7 +485,7 @@ export const largeModule = {
   outdir: "out/",
   files: {
     "/in/entry.js": `
-      import htmlContent from './template.html';
+      import htmlContent from './template.html' with { type: 'file' };
      console.log('Loaded HTML:', htmlContent);`,

     "/in/template.html": `
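The three hunks above make the same mechanical change: bundler edge-case fixtures that previously imported an HTML file bare now add the with { type: 'file' } import attribute. As the new test file below exercises, a bare HTML import now resolves to a bundling manifest object, while the type: 'file' attribute keeps the older behavior of resolving to a path string. A minimal sketch of the two forms, assuming Bun's bundler with target "bun" (file names here are illustrative, not taken from the diff):

// Sketch only: contrasts the two import forms exercised by this commit's tests.
import manifest from "./index.html";                      // object: { index, files: [...] }
import pagePath from "./page.html" with { type: "file" }; // string: path to the copied asset

console.log(manifest.index, manifest.files.length);
console.log(typeof pagePath); // "string"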
370  test/bundler/html-import-manifest.test.ts  (new file)
@@ -0,0 +1,370 @@
import { describe, expect } from "bun:test";
import { itBundled } from "./expectBundled";

describe("bundler", () => {
  // Test HTML import manifest with enhanced metadata
  itBundled("html-import/manifest-with-metadata", {
    outdir: "out/",
    files: {
      "/server.js": `
        import html from "./client.html";

        if (!html.files.find(a => a.path === html.index)) {
          throw new Error("Bad file");
        }

        console.log(JSON.stringify(html, null, 2));

      `,
      "/client.html": `
        <!DOCTYPE html>
        <html>
          <head>
            <link rel="stylesheet" href="./styles.css">
            <script src="./client.js"></script>
          </head>
          <body>
            <h1>Client HTML</h1>
          </body>
        </html>`,
      "/styles.css": `
        body {
          background-color: #f0f0f0;
          margin: 0;
          padding: 20px;
        }
        h1 {
          color: #333;
        }`,
      "/client.js": `
        import favicon from './favicon.png';
        console.log("Client script loaded");
        window.addEventListener('DOMContentLoaded', () => {
          console.log('DOM ready');
        });
        console.log(favicon);
      `,
      "/favicon.png": Buffer.from([
        0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, // PNG header
        0x00, 0x00, 0x00, 0x0d, 0x49, 0x48, 0x44, 0x52, // IHDR chunk
        0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, // 16x16
        0x08, 0x02, 0x00, 0x00, 0x00, 0x90, 0x91, 0x68, // 8-bit RGB
        0x36, 0x00, 0x00, 0x00, 0x00, 0x49, 0x45, 0x4e, // IEND chunk
        0x44, 0xae, 0x42, 0x60, 0x82,
      ]),
    },
    entryPoints: ["/server.js"],
    target: "bun",

    run: {
      validate({ stdout, stderr }) {
        expect(stdout).toMatchInlineSnapshot(`
          "{
            "index": "./client.html",
            "files": [
              {
                "input": "client.html",
                "path": "./client-5y90hwq3.js",
                "loader": "js",
                "isEntry": true,
                "headers": {
                  "etag": "xGxKikG0dN0",
                  "content-type": "text/javascript;charset=utf-8"
                }
              },
              {
                "input": "client.html",
                "path": "./client.html",
                "loader": "html",
                "isEntry": true,
                "headers": {
                  "etag": "hZ3u5t2Rmuo",
                  "content-type": "text/html;charset=utf-8"
                }
              },
              {
                "input": "client.html",
                "path": "./client-0z58sk45.css",
                "loader": "css",
                "isEntry": true,
                "headers": {
                  "etag": "0k_h5oYVQlA",
                  "content-type": "text/css;charset=utf-8"
                }
              },
              {
                "input": "favicon.png",
                "path": "./favicon-wjepk3hq.png",
                "loader": "file",
                "isEntry": false,
                "headers": {
                  "etag": "fFLOVvPDEZc",
                  "content-type": "image/png"
                }
              }
            ]
          }
          "
        `);
      },
    },
  });

  // Test manifest with multiple HTML imports
  itBundled("html-import/multiple-manifests", {
    outdir: "out/",
    files: {
      "/server.js": `
        import homeHtml from "./home.html";
        import aboutHtml from "./about.html";
        console.log("Home manifest:", homeHtml);
        console.log("About manifest:", aboutHtml);
      `,
      "/home.html": `
        <!DOCTYPE html>
        <html>
          <head>
            <link rel="stylesheet" href="./home.css">
            <script src="./home.js"></script>
          </head>
          <body>
            <h1>Home Page</h1>
          </body>
        </html>`,
      "/about.html": `
        <!DOCTYPE html>
        <html>
          <head>
            <link rel="stylesheet" href="./about.css">
            <script src="./about.js"></script>
          </head>
          <body>
            <h1>About Page</h1>
          </body>
        </html>`,
      "/home.css": "body { background: #fff; }",
      "/home.js": "console.log('Home page');",
      "/about.css": "body { background: #f0f0f0; }",
      "/about.js": "console.log('About page');",
    },
    entryPoints: ["/server.js"],
    target: "bun",

    onAfterBundle(api) {
      const serverCode = api.readFile("out/server.js");

      // The manifests are embedded as escaped JSON strings in __jsonParse calls
      const manifestMatches = [...serverCode.matchAll(/__jsonParse\("(.+?)"\)/gs)];
      expect(manifestMatches.length).toBe(2);
      let manifests = [];
      for (const match of manifestMatches) {
        // The captured group contains the escaped JSON string
        const escapedJson = match[1];
        // Parse the escaped JSON string
        const manifest = JSON.parse(JSON.parse('"' + escapedJson + '"'));
        manifests.push(manifest);
        expect(manifest.index).toBeDefined();
        expect(manifest.files).toBeDefined();
        expect(Array.isArray(manifest.files)).toBe(true);

        // Each manifest should have HTML, JS, and CSS
        const loaders = manifest.files.map((f: any) => f.loader);
        expect(loaders).toContain("html");
        expect(loaders).toContain("js");
        expect(loaders).toContain("css");

        // All files should have enhanced metadata
        for (const file of manifest.files) {
          expect(file).toHaveProperty("headers");
          expect(file).toHaveProperty("isEntry");
          expect(file.headers).toHaveProperty("etag");
          expect(file.headers).toHaveProperty("content-type");
        }
      }

      expect(manifests).toMatchInlineSnapshot(`
        [
          {
            "files": [
              {
                "headers": {
                  "content-type": "text/javascript;charset=utf-8",
                  "etag": "DLJP98vzFzQ",
                },
                "input": "home.html",
                "isEntry": true,
                "loader": "js",
                "path": "./home-5f8tg1jd.js",
              },
              {
                "headers": {
                  "content-type": "text/html;charset=utf-8",
                  "etag": "_Qy4EtlcGvs",
                },
                "input": "home.html",
                "isEntry": true,
                "loader": "html",
                "path": "./home.html",
              },
              {
                "headers": {
                  "content-type": "text/css;charset=utf-8",
                  "etag": "6qg2qb7a2qo",
                },
                "input": "home.html",
                "isEntry": true,
                "loader": "css",
                "path": "./home-5pdcqqze.css",
              },
            ],
            "index": "./home.html",
          },
          {
            "files": [
              {
                "headers": {
                  "content-type": "text/javascript;charset=utf-8",
                  "etag": "t8rrkgPylZo",
                },
                "input": "about.html",
                "isEntry": true,
                "loader": "js",
                "path": "./about-e59abjgr.js",
              },
              {
                "headers": {
                  "content-type": "text/html;charset=utf-8",
                  "etag": "igL7YEH9e0I",
                },
                "input": "about.html",
                "isEntry": true,
                "loader": "html",
                "path": "./about.html",
              },
              {
                "headers": {
                  "content-type": "text/css;charset=utf-8",
                  "etag": "DE8kdBXWhVg",
                },
                "input": "about.html",
                "isEntry": true,
                "loader": "css",
                "path": "./about-7apjgk42.css",
              },
            ],
            "index": "./about.html",
          },
        ]
      `);
    },
  });

  // Test that import with {type: 'file'} still works as a file import
  itBundled("html-import/with-type-file-attribute", {
    outdir: "out/",
    files: {
      "/entry.js": `
        import htmlUrl from "./page.html" with { type: 'file' };
        import htmlManifest from "./index.html";

        // Test that htmlUrl is a string (file path)
        if (typeof htmlUrl !== 'string') {
          throw new Error("Expected htmlUrl to be a string, got " + typeof htmlUrl);
        }

        // Test that htmlManifest is an object with expected properties
        if (typeof htmlManifest !== 'object' || !htmlManifest.index || !Array.isArray(htmlManifest.files)) {
          throw new Error("Expected htmlManifest to be an object with index and files array");
        }

        console.log("✓ File import returned URL:", htmlUrl);
        console.log("✓ HTML import returned manifest with", htmlManifest.files.length, "files");
        console.log("✓ Both import types work correctly");
      `,
      "/page.html": `
        <!DOCTYPE html>
        <html>
          <head>
            <title>Page imported as file</title>
          </head>
          <body>
            <h1>This HTML is imported with type: 'file'</h1>
          </body>
        </html>`,
      "/index.html": `
        <!DOCTYPE html>
        <html>
          <head>
            <link rel="stylesheet" href="./styles.css">
          </head>
          <body>
            <h1>Test Page</h1>
          </body>
        </html>`,
      "/styles.css": `body { background: #fff; }`,
    },
    entryPoints: ["/entry.js"],
    target: "bun",

    run: {
      validate({ stdout }) {
        expect(stdout).toContain("✓ File import returned URL:");
        expect(stdout).toContain("✓ HTML import returned manifest with");
        expect(stdout).toContain("✓ Both import types work correctly");
      },
    },

    onAfterBundle(api) {
      // Check that the generated code correctly handles both import types
      const entryCode = api.readFile("out/entry.js");

      // Should have a file import for page.html
      expect(entryCode).toContain('var page_default = "./page-');
      expect(entryCode).toContain('.html";');

      // Should have a manifest import for index.html
      expect(entryCode).toContain('__jsonParse("');
      expect(entryCode).toContain('\\\"index\\\":\\\"./index.html\\\"');
      expect(entryCode).toContain('\\\"files\\\":[');
    },
  });
});
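The inline snapshots above pin down the manifest shape: an index path plus a files array whose entries carry input, path, loader, isEntry, and precomputed headers (etag and content-type). A hedged sketch of how server code might consume an object of that shape at runtime; only the shape comes from the snapshots, the variable names are illustrative:

// Sketch only: index the manifest entries asserted by the snapshots above.
import manifest from "./client.html"; // { index: string, files: [{ input, path, loader, isEntry, headers }] }

const byPath = new Map(manifest.files.map(file => [file.path, file]));

// Look up the entry HTML document and reuse its precomputed response headers.
const entry = byPath.get(manifest.index);
if (entry) {
  console.log("serve", entry.path, "as", entry.headers["content-type"], "etag", entry.headers.etag);
}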
@@ -32,12 +32,12 @@ const words: Record<string, { reason: string; limit?: number; regex?: boolean }>
   "== alloc.ptr": { reason: "The std.mem.Allocator context pointer can be undefined, which makes this comparison undefined behavior" },
   "!= alloc.ptr": { reason: "The std.mem.Allocator context pointer can be undefined, which makes this comparison undefined behavior" },

-  [String.raw`: [a-zA-Z0-9_\.\*\?\[\]\(\)]+ = undefined,`]: { reason: "Do not default a struct field to undefined", limit: 241, regex: true },
+  [String.raw`: [a-zA-Z0-9_\.\*\?\[\]\(\)]+ = undefined,`]: { reason: "Do not default a struct field to undefined", limit: 243, regex: true },
   "usingnamespace": { reason: "Zig 0.15 will remove `usingnamespace`" },
-  "catch unreachable": { reason: "For out-of-memory, prefer 'catch bun.outOfMemory()'", limit: 1854 },
+  "catch unreachable": { reason: "For out-of-memory, prefer 'catch bun.outOfMemory()'", limit: 1857 },

   "std.fs.Dir": { reason: "Prefer bun.sys + bun.FD instead of std.fs", limit: 180 },
-  "std.fs.cwd": { reason: "Prefer bun.FD.cwd()", limit: 103 },
+  "std.fs.cwd": { reason: "Prefer bun.FD.cwd()", limit: 102 },
   "std.fs.File": { reason: "Prefer bun.sys + bun.FD instead of std.fs", limit: 62 },
   ".stdFile()": { reason: "Prefer bun.sys + bun.FD instead of std.fs.File. Zig hides 'errno' when Bun wants to match libuv", limit: 18 },
   ".stdDir()": { reason: "Prefer bun.sys + bun.FD instead of std.fs.File. Zig hides 'errno' when Bun wants to match libuv", limit: 48 },
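The hunk above only bumps occurrence limits: each key in the words table is a banned substring (or, with regex: true, a regex source), and limit caps how many existing occurrences are tolerated, so introducing new matches elsewhere in the tree requires raising the number. A rough sketch of how such a limit check could be applied; this is a hypothetical helper, not the repository's actual lint script:

// Sketch only: count occurrences of one banned pattern and compare against its allowed limit.
function checkBannedWord(source: string, word: string, opts: { limit?: number; regex?: boolean }): boolean {
  const escaped = word.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const pattern = new RegExp(opts.regex ? word : escaped, "g");
  const count = (source.match(pattern) ?? []).length;
  // With no limit, any occurrence fails; with a limit, only growth beyond it fails.
  return count <= (opts.limit ?? 0);
}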
592  test/js/bun/http/bun-serve-file.test.ts  (new file)
@@ -0,0 +1,592 @@
import type { Server } from "bun";
import { afterAll, beforeAll, describe, expect, it, mock, test } from "bun:test";
import { isWindows, rmScope, tempDirWithFiles } from "harness";
import { unlinkSync } from "node:fs";
import { join } from "node:path";

const LARGE_SIZE = 1024 * 1024 * 8;
const files = {
  "hello.txt": "Hello, World!",
  "empty.txt": "",
  "binary.bin": Buffer.from([0x00, 0x01, 0x02, 0x03, 0xff, 0xfe, 0xfd]),
  "large.txt": Buffer.alloc(LARGE_SIZE, "bun").toString(),
  "unicode.txt": "Hello 世界 🌍 émojis",
  "json.json": JSON.stringify({ message: "test", number: 42 }),
  "nested/file.txt": "nested content",
  "special chars & symbols.txt": "special file content",
  "will-be-deleted.txt": "will be deleted",
  "partial.txt": "0123456789ABCDEF",
};

describe("Bun.file in serve routes", () => {
  let server: Server;
  let tempDir: string;
  let handler = mock(req => {
    return new Response(`fallback: ${req.url}`, {
      headers: {
        "Content-Type": "text/plain",
      },
    });
  });

  beforeAll(async () => {
    tempDir = tempDirWithFiles("bun-serve-file-test-", files);

    const routes = {
      "/hello.txt": {
        GET: new Response(Bun.file(join(tempDir, "hello.txt"))),
        HEAD: new Response(Bun.file(join(tempDir, "hello.txt"))),
      },
      "/empty.txt": new Response(Bun.file(join(tempDir, "empty.txt"))),
      "/empty-400.txt": new Response(Bun.file(join(tempDir, "empty.txt")), {
        status: 400,
      }),
      "/binary.bin": new Response(Bun.file(join(tempDir, "binary.bin"))),
      "/large.txt": new Response(Bun.file(join(tempDir, "large.txt"))),
      "/unicode.txt": new Response(Bun.file(join(tempDir, "unicode.txt"))),
      "/json.json": new Response(Bun.file(join(tempDir, "json.json"))),
      "/nested/file.txt": new Response(Bun.file(join(tempDir, "nested", "file.txt"))),
      "/special-chars.txt": new Response(Bun.file(join(tempDir, "special chars & symbols.txt"))),
      "/nonexistent.txt": new Response(Bun.file(join(tempDir, "does-not-exist.txt"))),
      "/with-headers.txt": new Response(Bun.file(join(tempDir, "hello.txt")), {
        headers: {
          "X-Custom-Header": "custom-value",
          "Cache-Control": "max-age=3600",
        },
      }),
      "/with-status.txt": new Response(Bun.file(join(tempDir, "hello.txt")), {
        status: 201,
        statusText: "Created",
      }),
      "/will-be-deleted.txt": new Response(Bun.file(join(tempDir, "will-be-deleted.txt"))),
      "/custom-last-modified.txt": new Response(Bun.file(join(tempDir, "hello.txt")), {
        headers: {
          "Last-Modified": "Wed, 21 Oct 2015 07:28:00 GMT",
        },
      }),
      "/partial.txt": new Response(Bun.file(join(tempDir, "partial.txt"))),
      "/partial-slice.txt": new Response(Bun.file(join(tempDir, "partial.txt")).slice(5, 10)),
      "/fd-not-supported.txt": (() => {
        // This would test file descriptors, but they're not supported yet
        return new Response(Bun.file(join(tempDir, "hello.txt")));
      })(),
    } as const;

    server = Bun.serve({
      routes: routes,
      port: 0,
      fetch: handler,
    });
    server.unref();

    unlinkSync(join(tempDir, "will-be-deleted.txt"));
  });

  afterAll(() => {
    server?.stop(true);
    using _ = rmScope(tempDir);
  });

  describe("Basic file serving", () => {
    it("serves text file", async () => {
      const res = await fetch(new URL(`/hello.txt`, server.url));
      expect(res.status).toBe(200);
      expect(await res.text()).toBe("Hello, World!");
      const headers = res.headers.toJSON();
      if (!new Date(headers["last-modified"]!).getTime()) {
        throw new Error("Last-Modified header is not a valid date");
      }

      if (!new Date(headers["date"]!).getTime()) {
        throw new Error("Date header is not a valid date");
      }

      delete headers.date;
      delete headers["last-modified"];

      // Snapshot the headers so a test fails if we change the headers later.
      expect(headers).toMatchInlineSnapshot(`
        {
          "content-length": "13",
          "content-type": "text/plain;charset=utf-8",
        }
      `);
    });

    it("serves empty file", async () => {
      const res = await fetch(new URL(`/empty.txt`, server.url));
      expect(res.status).toBe(204);
      expect(await res.text()).toBe("");
      // A server MUST NOT send a Content-Length header field in any response
      // with a status code of 1xx (Informational) or 204 (No Content). A server
      // MUST NOT send a Content-Length header field in any 2xx (Successful)
      // response to a CONNECT request (Section 9.3.6).
      expect(res.headers.get("Content-Length")).toBeNull();

      const headers = res.headers.toJSON();
      delete headers.date;
      delete headers["last-modified"];

      expect(headers).toMatchInlineSnapshot(`
        {
          "content-type": "text/plain;charset=utf-8",
        }
      `);
    });

    it("serves empty file with custom status code", async () => {
      const res = await fetch(new URL(`/empty-400.txt`, server.url));
      expect(res.status).toBe(400);
      expect(await res.text()).toBe("");
      expect(res.headers.get("Content-Length")).toBe("0");
    });

    it("serves binary file", async () => {
      const res = await fetch(new URL(`/binary.bin`, server.url));
      expect(res.status).toBe(200);
      const bytes = await res.bytes();
      expect(bytes).toEqual(new Uint8Array([0x00, 0x01, 0x02, 0x03, 0xff, 0xfe, 0xfd]));
      expect(res.headers.get("Content-Type")).toMatch(/application\/octet-stream/);
    });

    it("serves large file", async () => {
      const res = await fetch(new URL(`/large.txt`, server.url));
      expect(res.status).toBe(200);
      const text = await res.text();
      expect(text).toHaveLength(LARGE_SIZE);

      if (files["large.txt"] !== text) {
        console.log("Expected length:", files["large.txt"].length);
        console.log("Actual length:", text.length);
        console.log("First 100 chars expected:", files["large.txt"].slice(0, 100));
        console.log("First 100 chars actual:", text.slice(0, 100));
        console.log("Last 100 chars expected:", files["large.txt"].slice(-100));
        console.log("Last 100 chars actual:", text.slice(-100));

        // Find first difference
        for (let i = 0; i < Math.min(files["large.txt"].length, text.length); i++) {
          if (files["large.txt"][i] !== text[i]) {
            console.log(`First difference at index ${i}:`);
            console.log(`Expected: "${files["large.txt"][i]}" (code: ${files["large.txt"].charCodeAt(i)})`);
            console.log(`Actual: "${text[i]}" (code: ${text.charCodeAt(i)})`);
            console.log(`Context around difference: "${files["large.txt"].slice(Math.max(0, i - 10), i + 10)}"`);
            console.log(`Actual context: "${text.slice(Math.max(0, i - 10), i + 10)}"`);
            break;
          }
        }
        throw new Error("large.txt is not the same");
      }

      expect(res.headers.get("Content-Length")).toBe(LARGE_SIZE.toString());

      const headers = res.headers.toJSON();
      delete headers.date;
      delete headers["last-modified"];

      expect(headers).toMatchInlineSnapshot(`
        {
          "content-length": "${LARGE_SIZE}",
          "content-type": "text/plain;charset=utf-8",
        }
      `);
    });

    it("serves unicode file", async () => {
      const res = await fetch(new URL(`/unicode.txt`, server.url));
      expect(res.status).toBe(200);
      expect(await res.text()).toBe("Hello 世界 🌍 émojis");

      const headers = res.headers.toJSON();
      delete headers.date;
      delete headers["last-modified"];

      expect(headers).toMatchInlineSnapshot(`
        {
          "content-length": "25",
          "content-type": "text/plain;charset=utf-8",
        }
      `);
    });

    it("serves JSON file with correct content type", async () => {
      const res = await fetch(new URL(`/json.json`, server.url));
      expect(res.status).toBe(200);
      expect(await res.json()).toEqual({ message: "test", number: 42 });
      expect(res.headers.get("Content-Type")).toMatch(/application\/json/);
    });

    it("serves nested file", async () => {
      const res = await fetch(new URL(`/nested/file.txt`, server.url));
      expect(res.status).toBe(200);
      expect(await res.text()).toBe("nested content");
    });

    it("serves file with special characters in name", async () => {
      const res = await fetch(new URL(`/special-chars.txt`, server.url));
      expect(res.status).toBe(200);
      expect(await res.text()).toBe("special file content");
    });
  });

  describe("HTTP methods", () => {
    it("supports HEAD requests", async () => {
      const res = await fetch(new URL(`/hello.txt`, server.url), { method: "HEAD" });
      expect(res.status).toBe(200);
      expect(await res.text()).toBe("");
      expect(res.headers.get("Content-Length")).toBe("13"); // "Hello, World!" length
      expect(res.headers.get("Content-Type")).toMatch(/text\/plain/);
    });

    it("supports GET requests", async () => {
      const res = await fetch(new URL(`/hello.txt`, server.url), { method: "GET" });
      expect(res.status).toBe(200);
      expect(await res.text()).toBe("Hello, World!");
    });
  });

  describe("Custom headers and status", () => {
    it("preserves custom headers", async () => {
      const res = await fetch(new URL(`/with-headers.txt`, server.url));
      expect(res.status).toBe(200);
      expect(await res.text()).toBe("Hello, World!");
      expect(res.headers.get("X-Custom-Header")).toBe("custom-value");
      expect(res.headers.get("Cache-Control")).toBe("max-age=3600");
    });

    it("preserves custom status", async () => {
      const res = await fetch(new URL(`/with-status.txt`, server.url));
      expect(res.status).toBe(201);
      expect(res.statusText).toBe("Created");
      expect(await res.text()).toBe("Hello, World!");
    });
  });

  describe("Error handling", () => {
    it("handles nonexistent files gracefully", async () => {
      const previousCallCount = handler.mock.calls.length;
      const res = await fetch(new URL(`/nonexistent.txt`, server.url));

      // Should fall back to the handler since file doesn't exist
      expect(res.status).toBe(200);
      expect(await res.text()).toBe(`fallback: ${server.url}nonexistent.txt`);
      expect(handler.mock.calls.length).toBe(previousCallCount + 1);
    });
  });

  describe.todo("Range requests", () => {
    it("supports partial content requests", async () => {
      const res = await fetch(new URL(`/hello.txt`, server.url), {
        headers: {
          "Range": "bytes=0-4",
        },
      });

      if (res.status === 206) {
        expect(await res.text()).toBe("Hello");
        expect(res.headers.get("Content-Range")).toMatch(/bytes 0-4\/13/);
        expect(res.headers.get("Accept-Ranges")).toBe("bytes");
      } else {
        // If range requests aren't supported, should return full content
        expect(res.status).toBe(200);
        expect(await res.text()).toBe("Hello, World!");
      }
    });

    it("handles invalid range requests", async () => {
      const res = await fetch(new URL(`/hello.txt`, server.url), {
        headers: {
          "Range": "bytes=20-30", // Beyond file size
        },
      });

      // Should either return 416 Range Not Satisfiable or 200 with full content
      expect([200, 416]).toContain(res.status);
    });
  });

  describe("Conditional requests", () => {
    describe.each(["GET", "HEAD"])("%s", method => {
      it(`handles If-Modified-Since with future date (304)`, async () => {
        // First request to get Last-Modified
        const res1 = await fetch(new URL(`/hello.txt`, server.url));
        const lastModified = res1.headers.get("Last-Modified");
        expect(lastModified).not.toBeEmpty();

        // If-Modified-Since is AFTER the file's last modified date (future)
        // Should return 304 because file hasn't been modified since that future date
        const res2 = await fetch(new URL(`/hello.txt`, server.url), {
          method,
          headers: {
            "If-Modified-Since": new Date(Date.parse(lastModified!) + 10000).toISOString(),
          },
        });

        expect(res2.status).toBe(304);
        expect(await res2.text()).toBe("");
      });

      it(`handles If-Modified-Since with past date (200)`, async () => {
        // If-Modified-Since is way in the past
        // Should return 200 because file has been modified since then
        const res = await fetch(new URL(`/hello.txt`, server.url), {
          method,
          headers: {
            "If-Modified-Since": new Date(Date.now() - 1000000).toISOString(),
          },
        });

        expect(res.status).toBe(200);
      });
    });

    it("ignores If-Modified-Since for non-GET/HEAD requests", async () => {
      const res1 = await fetch(new URL(`/hello.txt`, server.url));
      const lastModified = res1.headers.get("Last-Modified");

      const res2 = await fetch(new URL(`/hello.txt`, server.url), {
        method: "POST",
        headers: {
          "If-Modified-Since": new Date(Date.parse(lastModified!) + 10000).toISOString(),
        },
      });

      // Should not return 304 for POST
      expect(res2.status).not.toBe(304);
    });

    it.todo("handles ETag", async () => {
      const res1 = await fetch(new URL(`/hello.txt`, server.url));
      const etag = res1.headers.get("ETag");

      const res2 = await fetch(new URL(`/hello.txt`, server.url), {
        headers: {
          "If-None-Match": etag!,
        },
      });

      expect(res2.status).toBe(304);
      expect(await res2.text()).toBe("");
    });
  });

  describe("Stress testing", () => {
    test.each(["hello.txt", "large.txt"])(
      "concurrent requests for %s",
      async filename => {
        const batchSize = isWindows ? 8 : 32;
        const iterations = isWindows ? 2 : 5;

        async function iterate() {
          const promises = Array.from({ length: batchSize }, () =>
            fetch(`${server.url}${filename}`).then(res => {
              expect(res.status).toBe(200);
              return res.text();
            }),
          );

          const results = await Promise.all(promises);

          // Verify all responses are identical
          const expected = results[0];
          results.forEach(result => {
            expect(result).toBe(expected);
          });
        }

        for (let i = 0; i < iterations; i++) {
          await iterate();
          Bun.gc();
        }
      },
      30000,
    );

    it("memory usage stays reasonable", async () => {
      Bun.gc(true);
      const baseline = (process.memoryUsage.rss() / 1024 / 1024) | 0;

      // Make many requests to large file
      for (let i = 0; i < 50; i++) {
        const res = await fetch(new URL(`/large.txt`, server.url));
        expect(res.status).toBe(200);
        await res.text(); // Consume the response
      }

      Bun.gc(true);
      const final = (process.memoryUsage.rss() / 1024 / 1024) | 0;
      const delta = final - baseline;

      expect(delta).toBeLessThan(100); // Should not leak significant memory
    }, 30000);

    it("deleted file goes to handler", async () => {
      const previousCallCount = handler.mock.calls.length;
      const res = await fetch(new URL(`/will-be-deleted.txt`, server.url));
      expect(res.status).toBe(200);
      expect(await res.text()).toBe(`fallback: ${server.url}will-be-deleted.txt`);
      expect(handler.mock.calls.length).toBe(previousCallCount + 1);
    });
  });

  describe("Handler fallback", () => {
    it("falls back to handler for unmatched routes", async () => {
      const previousCallCount = handler.mock.calls.length;
      const res = await fetch(new URL(`/not-in-routes.txt`, server.url));

      expect(res.status).toBe(200);
      expect(await res.text()).toBe(`fallback: ${server.url}not-in-routes.txt`);
      expect(handler.mock.calls.length).toBe(previousCallCount + 1);
    });

    it("does not call handler for matched file routes", async () => {
      const previousCallCount = handler.mock.calls.length;
      const res = await fetch(new URL(`/hello.txt`, server.url));

      expect(res.status).toBe(200);
      expect(await res.text()).toBe("Hello, World!");
      expect(handler.mock.calls.length).toBe(previousCallCount);
    });
  });

  describe("Last-Modified header handling", () => {
    it("automatically adds Last-Modified header", async () => {
      const res = await fetch(new URL(`/hello.txt`, server.url));
      const lastModified = res.headers.get("Last-Modified");
      expect(lastModified).not.toBeNull();
      expect(lastModified).toMatch(/^[A-Za-z]{3}, \d{2} [A-Za-z]{3} \d{4} \d{2}:\d{2}:\d{2} GMT$/);
    });

    it("respects custom Last-Modified header", async () => {
      const res = await fetch(new URL(`/custom-last-modified.txt`, server.url));
      expect(res.headers.get("Last-Modified")).toBe("Wed, 21 Oct 2015 07:28:00 GMT");
    });

    it("uses custom Last-Modified for If-Modified-Since checks", async () => {
      // Request with If-Modified-Since after custom date
      const res1 = await fetch(new URL(`/custom-last-modified.txt`, server.url), {
        headers: {
          "If-Modified-Since": "Thu, 22 Oct 2015 07:28:00 GMT",
        },
      });
      expect(res1.status).toBe(304);

      // Request with If-Modified-Since before custom date
      const res2 = await fetch(new URL(`/custom-last-modified.txt`, server.url), {
        headers: {
          "If-Modified-Since": "Tue, 20 Oct 2015 07:28:00 GMT",
        },
      });
      expect(res2.status).toBe(200);
    });
  });

  describe("File slicing", () => {
    it("serves complete file", async () => {
      const res = await fetch(new URL(`/partial.txt`, server.url));
      expect(res.status).toBe(200);
      expect(await res.text()).toBe("0123456789ABCDEF");
      expect(res.headers.get("Content-Length")).toBe("16");
    });

    it("serves sliced file", async () => {
      const res = await fetch(new URL(`/partial-slice.txt`, server.url));
      expect(res.status).toBe(200);
      expect(await res.text()).toBe("56789");
      expect(res.headers.get("Content-Length")).toBe("5");
    });
  });

  describe("Special status codes", () => {
    it("returns 204 for empty files with 200 status", async () => {
      const res = await fetch(new URL(`/empty.txt`, server.url));
      expect(res.status).toBe(204);
      expect(await res.text()).toBe("");
    });

    it("preserves custom status for empty files", async () => {
      const res = await fetch(new URL(`/empty-400.txt`, server.url));
      expect(res.status).toBe(400);
      expect(await res.text()).toBe("");
    });

    it("returns appropriate status for 304 responses", async () => {
      const res1 = await fetch(new URL(`/hello.txt`, server.url));
      const lastModified = res1.headers.get("Last-Modified");

      const res2 = await fetch(new URL(`/hello.txt`, server.url), {
        headers: {
          "If-Modified-Since": new Date(Date.parse(lastModified!) + 10000).toISOString(),
        },
      });

      expect(res2.status).toBe(304);
      expect(res2.headers.get("Content-Length")).toBeNull();
      expect(await res2.text()).toBe("");
    });
  });

  describe("Streaming and file types", () => {
    it("sets Content-Length for regular files", async () => {
      const res = await fetch(new URL(`/hello.txt`, server.url));
      expect(res.headers.get("Content-Length")).toBe("13");
    });

    it("handles HEAD requests with proper headers", async () => {
      const res = await fetch(new URL(`/hello.txt`, server.url), { method: "HEAD" });
      expect(res.status).toBe(200);
      expect(res.headers.get("Content-Length")).toBe("13");
      expect(res.headers.get("Content-Type")).toMatch(/text\/plain/);
      expect(res.headers.get("Last-Modified")).not.toBeNull();
      expect(await res.text()).toBe("");
    });

    it("handles abort/cancellation gracefully", async () => {
      const controller = new AbortController();
      const promise = fetch(new URL(`/large.txt`, server.url), {
        signal: controller.signal,
      });

      // Abort immediately
      controller.abort();

      await expect(promise).rejects.toThrow(/abort/i);
    });
  });

  describe("File not found handling", () => {
    it("falls back to handler when file doesn't exist", async () => {
      const previousCallCount = handler.mock.calls.length;
      const res = await fetch(new URL(`/nonexistent.txt`, server.url));

      expect(res.status).toBe(200);
      expect(await res.text()).toBe(`fallback: ${server.url}nonexistent.txt`);
      expect(handler.mock.calls.length).toBe(previousCallCount + 1);
    });

    it("falls back to handler when file is deleted after route creation", async () => {
      const previousCallCount = handler.mock.calls.length;
      const res = await fetch(new URL(`/will-be-deleted.txt`, server.url));

      expect(res.status).toBe(200);
      expect(await res.text()).toBe(`fallback: ${server.url}will-be-deleted.txt`);
      expect(handler.mock.calls.length).toBe(previousCallCount + 1);
    });
  });

  describe("Content-Type detection", () => {
    it("detects text/plain for .txt files", async () => {
      const res = await fetch(new URL(`/hello.txt`, server.url));
      expect(res.headers.get("Content-Type")).toMatch(/text\/plain/);
    });

    it("detects application/json for .json files", async () => {
      const res = await fetch(new URL(`/json.json`, server.url));
      expect(res.headers.get("Content-Type")).toMatch(/application\/json/);
    });

    it("detects application/octet-stream for binary files", async () => {
      const res = await fetch(new URL(`/binary.bin`, server.url));
      expect(res.headers.get("Content-Type")).toMatch(/application\/octet-stream/);
    });
  });
});
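Taken together, the suite above exercises one serving pattern: a Response wrapping Bun.file(...) used as a static route, with unmatched or missing files falling through to fetch(). A minimal usage sketch along the lines of the beforeAll setup (the route and file name are illustrative):

// Sketch only: one static file route plus a fallback handler, as in the beforeAll above.
const server = Bun.serve({
  port: 0,
  routes: {
    "/hello.txt": new Response(Bun.file("./hello.txt")),
  },
  fetch(req) {
    return new Response(`fallback: ${req.url}`, { headers: { "Content-Type": "text/plain" } });
  },
});

console.log(`listening on ${server.url}`);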