Compare commits

...

1 Commit

Author SHA1 Message Date
Claude Bot
8e88d1d9b1 fix: preserve user-set Content-Length header for ReadableStream responses
When a Response with a ReadableStream body had a manually set
Content-Length header, it was always stripped and replaced with
Transfer-Encoding: chunked. This prevented users from proxying
responses while preserving the original Content-Length.

The fix saves the user-provided Content-Length value before
writeHeaders strips it, and for stream bodies (where the blob is
detached), writes it back and marks the uWS content-length flag
so that subsequent write() calls use content-length framing
instead of chunked transfer encoding.

Closes #10507

Co-Authored-By: Claude <noreply@anthropic.com>
2026-02-20 05:18:06 +00:00
2 changed files with 199 additions and 0 deletions

View File

@@ -2254,6 +2254,11 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
defer if (content_type_needs_free) content_type.deinit(this.allocator);
var has_content_disposition = false;
var has_content_range = false;
// Save user-provided Content-Length before writeHeaders strips it.
// This is needed for ReadableStream bodies where the user knows the
// total size upfront (e.g. proxy responses). For known-size bodies
// (blob/string), tryEnd() will set Content-Length from the actual size.
var user_content_length: ?usize = null;
if (response.swapInitHeaders()) |headers_| {
defer headers_.deref();
has_content_disposition = headers_.fastHas(.ContentDisposition);
@@ -2263,6 +2268,14 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
status = 206;
}
if (this.blob.isDetached()) {
if (headers_.fastGet(.ContentLength)) |cl| {
const cl_str = cl.toSlice(this.allocator);
defer cl_str.deinit();
user_content_length = std.fmt.parseInt(usize, cl_str.slice(), 10) catch null;
}
}
this.doWriteStatus(status);
this.doWriteHeaders(headers_);
} else if (needs_content_range) {
@@ -2306,6 +2319,12 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
if (this.flags.needs_content_length) {
resp.writeHeaderInt("content-length", size);
this.flags.needs_content_length = false;
} else if (user_content_length) |cl| {
// For ReadableStream bodies where the user explicitly set
// Content-Length, write it and tell uWS to use content-length
// framing instead of chunked transfer encoding.
resp.writeHeaderInt("content-length", cl);
resp.markWroteContentLengthHeader();
}
if (needs_content_range and !has_content_range) {

View File

@@ -0,0 +1,180 @@
import { expect, test } from "bun:test";
// Fetches one raw HTTP/1.1 response over a plain TCP socket so the exact
// wire-level framing headers (Content-Length vs Transfer-Encoding) can be
// inspected without fetch() normalizing them away.
async function getRawResponse(port: number, path: string = "/"): Promise<{ headers: string; body: Buffer }> {
  return new Promise((resolve, reject) => {
    const received: Buffer[] = [];
    // Fail the test promptly instead of hanging if the server never closes.
    const timer = setTimeout(() => reject(new Error("Timeout")), 5000);
    Bun.connect({
      hostname: "localhost",
      port,
      socket: {
        open(socket) {
          // Connection: close makes the server terminate after one response,
          // so `close` below marks end-of-response.
          socket.write(`GET ${path} HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n`);
        },
        data(_socket, data) {
          received.push(Buffer.from(data));
        },
        close() {
          clearTimeout(timer);
          const raw = Buffer.concat(received);
          const separator = raw.indexOf("\r\n\r\n");
          if (separator === -1) {
            reject(new Error("No header terminator found"));
            return;
          }
          resolve({
            headers: raw.subarray(0, separator).toString("utf8"),
            body: raw.subarray(separator + 4),
          });
        },
        error(_socket, err) {
          clearTimeout(timer);
          reject(err);
        },
      },
    });
  });
}
/**
 * Look up a header value in a raw HTTP header block, case-insensitively.
 *
 * @param rawHeaders - The header section of a response (CRLF-separated lines).
 * @param name - Header name to find; matched case-insensitively.
 * @returns The trimmed header value, or null when the header is absent.
 */
function getHeader(rawHeaders: string, name: string): string | null {
  // Escape regex metacharacters so the name is always matched literally;
  // the previous interpolation would misbehave for names containing
  // characters like '.', '+', or '('.
  const escaped = name.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const regex = new RegExp(`^${escaped}:\\s*(.+)$`, "mi");
  const match = rawHeaders.match(regex);
  return match ? match[1].trim() : null;
}
test("large streaming ReadableStream preserves user-set Content-Length", async () => {
  // 1 MiB body delivered in 1 KiB chunks, so it cannot be eagerly buffered
  // into a blob and must go down the streaming path.
  const CHUNK_SIZE = 1024;
  const CHUNK_COUNT = 1024;
  const expectedSize = CHUNK_SIZE * CHUNK_COUNT;
  const payload = new Uint8Array(CHUNK_SIZE).fill(65); // 'A'
  using server = Bun.serve({
    port: 0,
    async fetch() {
      let pending = CHUNK_COUNT;
      return new Response(
        new ReadableStream({
          pull(controller) {
            if (pending > 0) {
              pending--;
              controller.enqueue(payload);
            } else {
              controller.close();
            }
          },
        }),
        {
          headers: {
            "Content-Length": String(expectedSize),
            "Content-Type": "application/octet-stream",
          },
        },
      );
    },
  });
  const { headers, body } = await getRawResponse(server.port);
  expect(getHeader(headers, "content-length")).toBe(String(expectedSize));
  expect(getHeader(headers, "transfer-encoding")).toBeNull();
  expect(body.length).toBe(expectedSize);
});
test("large streaming ReadableStream without Content-Length uses chunked encoding", async () => {
  // Same large streamed body as above, but with no explicit Content-Length.
  const CHUNK_SIZE = 1024;
  const CHUNK_COUNT = 1024;
  const payload = new Uint8Array(CHUNK_SIZE).fill(65);
  using server = Bun.serve({
    port: 0,
    async fetch() {
      let pending = CHUNK_COUNT;
      return new Response(
        new ReadableStream({
          pull(controller) {
            if (pending > 0) {
              pending--;
              controller.enqueue(payload);
            } else {
              controller.close();
            }
          },
        }),
        { headers: { "Content-Type": "application/octet-stream" } },
      );
    },
  });
  const { headers } = await getRawResponse(server.port);
  // Without explicit Content-Length, chunked encoding should be used
  expect(getHeader(headers, "transfer-encoding")).toBe("chunked");
  expect(getHeader(headers, "content-length")).toBeNull();
});
test("async ReadableStream with delay preserves user-set Content-Length", async () => {
const body = "Hello, World!";
const bodyBytes = new TextEncoder().encode(body);
using server = Bun.serve({
port: 0,
async fetch() {
const stream = new ReadableStream({
async pull(controller) {
// Delay to ensure the stream is not eagerly consumed
await Bun.sleep(10);
controller.enqueue(bodyBytes);
controller.close();
},
});
return new Response(stream, {
headers: {
"Content-Length": String(bodyBytes.length),
"Content-Type": "text/plain",
},
});
},
});
const { headers, body: respBody } = await getRawResponse(server.port);
expect(getHeader(headers, "content-length")).toBe(String(bodyBytes.length));
expect(getHeader(headers, "transfer-encoding")).toBeNull();
expect(respBody.toString()).toBe(body);
});
test("multi-chunk async ReadableStream preserves user-set Content-Length", async () => {
  const segments = ["Hello, ", "World", "!"];
  const encoder = new TextEncoder();
  const byteTotal = segments.reduce((sum, s) => sum + encoder.encode(s).length, 0);
  using server = Bun.serve({
    port: 0,
    async fetch() {
      let cursor = 0;
      return new Response(
        new ReadableStream({
          async pull(controller) {
            if (cursor === segments.length) {
              controller.close();
              return;
            }
            // Small delay between chunks keeps delivery asynchronous.
            await Bun.sleep(5);
            controller.enqueue(encoder.encode(segments[cursor]));
            cursor++;
          },
        }),
        { headers: { "Content-Length": String(byteTotal) } },
      );
    },
  });
  const { headers, body } = await getRawResponse(server.port);
  expect(getHeader(headers, "content-length")).toBe(String(byteTotal));
  expect(getHeader(headers, "transfer-encoding")).toBeNull();
  expect(body.toString()).toBe("Hello, World!");
});