Compare commits

1 commit

Author: Claude Bot
SHA1: e528d3cc60
Date: 2026-02-12 20:20:08 +00:00

fix(http): preserve explicit Content-Length header for streaming request bodies

When using node:http's ClientRequest with an explicit Content-Length
header and streaming the body via req.write(), Bun was silently
dropping the Content-Length header and switching to chunked transfer
encoding. This fixes the HTTP client to respect user-provided
Content-Length headers even when the request body is a stream.

Closes #26976

Co-Authored-By: Claude <noreply@anthropic.com>
2 changed files with 102 additions and 1 deletion
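
For context, the failing pattern from issue #26976 looks roughly like this (a hedged sketch; the URL, port, and body are illustrative):

import * as http from "node:http";

const body = Buffer.from("hello world");
const req = http.request("http://127.0.0.1:8080/upload", {
  method: "POST",
  // The caller declares the body length up front...
  headers: { "Content-Length": body.length },
});
// ...but delivers it in pieces via write(), so the client treats the body as
// a stream. Before this fix, Bun dropped the declared Content-Length and sent
// "Transfer-Encoding: chunked" on the wire instead.
req.write(body.subarray(0, 5));
req.write(body.subarray(5));
req.end();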


@@ -719,7 +719,17 @@ pub fn buildRequest(this: *HTTPClient, body_len: usize) picohttp.Request {
     if (body_len > 0 or this.method.hasRequestBody()) {
         if (this.flags.is_streaming_request_body) {
-            if (add_transfer_encoding and this.flags.upgrade_state == .none) {
+            // If the user explicitly provided a Content-Length header, preserve it
+            // instead of using chunked transfer encoding. This is needed for
+            // Node.js http.request() compatibility where users stream the body
+            // via req.write() but set Content-Length explicitly.
+            if (original_content_length) |content_length| {
+                request_headers_buf[header_count] = .{
+                    .name = content_length_header_name,
+                    .value = content_length,
+                };
+                header_count += 1;
+            } else if (add_transfer_encoding and this.flags.upgrade_state == .none) {
                 request_headers_buf[header_count] = chunked_encoded_header;
                 header_count += 1;
             }

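The header selection above reads: an explicit Content-Length wins, and chunked transfer encoding is only the fallback for streaming bodies with no declared length. A TypeScript paraphrase of that decision (illustrative only; these names are not Bun internals):

// Illustrative paraphrase of the Zig branch above, not Bun source code.
// `canUseChunked` stands in for `add_transfer_encoding and upgrade_state == .none`.
function pickFramingHeader(
  explicitContentLength: string | undefined,
  canUseChunked: boolean,
): { name: string; value: string } | undefined {
  if (explicitContentLength !== undefined) {
    // Preserve the caller's framing: send their Content-Length verbatim.
    return { name: "Content-Length", value: explicitContentLength };
  }
  if (canUseChunked) {
    // No declared length: frame the streaming body with chunked encoding.
    return { name: "Transfer-Encoding", value: "chunked" };
  }
  return undefined;
}
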

@@ -0,0 +1,91 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe } from "harness";

// https://github.com/oven-sh/bun/issues/26976
test("node:http request.write() preserves explicit Content-Length header", async () => {
  await using proc = Bun.spawn({
    cmd: [
      bunExe(),
      "-e",
      `
      const http = require("node:http");
      const BODY = Buffer.alloc(1024 * 64, 0x42);

      const server = http.createServer((req, res) => {
        const contentLength = req.headers["content-length"];
        const transferEncoding = req.headers["transfer-encoding"];
        let received = 0;
        req.on("data", (chunk) => { received += chunk.length; });
        req.on("end", () => {
          res.writeHead(200, { "Content-Type": "application/json" });
          res.end(JSON.stringify({
            contentLengthHeader: contentLength ?? null,
            transferEncoding: transferEncoding ?? null,
            bytesReceived: received,
          }));
        });
      });

      server.listen(0, () => {
        const port = server.address().port;
        const req = http.request(
          "http://127.0.0.1:" + port + "/upload",
          {
            method: "POST",
            headers: {
              "Content-Type": "application/octet-stream",
              "Content-Length": BODY.length,
            },
          },
          (res) => {
            const chunks = [];
            res.on("data", (chunk) => chunks.push(chunk));
            res.on("end", () => {
              const result = JSON.parse(Buffer.concat(chunks).toString());
              console.log(JSON.stringify(result));
              server.close();
            });
          },
        );

        // Stream the body in multiple chunks via req.write()
        const CHUNK_SIZE = 1024 * 16;
        let offset = 0;
        function writeNext() {
          if (offset >= BODY.length) {
            req.end();
            return;
          }
          const chunk = BODY.subarray(offset, offset + CHUNK_SIZE);
          offset += chunk.length;
          if (!req.write(chunk)) {
            req.once("drain", writeNext);
          } else {
            // Use setImmediate to avoid stack overflow and ensure multiple write() calls
            setImmediate(writeNext);
          }
        }
        writeNext();
      });
      `,
    ],
    env: bunEnv,
    stderr: "pipe",
    stdout: "pipe",
  });

  const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
  expect(stderr).toBe("");
  expect(exitCode).toBe(0);

  const result = JSON.parse(stdout.trim());
  // The server should receive the explicit Content-Length header
  expect(result.contentLengthHeader).toBe(String(1024 * 64));
  // Transfer-Encoding should NOT be chunked when Content-Length is explicitly set
  expect(result.transferEncoding).toBeNull();
  // All bytes should be received
  expect(result.bytesReceived).toBe(1024 * 64);
});
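
To eyeball the fix outside the test harness, one can point the client at a raw socket and dump the request head the client actually sends; with the fix, the printed head should carry "Content-Length: 4" and no Transfer-Encoding line. A minimal sketch (not part of the commit):

import * as net from "node:net";
import * as http from "node:http";

// Accept one connection, print the request head, and reply with an empty 200.
const server = net.createServer((socket) => {
  socket.once("data", (head) => {
    // Print only the start-line and headers, not any body bytes.
    console.log(head.toString("utf8").split("\r\n\r\n")[0]);
    socket.end("HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n");
  });
});

server.listen(0, () => {
  const { port } = server.address() as net.AddressInfo;
  const req = http.request({ port, method: "POST", headers: { "Content-Length": 4 } });
  req.on("response", (res) => {
    res.resume();
    res.on("end", () => server.close());
  });
  // Two write() calls make this a streaming body with an explicit length.
  req.write("ab");
  req.write("cd");
  req.end();
});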