fix(node:https): ensure all chunks are yielded in streaming body

Fix a race condition in the node:https ClientRequest streaming body
where chunks could be lost when using request-promise + fs.createReadStream()
for multipart uploads.

The issue occurred because:
1. When multiple chunks were pushed to kBodyChunks between Promise resolutions
   in the async generator, only one chunk was yielded per iteration
2. When the request finished (self.finished = true), any remaining chunks in
   kBodyChunks were not yielded before the generator returned

This fix adds two safety loops:
1. After each Promise resolution, yield any remaining chunks that were pushed
2. After the main loop exits, yield any chunks that remain in the buffer

This is particularly important for HTTPS connections where the TLS handshake
delay can cause timing differences between when data is piped and when it's
consumed by the stream.
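
The shape of the patched generator, as a minimal illustrative sketch: kBodyChunks is the buffer named above, but the wakeup resolver, field types, and function name here are assumptions for illustration, not Bun's exact internals.

    // Sketch only: symbols and wiring are illustrative stand-ins for Bun's internals.
    const kBodyChunks = Symbol("kBodyChunks");
    const kWakeup = Symbol("kWakeup");

    interface StreamingRequest {
      finished: boolean;
      [kBodyChunks]: Buffer[];
      [kWakeup]?: () => void; // assumed: write()/end() invoke this to wake the generator
    }

    async function* yieldBodyChunks(self: StreamingRequest) {
      while (!self.finished) {
        // Wait until write() pushes a chunk or end() sets self.finished.
        await new Promise<void>(resolve => { self[kWakeup] = resolve; });
        // Safety loop 1: several chunks may have been pushed before this
        // promise resolved; drain them all instead of yielding just one.
        while (self[kBodyChunks].length > 0) {
          yield self[kBodyChunks].shift()!;
        }
      }
      // Safety loop 2: end() can set finished while chunks are still buffered;
      // drain the remainder so the tail of the body is not dropped.
      while (self[kBodyChunks].length > 0) {
        yield self[kBodyChunks].shift()!;
      }
    }

With both drains in place, a burst of write() calls followed by an immediate end() can no longer strand chunks in the buffer.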

Fixes #26638

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
Claude Bot
2026-01-31 18:05:01 +00:00
parent 71ce550cfa
commit 4c27d0df04
2 changed files with 298 additions and 0 deletions

@@ -0,0 +1,282 @@
import { describe, expect, setDefaultTimeout, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";

// These tests may install npm packages, so they need a longer timeout
setDefaultTimeout(60_000);

// Test for GitHub issue #26638
// First multipart upload over HTTPS corrupts the body when using request-promise + fs.createReadStream()
// The issue is that chunks can be lost due to race conditions between the TLS handshake timing
// and when data is piped to the ClientRequest.
describe("issue #26638", () => {
test("node:https streaming body yields all chunks even when end() is called quickly", async () => {
// This test simulates the race condition where:
// 1. Multiple chunks are written quickly to the ClientRequest
// 2. The request is ended before all chunks have been yielded by the async generator
// The fix ensures that all buffered chunks are yielded after the finished flag is set.
using server = Bun.serve({
port: 0,
async fetch(req) {
const text = await req.text();
return new Response(
JSON.stringify({
success: true,
bytesReceived: text.length,
}),
{ headers: { "Content-Type": "application/json" } },
);
},
});
using dir = tempDir("test-26638", {
"client.js": `
const http = require('http');
const chunks = [];
for (let i = 0; i < 100; i++) {
chunks.push('chunk' + i.toString().padStart(3, '0') + '-'.repeat(100));
}
const expectedContent = chunks.join('');
const req = http.request('http://localhost:${server.port}/', {
method: 'POST',
headers: {
'Content-Type': 'text/plain',
'Transfer-Encoding': 'chunked',
},
}, (res) => {
let data = '';
res.on('data', (chunk) => { data += chunk; });
res.on('end', () => {
try {
const result = JSON.parse(data);
if (result.bytesReceived !== expectedContent.length) {
console.error('Length mismatch! Expected:', expectedContent.length, 'Got:', result.bytesReceived);
process.exit(1);
}
console.log(JSON.stringify(result));
} catch (e) {
console.error('Failed to parse response:', e.message);
process.exit(1);
}
});
});
req.on('error', (e) => {
console.error('Request error:', e.message);
process.exit(1);
});
// Write chunks quickly to simulate fast data piping
for (const chunk of chunks) {
req.write(chunk);
}
req.end();
`,
});
// Run the client
const proc = Bun.spawn({
cmd: [bunExe(), "client.js"],
cwd: String(dir),
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
if (stderr) {
console.error("stderr:", stderr);
}
expect(exitCode).toBe(0);
expect(stdout.trim()).not.toBe("");
const result = JSON.parse(stdout.trim());
expect(result.success).toBe(true);
// 100 chunks, each is "chunkXXX" + 100 dashes = 8 + 100 = 108 chars
expect(result.bytesReceived).toBe(100 * 108);
});
test("request-promise with form-data and fs.createReadStream works correctly", async () => {
// This test specifically reproduces the original issue:
// Using request-promise with form-data piping an fs.createReadStream
using server = Bun.serve({
port: 0,
async fetch(req) {
try {
const formData = await req.formData();
const file = formData.get("sourceFile");
if (!(file instanceof Blob)) {
return new Response(JSON.stringify({ success: false, error: "No file found" }), {
status: 400,
headers: { "Content-Type": "application/json" },
});
}
const content = await file.arrayBuffer();
return new Response(
JSON.stringify({
success: true,
bytesReceived: file.size,
// Verify content is correct (should be all 'A's)
contentValid: new Uint8Array(content).every(b => b === 65), // 65 is 'A'
}),
{ headers: { "Content-Type": "application/json" } },
);
} catch (e: any) {
return new Response(JSON.stringify({ success: false, error: e.message }), {
status: 500,
headers: { "Content-Type": "application/json" },
});
}
},
});
using dir = tempDir("test-26638-form", {
"package.json": JSON.stringify({
name: "test-26638",
dependencies: {
request: "^2.88.2",
"request-promise": "^4.2.6",
},
}),
// Create a test file with known content
"testfile.txt": "A".repeat(1024 * 100), // 100KB file
"client.js": `
const fs = require('fs');
const request = require('request-promise');
async function upload() {
try {
const result = await request.post('http://localhost:${server.port}/', {
formData: {
sourceFile: fs.createReadStream('./testfile.txt'),
},
json: true,
});
console.log(JSON.stringify(result));
} catch (e) {
console.error('Error:', e.statusCode, e.error?.error || e.message);
process.exit(1);
}
}
upload();
`,
});
// Install dependencies
const installProc = Bun.spawn({
cmd: [bunExe(), "install"],
cwd: String(dir),
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const installExitCode = await installProc.exited;
expect(installExitCode).toBe(0);
// Run the client
const proc = Bun.spawn({
cmd: [bunExe(), "client.js"],
cwd: String(dir),
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
if (stderr) {
console.error("stderr:", stderr);
}
expect(exitCode).toBe(0);
expect(stdout.trim()).not.toBe("");
const result = JSON.parse(stdout.trim());
expect(result.success).toBe(true);
expect(result.bytesReceived).toBe(1024 * 100);
expect(result.contentValid).toBe(true);
});
test("multiple rapid writes followed by immediate end() yields all data", async () => {
// This test ensures that when many writes happen in quick succession
// followed by an immediate end(), no data is lost.
using server = Bun.serve({
port: 0,
async fetch(req) {
const text = await req.text();
return new Response(
JSON.stringify({
success: true,
bytesReceived: text.length,
}),
{ headers: { "Content-Type": "application/json" } },
);
},
});
using dir = tempDir("test-26638-rapid", {
"client.js": `
const http = require('http');
const numChunks = 1000;
const chunkSize = 100;
const expectedLength = numChunks * chunkSize;
const req = http.request('http://localhost:${server.port}/', {
method: 'POST',
headers: {
'Content-Type': 'application/octet-stream',
'Transfer-Encoding': 'chunked',
},
}, (res) => {
let data = '';
res.on('data', (chunk) => { data += chunk; });
res.on('end', () => {
const result = JSON.parse(data);
if (result.bytesReceived !== expectedLength) {
console.error('FAIL: Expected', expectedLength, 'bytes, got', result.bytesReceived);
process.exit(1);
}
console.log(JSON.stringify(result));
});
});
// Write many chunks as fast as possible
const chunk = 'X'.repeat(chunkSize);
for (let i = 0; i < numChunks; i++) {
req.write(chunk);
}
// End immediately after all writes
req.end();
`,
});
const proc = Bun.spawn({
cmd: [bunExe(), "client.js"],
cwd: String(dir),
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
if (stderr) {
console.error("stderr:", stderr);
}
expect(exitCode).toBe(0);
expect(stdout.trim()).not.toBe("");
const result = JSON.parse(stdout.trim());
expect(result.success).toBe(true);
expect(result.bytesReceived).toBe(1000 * 100); // 1000 chunks * 100 bytes
});
});