Compare commits

...

7 Commits

Author SHA1 Message Date
Claude Bot
cb26928b62 Add error handler to third test's client script
Added req.on('error') handler to match the pattern used in the first test,
ensuring consistent error handling and debuggable failures.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-31 19:38:36 +00:00
Claude Bot
08baed00ae Update tests to use HTTPS to properly exercise TLS handshake race condition
- Changed all servers to use TLS via Bun.serve({ tls })
- Changed clients to use node:https instead of node:http
- Added rejectUnauthorized: false for self-signed certs
- Added strictSSL: false for request-promise

This properly exercises the original issue, which only occurs on new HTTPS
connections because of the TLS handshake timing gap.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-31 19:25:28 +00:00
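
As a rough illustration of the setup the commit above describes (a minimal sketch, not the test code itself; `tls` is assumed to be a self-signed cert/key pair such as the one exported by the test harness):

import https from "node:https";
import { tls } from "harness"; // assumed: self-signed key/cert helper used by Bun's tests

// Server side: enable TLS in Bun.serve so the client performs a real handshake.
const server = Bun.serve({
  port: 0,
  tls,
  fetch: async req => new Response(String((await req.text()).length)),
});

// Client side: node:https instead of node:http, accepting the self-signed cert.
// (strictSSL: false is the request-promise equivalent of rejectUnauthorized: false.)
const req = https.request(
  `https://localhost:${server.port}/`,
  { method: "POST", rejectUnauthorized: false },
  res => {
    res.on("data", () => {});
    res.on("end", () => server.stop());
  },
);
req.end("hello");
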
Claude Bot
32747384b7 Keep test timeout for npm package installation
The request-promise test requires a longer timeout (60s) because it
installs npm packages. Added comment explaining why the timeout is needed.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-31 18:58:09 +00:00
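
The per-test timeout referred to above is the bun:test options argument used in the diff below; a minimal sketch:

import { test } from "bun:test";

// Per-test timeout instead of a global setDefaultTimeout():
test("slow test that installs npm packages", { timeout: 60_000 }, async () => {
  // ... install and run steps elided
});
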
Claude Bot
8ca4cc9796 Address additional review feedback
- Remove global setDefaultTimeout, use per-test timeout for the slow test
- Use `await using` for all Bun.spawn calls for proper cleanup
- Convert single-file tests to use `-e` inline execution instead of tempDir
- Replace `.repeat()` with `Buffer.alloc().toString()` in client scripts

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-31 18:43:50 +00:00
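
For reference, the `await using` + `-e` pattern mentioned in the commit above looks roughly like this (a sketch under the assumption that `bunExe`/`bunEnv` are the usual harness helpers; the inline script is a placeholder):

import { bunEnv, bunExe } from "harness"; // assumed test-harness helpers

// `-e` runs the script inline, so no tempDir is needed for single-file cases;
// `await using` disposes of the subprocess when the scope exits.
await using proc = Bun.spawn({
  cmd: [bunExe(), "-e", "console.log('hello')"],
  env: bunEnv,
  stdout: "pipe",
  stderr: "pipe",
});
const [stdout, exitCode] = await Promise.all([proc.stdout.text(), proc.exited]);

// Buffer.alloc(n, fill).toString() in place of "x".repeat(n):
const payload = Buffer.alloc(100, "A").toString();
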
Claude Bot
483ccc63d5 Address additional review comments
- Change catch parameter from `e: any` to `e: unknown` with safe error message extraction
- Log install stdout/stderr on failure for easier debugging

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-31 18:29:37 +00:00
Claude Bot
5e1b61c942 Address review comments for test assertions
- Reorder assertions to check stdout before exitCode for better error messages
- Use Buffer.alloc instead of string repeat for test file content

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-31 18:16:38 +00:00
Claude Bot
4c27d0df04 fix(node:https): ensure all chunks are yielded in streaming body
Fix a race condition in the node:https ClientRequest streaming body
where chunks could be lost when using request-promise + fs.createReadStream()
for multipart uploads.

The issue occurred because:
1. When multiple chunks were pushed to kBodyChunks between Promise resolutions
   in the async generator, only one chunk was yielded per iteration
2. When the request finished (self.finished = true), any remaining chunks in
   kBodyChunks were not yielded before the generator returned

This fix adds two safety loops:
1. After each Promise resolution, yield any remaining chunks that were pushed
2. After the main loop exits, yield any chunks that remain in the buffer

This is particularly important for HTTPS connections where the TLS handshake
delay can cause timing differences between when data is piped and when it's
consumed by the stream.

Fixes #26638

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-31 18:05:01 +00:00
2 changed files with 323 additions and 0 deletions

View File

@@ -363,11 +363,27 @@ function ClientRequest(input, options, cb) {
          };
        });
        // Yield any remaining chunks that may have been pushed
        // between the Promise resolving and now (race condition fix)
        while (self[kBodyChunks]?.length > 0) {
          yield self[kBodyChunks].shift();
        }
        if (self[kBodyChunks]?.length === 0) {
          self.emit("drain");
        }
      }
      // After the loop exits (self.finished is true), there may still
      // be chunks in the buffer that were pushed before/during the
      // self.finished check. Yield any remaining chunks to ensure
      // all data is sent. This fixes issue #26638 where the first
      // HTTPS request would be truncated due to race conditions
      // between the TLS handshake timing and data piping.
      while (self[kBodyChunks]?.length > 0) {
        yield self[kBodyChunks].shift();
      }
      handleResponse?.();
    };
  }
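
The hunk above shows only the new safety loops, not the surrounding generator. As a simplified sketch of the shape implied by the hunk and the commit message (placeholder names such as streamBody and waitForChunk are assumed, not Bun's actual source):

// Simplified sketch of where the two safety loops sit in the streaming-body generator.
const kBodyChunks = Symbol("kBodyChunks"); // stands in for the internal symbol

async function* streamBody(self: any, waitForChunk: (self: any) => Promise<Buffer>) {
  while (!self.finished) {
    // Original behavior: wait for the next written chunk, then yield it.
    yield await waitForChunk(self);
    // Safety loop 1: chunks pushed between the Promise resolving and now
    // would otherwise wait for the next iteration (or be dropped once
    // self.finished flips), so drain them here.
    while (self[kBodyChunks]?.length > 0) {
      yield self[kBodyChunks].shift();
    }
    if (self[kBodyChunks]?.length === 0) {
      self.emit("drain");
    }
  }
  // Safety loop 2: the request may finish while chunks are still buffered;
  // drain whatever remains so the body is not truncated.
  while (self[kBodyChunks]?.length > 0) {
    yield self[kBodyChunks].shift();
  }
}
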

View File

@@ -0,0 +1,307 @@
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir, tls } from "harness";
// Test for GitHub issue #26638
// First multipart upload over HTTPS corrupts the body when using request-promise + fs.createReadStream()
// The issue is that chunks can be lost due to race conditions between the TLS handshake timing
// and when data is piped to the ClientRequest.
describe("issue #26638", () => {
test("node:https streaming body yields all chunks even when end() is called quickly", async () => {
// This test simulates the race condition where:
// 1. Multiple chunks are written quickly to the ClientRequest over HTTPS
// 2. The request is ended before all chunks have been yielded by the async generator
// The fix ensures that all buffered chunks are yielded after the finished flag is set.
using server = Bun.serve({
port: 0,
tls,
async fetch(req) {
const text = await req.text();
return new Response(
JSON.stringify({
success: true,
bytesReceived: text.length,
}),
{ headers: { "Content-Type": "application/json" } },
);
},
});
const dashes = Buffer.alloc(100, "-").toString();
const clientScript = `
const https = require('https');
const chunks = [];
for (let i = 0; i < 100; i++) {
chunks.push('chunk' + i.toString().padStart(3, '0') + '${dashes}');
}
const expectedContent = chunks.join('');
const req = https.request('https://localhost:${server.port}/', {
method: 'POST',
headers: {
'Content-Type': 'text/plain',
'Transfer-Encoding': 'chunked',
},
rejectUnauthorized: false,
}, (res) => {
let data = '';
res.on('data', (chunk) => { data += chunk; });
res.on('end', () => {
try {
const result = JSON.parse(data);
if (result.bytesReceived !== expectedContent.length) {
console.error('Length mismatch! Expected:', expectedContent.length, 'Got:', result.bytesReceived);
process.exit(1);
}
console.log(JSON.stringify(result));
} catch (e) {
console.error('Failed to parse response:', e.message);
process.exit(1);
}
});
});
req.on('error', (e) => {
console.error('Request error:', e.message);
process.exit(1);
});
// Write chunks quickly to simulate fast data piping
for (const chunk of chunks) {
req.write(chunk);
}
req.end();
`;
// Run the client with inline script
await using proc = Bun.spawn({
cmd: [bunExe(), "-e", clientScript],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
if (stderr) {
console.error("stderr:", stderr);
}
// Check stdout before exitCode for better error messages on test failure
expect(stdout.trim()).not.toBe("");
const result = JSON.parse(stdout.trim());
expect(result.success).toBe(true);
// 100 chunks, each is "chunkXXX" + 100 dashes = 8 + 100 = 108 chars
expect(result.bytesReceived).toBe(100 * 108);
expect(exitCode).toBe(0);
});
  // This test installs npm packages which may take longer than the default timeout
  test(
    "request-promise with form-data and fs.createReadStream works correctly over HTTPS",
    { timeout: 60_000 },
    async () => {
      // This test specifically reproduces the original issue:
      // Using request-promise with form-data piping an fs.createReadStream over HTTPS
      using server = Bun.serve({
        port: 0,
        tls,
        async fetch(req) {
          try {
            const formData = await req.formData();
            const file = formData.get("sourceFile");
            if (!(file instanceof Blob)) {
              return new Response(JSON.stringify({ success: false, error: "No file found" }), {
                status: 400,
                headers: { "Content-Type": "application/json" },
              });
            }
            const content = await file.arrayBuffer();
            return new Response(
              JSON.stringify({
                success: true,
                bytesReceived: file.size,
                // Verify content is correct (should be all 'A's)
                contentValid: new Uint8Array(content).every(b => b === 65), // 65 is 'A'
              }),
              { headers: { "Content-Type": "application/json" } },
            );
          } catch (e: unknown) {
            let errorMessage: string;
            if (e instanceof Error) {
              errorMessage = e.message;
            } else if (typeof e === "object" && e !== null && "message" in e) {
              errorMessage = String((e as { message: unknown }).message);
            } else {
              errorMessage = String(e);
            }
            return new Response(JSON.stringify({ success: false, error: errorMessage }), {
              status: 500,
              headers: { "Content-Type": "application/json" },
            });
          }
        },
      });

      using dir = tempDir("test-26638-form", {
        "package.json": JSON.stringify({
          name: "test-26638",
          dependencies: {
            request: "^2.88.2",
            "request-promise": "^4.2.6",
          },
        }),
        // Create a test file with known content (100KB)
        "testfile.txt": Buffer.alloc(1024 * 100, "A").toString(),
        "client.js": `
          const fs = require('fs');
          const request = require('request-promise');
          async function upload() {
            try {
              const result = await request.post('https://localhost:${server.port}/', {
                formData: {
                  sourceFile: fs.createReadStream('./testfile.txt'),
                },
                json: true,
                strictSSL: false,
              });
              console.log(JSON.stringify(result));
            } catch (e) {
              console.error('Error:', e.statusCode, e.error?.error || e.message);
              process.exit(1);
            }
          }
          upload();
        `,
      });

      // Install dependencies
      await using installProc = Bun.spawn({
        cmd: [bunExe(), "install"],
        cwd: String(dir),
        env: bunEnv,
        stdout: "pipe",
        stderr: "pipe",
      });
      const [installStdout, installStderr, installExitCode] = await Promise.all([
        installProc.stdout.text(),
        installProc.stderr.text(),
        installProc.exited,
      ]);
      if (installExitCode !== 0) {
        console.error("Install stdout:", installStdout);
        console.error("Install stderr:", installStderr);
      }
      expect(installExitCode).toBe(0);

      // Run the client
      await using proc = Bun.spawn({
        cmd: [bunExe(), "client.js"],
        cwd: String(dir),
        env: bunEnv,
        stdout: "pipe",
        stderr: "pipe",
      });
      const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
      if (stderr) {
        console.error("stderr:", stderr);
      }

      // Check stdout before exitCode for better error messages on test failure
      expect(stdout.trim()).not.toBe("");
      const result = JSON.parse(stdout.trim());
      expect(result.success).toBe(true);
      expect(result.bytesReceived).toBe(1024 * 100);
      expect(result.contentValid).toBe(true);
      expect(exitCode).toBe(0);
    },
  );
test("multiple rapid writes followed by immediate end() yields all data over HTTPS", async () => {
// This test ensures that when many writes happen in quick succession
// followed by an immediate end() over HTTPS, no data is lost.
using server = Bun.serve({
port: 0,
tls,
async fetch(req) {
const text = await req.text();
return new Response(
JSON.stringify({
success: true,
bytesReceived: text.length,
}),
{ headers: { "Content-Type": "application/json" } },
);
},
});
const chunkContent = Buffer.alloc(100, "X").toString();
const clientScript = `
const https = require('https');
const numChunks = 1000;
const chunkSize = 100;
const expectedLength = numChunks * chunkSize;
const req = https.request('https://localhost:${server.port}/', {
method: 'POST',
headers: {
'Content-Type': 'application/octet-stream',
'Transfer-Encoding': 'chunked',
},
rejectUnauthorized: false,
}, (res) => {
let data = '';
res.on('data', (chunk) => { data += chunk; });
res.on('end', () => {
const result = JSON.parse(data);
if (result.bytesReceived !== expectedLength) {
console.error('FAIL: Expected', expectedLength, 'bytes, got', result.bytesReceived);
process.exit(1);
}
console.log(JSON.stringify(result));
});
});
req.on('error', (e) => {
console.error('Request error:', e.message);
process.exit(1);
});
// Write many chunks as fast as possible
const chunk = '${chunkContent}';
for (let i = 0; i < numChunks; i++) {
req.write(chunk);
}
// End immediately after all writes
req.end();
`;
await using proc = Bun.spawn({
cmd: [bunExe(), "-e", clientScript],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
if (stderr) {
console.error("stderr:", stderr);
}
// Check stdout before exitCode for better error messages on test failure
expect(stdout.trim()).not.toBe("");
const result = JSON.parse(stdout.trim());
expect(result.success).toBe(true);
expect(result.bytesReceived).toBe(1000 * 100); // 1000 chunks * 100 bytes
expect(exitCode).toBe(0);
});
});