## Summary

This PR fixes multiple HTTP/2 protocol compliance issues that were causing stream errors with various HTTP/2 clients (Fauna, gRPC/Connect, etc.).

fixes https://github.com/oven-sh/bun/issues/12544
fixes https://github.com/oven-sh/bun/issues/25589

### Key Fixes

**Window Size and Settings Handling**

- Fix the initial stream window size to use `DEFAULT_WINDOW_SIZE` until `SETTINGS_ACK` is received
- Per RFC 7540 Section 6.5.1, a sender can only rely on settings being applied after it receives `SETTINGS_ACK`
- Properly adjust existing stream windows when the `INITIAL_WINDOW_SIZE` setting changes (RFC 7540 Section 6.9.2; see the window sketch below)

**Header List Size Enforcement**

- Implement `maxHeaderListSize` checking per RFC 7540 Section 6.5.2
- Track the cumulative header list size using the HPACK entry overhead (32 bytes per entry, per RFC 7541 Section 4.1; see the accounting sketch below)
- Reject streams with `ENHANCE_YOUR_CALM` when the header list exceeds the configured limit

**Custom Settings Support**

- Add validation for the `customSettings` option (up to 10 custom settings, matching Node.js `MAX_ADDITIONAL_SETTINGS`; see the validation sketch below)
- Validate that setting IDs are in the range `[0, 0xFFFF]` per RFC 7540
- Validate that setting values are in the range `[0, 2^32-1]`

**Settings Validation Improvements**

- Use float comparison for settings validation to handle large values correctly (the previous code used `toInt32()`, which truncates values at or above 2^31; see the example below)
- Use proper `HTTP2_INVALID_SETTING_VALUE_RangeError` error codes for Node.js compatibility

**BufferFallbackAllocator**

- New allocator that tries a provided buffer first and falls back to the heap:
  - Similar to `std.heap.stackFallback`, but accepts an external buffer slice
  - Used with `shared_request_buffer` (a 16KB threadlocal) for the common case
  - Falls back to `bun.default_allocator` for large headers

## Test Plan

- [x] `bun bd` compiles successfully
- [x] Node.js HTTP/2 tests pass: `bun bd test/js/node/test/parallel/test-http2-connect.js`
- [x] New regression tests for frame size issues: `bun bd test test/regression/issue/25589.test.ts`
- [x] HTTP/2 continuation tests: `bun bd test test/js/node/http2/node-http2-continuation.test.ts`

---------

Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Claude Bot <claude-bot@bun.sh>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
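The window-size fix follows the standard RFC 7540 Section 6.9.2 bookkeeping. A minimal sketch in JavaScript (illustrative names and data structures; the actual implementation is in Zig):

```js
// Sketch of the RFC 7540 §6.9.2 rule: when SETTINGS_INITIAL_WINDOW_SIZE
// changes, every stream's send window shifts by the delta between the new
// and old values, and may legally become negative.
const DEFAULT_WINDOW_SIZE = 65535; // assumed until SETTINGS_ACK is received

function applyInitialWindowSizeChange(streams, oldSize, newSize) {
  const delta = newSize - oldSize;
  for (const stream of streams.values()) {
    stream.sendWindow += delta; // a negative window simply pauses sending
    if (stream.sendWindow > 2 ** 31 - 1) {
      // Overflowing the 2^31-1 maximum is a connection error per the RFC.
      throw new Error("FLOW_CONTROL_ERROR");
    }
  }
}
```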
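The header-list accounting is the usual RFC 7541 Section 4.1 formula. A sketch, assuming a plain name-to-value header map:

```js
// Each header entry costs name length + value length + 32 bytes of
// per-entry overhead (RFC 7541 §4.1).
const HPACK_ENTRY_OVERHEAD = 32;

function headerListSize(headers) {
  let size = 0;
  for (const [name, value] of Object.entries(headers)) {
    size += Buffer.byteLength(name) + Buffer.byteLength(String(value)) + HPACK_ENTRY_OVERHEAD;
  }
  return size;
}

// If this exceeds the configured SETTINGS_MAX_HEADER_LIST_SIZE, the stream
// is rejected with ENHANCE_YOUR_CALM.
```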
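The `customSettings` checks amount to bounds validation. A sketch with illustrative error messages (the real code raises Node-compatible `RangeError`s):

```js
const MAX_ADDITIONAL_SETTINGS = 10; // matches Node.js's cap on customSettings

function validateCustomSettings(customSettings) {
  const entries = Object.entries(customSettings);
  if (entries.length > MAX_ADDITIONAL_SETTINGS) {
    throw new RangeError(`customSettings supports at most ${MAX_ADDITIONAL_SETTINGS} entries`);
  }
  for (const [id, value] of entries) {
    const settingId = Number(id);
    if (!Number.isInteger(settingId) || settingId < 0 || settingId > 0xffff) {
      throw new RangeError(`invalid custom setting id: ${id}`);
    }
    if (!Number.isInteger(value) || value < 0 || value > 2 ** 32 - 1) {
      throw new RangeError(`invalid value for custom setting ${id}: ${value}`);
    }
  }
}
```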
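Why the float comparison matters: `toInt32()` wraps any value at or above 2^31, so a legal uint32 setting value can be misread before the range check even runs. For example:

```js
const value = 0xffffffff; // 4294967295, the maximum legal setting value
console.log(value | 0); // -1: toInt32-style truncation breaks the range check
console.log(value >= 0 && value <= 2 ** 32 - 1); // true: numeric comparison is correct
```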
94 lines · 2.6 KiB · JavaScript
/**
 * Node.js HTTP/2 server fixture for testing CONTINUATION frames.
 *
 * This server:
 * 1. Accepts requests with any number of headers
 * 2. Can respond with many headers (triggered by x-response-headers header)
 * 3. Can respond with large trailers (triggered by x-response-trailers header)
 */
const http2 = require("node:http2");

// Read TLS certs from args
const tlsCert = JSON.parse(process.argv[2]);

const server = http2.createSecureServer({
  key: tlsCert.key,
  cert: tlsCert.cert,
  // Allow up to 2000 header pairs (default is 128)
  maxHeaderListPairs: 2000,
  // Larger settings to avoid ENHANCE_YOUR_CALM
  settings: {
    maxHeaderListSize: 256 * 1024, // 256KB
  },
});

server.on("stream", (stream, headers) => {
  stream.on("error", err => {
    // Ignore stream errors in fixture - test will handle client-side
    console.error("Stream error:", err.message);
  });

  const path = headers[":path"] || "/";

  // Count how many headers we received (excluding pseudo-headers)
  const receivedHeaders = Object.keys(headers).filter(h => !h.startsWith(":")).length;

  // Check if client wants large response headers
  const numResponseHeaders = parseInt(headers["x-response-headers"] || "0", 10);

  // Check if client wants large trailers
  const numResponseTrailers = parseInt(headers["x-response-trailers"] || "0", 10);

  // Build response headers
  const responseHeaders = {
    ":status": 200,
    "content-type": "application/json",
  };

  // Add requested number of response headers
  for (let i = 0; i < numResponseHeaders; i++) {
    responseHeaders[`x-response-header-${i}`] = "R".repeat(150);
  }

  if (numResponseTrailers > 0) {
    // Send response with trailers
    stream.respond(responseHeaders, { waitForTrailers: true });

    stream.on("wantTrailers", () => {
      const trailers = {};
      for (let i = 0; i < numResponseTrailers; i++) {
        trailers[`x-trailer-${i}`] = "T".repeat(150);
      }
      stream.sendTrailers(trailers);
    });

    stream.end(
      JSON.stringify({
        receivedHeaders,
        responseHeaders: numResponseHeaders,
        responseTrailers: numResponseTrailers,
        path,
      }),
    );
  } else {
    // Normal response without trailers
    stream.respond(responseHeaders);
    stream.end(
      JSON.stringify({
        receivedHeaders,
        responseHeaders: numResponseHeaders,
        path,
      }),
    );
  }
});

server.on("error", err => {
  console.error("Server error:", err.message);
});

server.listen(0, "127.0.0.1", () => {
  const { port } = server.address();
  process.stdout.write(JSON.stringify({ port, address: "127.0.0.1" }));
});
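For context, a harness might drive this fixture roughly as follows. This is a hypothetical sketch, not the actual test file (the real tests live in `test/js/node/http2/node-http2-continuation.test.ts`); the fixture filename and cert source are illustrative:

```js
const { spawn } = require("node:child_process");
const http2 = require("node:http2");

// Hypothetical self-signed cert fixture with { key, cert } fields.
const { key, cert } = require("./fixtures/test-cert.json");

// Spawn the fixture with the cert pair as argv[2]; it prints { port, address }.
const proc = spawn("node", ["http2-continuation-server.js", JSON.stringify({ key, cert })]);

proc.stdout.once("data", chunk => {
  const { port, address } = JSON.parse(chunk.toString());
  const client = http2.connect(`https://${address}:${port}`, { ca: cert });

  // Ask the fixture for 500 response headers, forcing CONTINUATION frames.
  const req = client.request({ ":path": "/", "x-response-headers": "500" });
  req.on("response", headers => {
    console.log("received", Object.keys(headers).length, "response headers");
  });
  req.resume();
  req.on("end", () => {
    client.close();
    proc.kill();
  });
  req.end();
});
```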