# fix: handle empty chunked gzip responses correctly (#22360)
## Summary

Fixes #18413 - Empty chunked gzip responses were causing `Decompression error: ShortRead`.

## The Issue

When a server sends an empty response with `Content-Encoding: gzip` and `Transfer-Encoding: chunked`, Bun was throwing a `ShortRead` error. This occurred because the code was checking if `avail_in == 0` (no input data) and immediately returning an error, without attempting to decompress what could be a valid empty gzip stream.

## The Fix

Instead of checking `avail_in == 0` before calling `inflate()`, we now:

1. Always call `inflate()`, even when `avail_in == 0`
2. Check the return code from `inflate()`
3. If it returns `BufError` with `avail_in == 0`, then we truly need more data and return `ShortRead`
4. If it returns `StreamEnd`, it was a valid empty gzip stream and we finish successfully

This approach correctly distinguishes between "no data yet" and "valid empty gzip stream".

## Why This Works

- A valid empty gzip stream still has headers and trailers (~20 bytes)
- The zlib `inflate()` function can handle empty streams correctly
- `BufError` with `avail_in == 0` specifically means "need more input data"

## Test Plan

✅ Added regression test in `test/regression/issue/18413.test.ts` covering:

- Empty chunked gzip response
- Empty non-chunked gzip response
- Empty chunked response without gzip

✅ Verified all existing gzip-related tests still pass

✅ Tested with the original failing case from the issue

🤖 Generated with [Claude Code](https://claude.ai/code)

---------

Co-authored-by: Claude Bot <claude-bot@bun.sh>
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
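For a concrete sense of the "~20 bytes" figure above: a gzip stream that encodes zero payload bytes still carries a 10-byte header, a 2-byte empty DEFLATE block, and an 8-byte trailer (CRC-32 plus input size). A quick sanity check, sketched with Bun's built-in helpers (the exact byte count assumes default zlib settings):

```ts
// Sketch: an "empty" gzip stream is not an empty byte sequence.
const emptyGzip = Bun.gzipSync(Buffer.alloc(0));
console.log(emptyGzip.length); // typically 20: 10-byte header + 2-byte empty block + 8-byte trailer

// Inflating it succeeds and yields zero bytes - the "valid empty gzip stream"
// case that the fix now distinguishes from "no data received yet".
console.log(Bun.gunzipSync(emptyGzip).byteLength); // 0
```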
```diff
@@ -158,9 +158,11 @@ pub const BrotliReaderArrayList = struct {
                     }
                     this.state = .Inflating;
                     if (is_done) {
+                        // Stream is truncated - we're at EOF but decoder needs more data
                         this.state = .Error;
                         return error.BrotliDecompressionError;
                     }
+                    // Not at EOF - we can retry with more data
                     return error.ShortRead;
                 },
                 .needs_more_output => {
```
```diff
@@ -1576,7 +1576,7 @@ pub const JSZlib = struct {
             return globalThis.throwError(err, "Zlib error") catch return .zero;
         };

-        reader.readAll() catch {
+        reader.readAll(true) catch {
             defer reader.deinit();
             return globalThis.throwValue(ZigString.init(reader.errorMessage() orelse "Zlib returned an error").toErrorInstance(globalThis));
         };
```
```diff
@@ -327,8 +327,8 @@ pub const CreateCommand = struct {

         var tarball_buf_list = std.ArrayListUnmanaged(u8){ .capacity = file_buf.len, .items = file_buf };
         var gunzip = try Zlib.ZlibReaderArrayList.init(tarball_bytes.list.items, &tarball_buf_list, ctx.allocator);
-        try gunzip.readAll();
-        gunzip.deinit();
+        defer gunzip.deinit();
+        try gunzip.readAll(true);

         node.name = try ProgressBuf.print("Extracting {s}", .{template});
         node.setCompletedItems(0);
```
```diff
@@ -233,7 +233,7 @@ pub fn downloadToPath(this: *const CompileTarget, env: *bun.DotEnv.Loader, alloc
         // Return error without printing - let caller handle the messaging
         return error.InvalidResponse;
     };
-    gunzip.readAll() catch {
+    gunzip.readAll(true) catch {
         node.end();
         // Return error without printing - let caller handle the messaging
         return error.InvalidResponse;
```
```diff
@@ -174,8 +174,11 @@ pub const ZstdReaderArrayList = struct {
                 if (bytes_read == next_in.len) {
                     this.state = .Inflating;
                     if (is_done) {
+                        // Stream is truncated - we're at EOF but need more data
                         this.state = .Error;
                         return error.ZstdDecompressionError;
                     }
+                    // Not at EOF - we can retry with more data
                     return error.ShortRead;
                 }
             }
```
```diff
@@ -105,7 +105,7 @@ pub const Decompressor = union(enum) {

     pub fn readAll(this: *Decompressor, is_done: bool) !void {
         switch (this.*) {
-            .zlib => |zlib| try zlib.readAll(),
+            .zlib => |zlib| try zlib.readAll(is_done),
             .brotli => |brotli| try brotli.readAll(is_done),
             .zstd => |reader| try reader.readAll(is_done),
             .none => {},
```
```diff
@@ -23,7 +23,7 @@ pub fn decompress(compressed_data: []const u8, output: *MutableString, allocator
             .windowBits = 15 + 32,
         },
     );
-    try reader.readAll();
+    try reader.readAll(true);
     reader.deinit();
 }
```
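An aside on the `.windowBits = 15 + 32` above: adding 32 to the window size is zlib's documented `inflateInit2` convention for auto-detecting zlib (RFC 1950) versus gzip (RFC 1952) framing. `node:zlib`'s `unzipSync` exposes the same auto-detect mode, so a rough equivalent of what this decompress path accepts can be sketched as:

```ts
import { deflateSync, gzipSync, unzipSync } from "node:zlib";

const data = Buffer.from("hello");

// unzipSync auto-detects the framing, like inflateInit2 with windowBits = 15 + 32:
console.log(unzipSync(gzipSync(data)).toString()); // "hello" (gzip framing)
console.log(unzipSync(deflateSync(data)).toString()); // "hello" (zlib framing)
```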
```diff
@@ -197,7 +197,7 @@ fn extract(this: *const ExtractTarball, log: *logger.Log, tgz_bytes: []const u8)
         if (needs_to_decompress) {
             zlib_pool.data.list.clearRetainingCapacity();
             var zlib_entry = try Zlib.ZlibReaderArrayList.init(tgz_bytes, &zlib_pool.data.list, default_allocator);
-            zlib_entry.readAll() catch |err| {
+            zlib_entry.readAll(true) catch |err| {
                 log.addErrorFmt(
                     null,
                     logger.Loc.Empty,
```
**src/zlib.zig** (48 lines changed)
```diff
@@ -209,7 +209,7 @@ pub fn NewZlibReader(comptime Writer: type, comptime buffer_size: usize) type {
             return null;
         }

-        pub fn readAll(this: *ZlibReader) !void {
+        pub fn readAll(this: *ZlibReader, is_done: bool) !void {
             while (this.state == State.Uninitialized or this.state == State.Inflating) {

                 // Before the call of inflate(), the application should ensure
```
```diff
@@ -247,11 +247,8 @@ pub fn NewZlibReader(comptime Writer: type, comptime buffer_size: usize) type {
                     this.zlib.next_out = &this.buf;
                 }

-                if (this.zlib.avail_in == 0) {
-                    return error.ShortRead;
-                }
-
-                const rc = inflate(&this.zlib, FlushValue.PartialFlush);
+                // Try to inflate even if avail_in is 0, as this could be a valid empty gzip stream
+                const rc = inflate(&this.zlib, FlushValue.NoFlush);
                 this.state = State.Inflating;

                 switch (rc) {
```
```diff
@@ -269,9 +266,22 @@ pub fn NewZlibReader(comptime Writer: type, comptime buffer_size: usize) type {
                         this.state = State.Error;
                         return error.OutOfMemory;
                     },
+                    ReturnCode.BufError => {
+                        // BufError with avail_in == 0 means we need more input data
+                        if (this.zlib.avail_in == 0) {
+                            if (is_done) {
+                                // Stream is truncated - we're at EOF but decoder needs more data
+                                this.state = State.Error;
+                                return error.ZlibError;
+                            }
+                            // Not at EOF - we can retry with more data
+                            return error.ShortRead;
+                        }
+                        this.state = State.Error;
+                        return error.ZlibError;
+                    },
                     ReturnCode.StreamError,
                     ReturnCode.DataError,
-                    ReturnCode.BufError,
                     ReturnCode.NeedDict,
                     ReturnCode.VersionError,
                     ReturnCode.ErrNo,
```
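The new `BufError` arm leans on zlib's documented contract: `Z_BUF_ERROR` from `inflate()` is not fatal, it only means no forward progress was possible, and with `avail_in == 0` that reads as "feed me more bytes". `node:zlib` surfaces the same condition on truncated input; a small illustration:

```ts
import { gunzipSync, gzipSync } from "node:zlib";

const full = gzipSync(Buffer.from("Hello World"));
const truncated = full.subarray(0, full.length - 5); // lop off part of the trailer

try {
  gunzipSync(truncated);
} catch (err: any) {
  // zlib reports "ran out of input" as Z_BUF_ERROR; when the stream has already
  // ended, that is exactly the truncation case the is_done check turns into an error.
  console.log(err.code); // "Z_BUF_ERROR" ("unexpected end of file")
}
```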
```diff
@@ -420,7 +430,7 @@ pub const ZlibReaderArrayList = struct {
             return null;
         }

-        pub fn readAll(this: *ZlibReader) ZlibError!void {
+        pub fn readAll(this: *ZlibReader, is_done: bool) ZlibError!void {
             defer {
                 if (this.list.items.len > this.zlib.total_out) {
                     this.list.shrinkRetainingCapacity(this.zlib.total_out);
```
```diff
@@ -466,11 +476,8 @@ pub const ZlibReaderArrayList = struct {
                 this.zlib.avail_out = @truncate(this.list.items.len -| initial);
             }

-            if (this.zlib.avail_in == 0) {
-                return error.ShortRead;
-            }
-
-            const rc = inflate(&this.zlib, FlushValue.PartialFlush);
+            // Try to inflate even if avail_in is 0, as this could be a valid empty gzip stream
+            const rc = inflate(&this.zlib, FlushValue.NoFlush);
             this.state = State.Inflating;

             switch (rc) {
```
```diff
@@ -482,9 +489,22 @@ pub const ZlibReaderArrayList = struct {
                     this.state = State.Error;
                     return error.OutOfMemory;
                 },
+                ReturnCode.BufError => {
+                    // BufError with avail_in == 0 means we need more input data
+                    if (this.zlib.avail_in == 0) {
+                        if (is_done) {
+                            // Stream is truncated - we're at EOF but decoder needs more data
+                            this.state = State.Error;
+                            return error.ZlibError;
+                        }
+                        // Not at EOF - we can retry with more data
+                        return error.ShortRead;
+                    }
+                    this.state = State.Error;
+                    return error.ZlibError;
+                },
                 ReturnCode.StreamError,
                 ReturnCode.DataError,
-                ReturnCode.BufError,
                 ReturnCode.NeedDict,
                 ReturnCode.VersionError,
                 ReturnCode.ErrNo,
```
```diff
@@ -1245,7 +1245,7 @@ describe("fetch() with streaming", () => {
             expect((err as Error).code).toBe("BrotliDecompressionError");
           } else if (compression === "deflate-libdeflate") {
             expect((err as Error).name).toBe("Error");
-            expect((err as Error).code).toBe("ShortRead");
+            expect((err as Error).code).toBe("ZlibError");
           } else if (compression === "zstd") {
             expect((err as Error).name).toBe("Error");
             expect((err as Error).code).toBe("ZstdDecompressionError");
```
**test/regression/issue/18413-all-compressions.test.ts** (new file, 183 lines)
```ts
import { serve } from "bun";
import { expect, test } from "bun:test";

/**
 * Comprehensive test to ensure all compression algorithms handle empty streams correctly
 * Related to issue #18413 - we fixed this for gzip, now verifying brotli and zstd work too
 */

test("empty chunked brotli response should work", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      // Create an empty brotli buffer using the proper API
      const { brotliCompressSync } = require("node:zlib");
      const emptyBrotli = brotliCompressSync(Buffer.alloc(0));

      // Return as chunked response
      return new Response(
        new ReadableStream({
          start(controller) {
            controller.enqueue(emptyBrotli);
            controller.close();
          },
        }),
        {
          headers: {
            "Content-Encoding": "br",
            "Transfer-Encoding": "chunked",
            "Content-Type": "text/plain",
          },
        },
      );
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  expect(response.status).toBe(200);

  // Should not throw decompression error
  const text = await response.text();
  expect(text).toBe("");
});

test("empty non-chunked brotli response", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      // Create an empty brotli buffer using the proper API
      const { brotliCompressSync } = require("node:zlib");
      const emptyBrotli = brotliCompressSync(Buffer.alloc(0));

      return new Response(emptyBrotli, {
        headers: {
          "Content-Encoding": "br",
          "Content-Type": "text/plain",
          "Content-Length": emptyBrotli.length.toString(),
        },
      });
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  expect(response.status).toBe(200);

  const text = await response.text();
  expect(text).toBe("");
});

test("empty chunked zstd response should work", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      // Create an empty zstd buffer using the proper API
      const emptyZstd = Bun.zstdCompressSync(Buffer.alloc(0));

      // Return as chunked response
      return new Response(
        new ReadableStream({
          start(controller) {
            controller.enqueue(emptyZstd);
            controller.close();
          },
        }),
        {
          headers: {
            "Content-Encoding": "zstd",
            "Transfer-Encoding": "chunked",
            "Content-Type": "text/plain",
          },
        },
      );
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  expect(response.status).toBe(200);

  // Should not throw decompression error
  const text = await response.text();
  expect(text).toBe("");
});

test("empty non-chunked zstd response", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      // Create an empty zstd buffer using the proper API
      const emptyZstd = Bun.zstdCompressSync(Buffer.alloc(0));

      return new Response(emptyZstd, {
        headers: {
          "Content-Encoding": "zstd",
          "Content-Type": "text/plain",
          "Content-Length": emptyZstd.length.toString(),
        },
      });
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  expect(response.status).toBe(200);

  const text = await response.text();
  expect(text).toBe("");
});

test("empty chunked deflate response should work", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      // Create an empty deflate buffer
      const emptyDeflate = Bun.deflateSync(Buffer.alloc(0));

      // Return as chunked response
      return new Response(
        new ReadableStream({
          start(controller) {
            controller.enqueue(emptyDeflate);
            controller.close();
          },
        }),
        {
          headers: {
            "Content-Encoding": "deflate",
            "Transfer-Encoding": "chunked",
            "Content-Type": "text/plain",
          },
        },
      );
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  expect(response.status).toBe(200);

  // Should not throw decompression error
  const text = await response.text();
  expect(text).toBe("");
});

test("empty non-chunked deflate response", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      // Create an empty deflate buffer
      const emptyDeflate = Bun.deflateSync(Buffer.alloc(0));

      return new Response(emptyDeflate, {
        headers: {
          "Content-Encoding": "deflate",
          "Content-Type": "text/plain",
          "Content-Length": emptyDeflate.length.toString(),
        },
      });
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  expect(response.status).toBe(200);

  const text = await response.text();
  expect(text).toBe("");
});
```
**test/regression/issue/18413-deflate-semantics.test.ts** (new file, 248 lines)
```ts
import { serve } from "bun";
import { expect, test } from "bun:test";
import { deflateRawSync, deflateSync } from "node:zlib";

/**
 * Test deflate semantics - both zlib-wrapped and raw deflate
 *
 * HTTP Content-Encoding: deflate is ambiguous:
 * - RFC 2616 (HTTP/1.1) says it should be zlib format (RFC 1950)
 * - Many implementations incorrectly use raw deflate (RFC 1951)
 *
 * Bun should handle both gracefully, auto-detecting the format.
 */

// Test data
const testData = Buffer.from("Hello, World! This is a test of deflate encoding.");

// Test zlib-wrapped deflate (RFC 1950 - has 2-byte header and 4-byte Adler32 trailer)
test("deflate with zlib wrapper should work", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      // Create zlib-wrapped deflate (this is what the spec says deflate should be)
      const compressed = deflateSync(testData);

      // Verify it has a zlib header: CMF must be 0x78 and (CMF<<8 | FLG) % 31 == 0
      expect(compressed[0]).toBe(0x78);
      expect(((compressed[0] << 8) | compressed[1]) % 31).toBe(0);

      return new Response(compressed, {
        headers: {
          "Content-Encoding": "deflate",
          "Content-Type": "text/plain",
        },
      });
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  const text = await response.text();
  expect(text).toBe(testData.toString());
});

// Test raw deflate (RFC 1951 - no header/trailer, just compressed data)
test("raw deflate without zlib wrapper should work", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      // Create raw deflate (no zlib wrapper)
      const compressed = deflateRawSync(testData);

      // Verify it doesn't have zlib header (shouldn't start with 0x78)
      expect(compressed[0]).not.toBe(0x78);

      return new Response(compressed, {
        headers: {
          "Content-Encoding": "deflate",
          "Content-Type": "text/plain",
        },
      });
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  const text = await response.text();
  expect(text).toBe(testData.toString());
});

// Test empty zlib-wrapped deflate
test("empty zlib-wrapped deflate should work", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      const compressed = deflateSync(Buffer.alloc(0));

      return new Response(compressed, {
        headers: {
          "Content-Encoding": "deflate",
          "Content-Type": "text/plain",
        },
      });
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  const text = await response.text();
  expect(text).toBe("");
});

// Test empty raw deflate
test("empty raw deflate should work", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      const compressed = deflateRawSync(Buffer.alloc(0));

      return new Response(compressed, {
        headers: {
          "Content-Encoding": "deflate",
          "Content-Type": "text/plain",
        },
      });
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  const text = await response.text();
  expect(text).toBe("");
});

// Test chunked zlib-wrapped deflate
test("chunked zlib-wrapped deflate should work", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      const compressed = deflateSync(testData);
      const mid = Math.floor(compressed.length / 2);

      return new Response(
        new ReadableStream({
          async start(controller) {
            controller.enqueue(compressed.slice(0, mid));
            await Bun.sleep(50);
            controller.enqueue(compressed.slice(mid));
            controller.close();
          },
        }),
        {
          headers: {
            "Content-Encoding": "deflate",
            "Transfer-Encoding": "chunked",
            "Content-Type": "text/plain",
          },
        },
      );
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  const text = await response.text();
  expect(text).toBe(testData.toString());
});

// Test chunked raw deflate
test("chunked raw deflate should work", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      const compressed = deflateRawSync(testData);
      const mid = Math.floor(compressed.length / 2);

      return new Response(
        new ReadableStream({
          async start(controller) {
            controller.enqueue(compressed.slice(0, mid));
            await Bun.sleep(50);
            controller.enqueue(compressed.slice(mid));
            controller.close();
          },
        }),
        {
          headers: {
            "Content-Encoding": "deflate",
            "Transfer-Encoding": "chunked",
            "Content-Type": "text/plain",
          },
        },
      );
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  const text = await response.text();
  expect(text).toBe(testData.toString());
});

// Test truncated zlib-wrapped deflate (missing trailer)
test("truncated zlib-wrapped deflate should fail", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      const compressed = deflateSync(testData);
      // Remove the 4-byte Adler32 trailer
      const truncated = compressed.slice(0, -4);

      return new Response(truncated, {
        headers: {
          "Content-Encoding": "deflate",
          "Content-Type": "text/plain",
        },
      });
    },
  });

  try {
    const response = await fetch(`http://localhost:${server.port}`);
    await response.text();
    expect.unreachable("Should have thrown decompression error");
  } catch (err: any) {
    expect(err.code).toMatch(/ZlibError|ShortRead/);
  }
});

// Test invalid deflate data (not deflate at all)
test("invalid deflate data should fail", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      // Random bytes that are neither zlib-wrapped nor raw deflate
      const invalid = new Uint8Array([0xff, 0xfe, 0xfd, 0xfc, 0xfb]);

      return new Response(invalid, {
        headers: {
          "Content-Encoding": "deflate",
          "Content-Type": "text/plain",
        },
      });
    },
  });

  try {
    const response = await fetch(`http://localhost:${server.port}`);
    await response.text();
    expect.unreachable("Should have thrown decompression error");
  } catch (err: any) {
    expect(err.code).toMatch(/ZlibError/);
  }
});

/**
 * Documentation of deflate semantics in Bun:
 *
 * When Content-Encoding: deflate is received, Bun's HTTP client should:
 * 1. Attempt to decompress as zlib format (RFC 1950) first
 * 2. If that fails with a header error, retry as raw deflate (RFC 1951)
 * 3. This handles both correct implementations and common misimplementations
 *
 * The zlib format has:
 * - 2-byte header with compression method and flags
 * - Compressed data using DEFLATE algorithm
 * - 4-byte Adler-32 checksum trailer
 *
 * Raw deflate has:
 * - Just the compressed data, no header or trailer
 *
 * Empty streams:
 * - Empty zlib-wrapped: Has header and trailer, total ~8 bytes
 * - Empty raw deflate: Minimal DEFLATE stream, ~2-3 bytes
 */
```
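The byte counts in the closing comment are easy to check against `node:zlib` (a sketch; sizes assume default compression settings):

```ts
import { deflateRawSync, deflateSync } from "node:zlib";

// zlib-wrapped empty stream: 2-byte header + 2-byte empty DEFLATE block + 4-byte Adler-32
console.log(deflateSync(Buffer.alloc(0)).length); // 8

// raw deflate empty stream: just the 2-byte empty DEFLATE block
console.log(deflateRawSync(Buffer.alloc(0)).length); // 2
```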
**test/regression/issue/18413-truncation.test.ts** (new file, 292 lines)
```ts
import { serve } from "bun";
import { expect, test } from "bun:test";
import { brotliCompressSync } from "node:zlib";

/**
 * Comprehensive truncation and edge case tests for all compression formats
 * Related to issue #18413 - Testing proper handling of truncated streams,
 * empty streams, and delayed chunks.
 */

// Helper to create a server that sends truncated compressed data
function createTruncatedServer(compression: "gzip" | "br" | "zstd" | "deflate", truncateBytes: number = 1) {
  return serve({
    port: 0,
    async fetch(req) {
      let compressed: Uint8Array;
      const data = Buffer.from("Hello World! This is a test message.");

      switch (compression) {
        case "gzip":
          compressed = Bun.gzipSync(data);
          break;
        case "br":
          compressed = brotliCompressSync(data);
          break;
        case "zstd":
          compressed = Bun.zstdCompressSync(data);
          break;
        case "deflate":
          compressed = Bun.deflateSync(data);
          break;
      }

      // Truncate the compressed data
      const truncated = compressed.slice(0, compressed.length - truncateBytes);

      return new Response(truncated, {
        headers: {
          "Content-Encoding": compression,
          "Content-Type": "text/plain",
          "Content-Length": truncated.length.toString(),
        },
      });
    },
  });
}

// Helper to create a server that sends data in delayed chunks
function createDelayedChunksServer(compression: "gzip" | "br" | "zstd" | "deflate", delayMs: number = 100) {
  return serve({
    port: 0,
    async fetch(req) {
      let compressed: Uint8Array;
      const data = Buffer.from("Hello World! This is a test message.");

      switch (compression) {
        case "gzip":
          compressed = Bun.gzipSync(data);
          break;
        case "br":
          compressed = brotliCompressSync(data);
          break;
        case "zstd":
          compressed = Bun.zstdCompressSync(data);
          break;
        case "deflate":
          compressed = Bun.deflateSync(data);
          break;
      }

      // Split compressed data into chunks
      const mid = Math.floor(compressed.length / 2);
      const chunk1 = compressed.slice(0, mid);
      const chunk2 = compressed.slice(mid);

      return new Response(
        new ReadableStream({
          async start(controller) {
            // Send first chunk
            controller.enqueue(chunk1);
            // Delay before sending second chunk
            await Bun.sleep(delayMs);
            controller.enqueue(chunk2);
            controller.close();
          },
        }),
        {
          headers: {
            "Content-Encoding": compression,
            "Transfer-Encoding": "chunked",
            "Content-Type": "text/plain",
          },
        },
      );
    },
  });
}

// Test truncated gzip stream
test("truncated gzip stream should throw error", async () => {
  using server = createTruncatedServer("gzip", 5);

  try {
    const response = await fetch(`http://localhost:${server.port}`);
    await response.text();
    expect.unreachable("Should have thrown decompression error");
  } catch (err: any) {
    expect(err.code || err.name || err.message).toMatch(/ZlibError|ShortRead/);
  }
});

// Test truncated brotli stream
test("truncated brotli stream should throw error", async () => {
  using server = createTruncatedServer("br", 5);

  try {
    const response = await fetch(`http://localhost:${server.port}`);
    await response.text();
    expect.unreachable("Should have thrown decompression error");
  } catch (err: any) {
    expect(err.code || err.name || err.message).toMatch(/BrotliDecompressionError/);
  }
});

// Test truncated zstd stream
test("truncated zstd stream should throw error", async () => {
  using server = createTruncatedServer("zstd", 5);

  try {
    const response = await fetch(`http://localhost:${server.port}`);
    await response.text();
    expect.unreachable("Should have thrown decompression error");
  } catch (err: any) {
    expect(err.code || err.name || err.message).toMatch(/ZstdDecompressionError/);
  }
});

// Test truncated deflate stream
test("truncated deflate stream should throw error", async () => {
  using server = createTruncatedServer("deflate", 1);

  try {
    const response = await fetch(`http://localhost:${server.port}`);
    await response.text();
    expect.unreachable("Should have thrown decompression error");
  } catch (err: any) {
    expect(err.code || err.name || err.message).toMatch(/ZlibError|ShortRead/);
  }
});

// Test delayed chunks for gzip (should succeed)
test("gzip with delayed chunks should succeed", async () => {
  using server = createDelayedChunksServer("gzip", 50);

  const response = await fetch(`http://localhost:${server.port}`);
  const text = await response.text();
  expect(text).toBe("Hello World! This is a test message.");
});

// Test delayed chunks for brotli (should succeed)
test("brotli with delayed chunks should succeed", async () => {
  using server = createDelayedChunksServer("br", 50);

  const response = await fetch(`http://localhost:${server.port}`);
  const text = await response.text();
  expect(text).toBe("Hello World! This is a test message.");
});

// Test delayed chunks for zstd (should succeed)
test("zstd with delayed chunks should succeed", async () => {
  using server = createDelayedChunksServer("zstd", 50);

  const response = await fetch(`http://localhost:${server.port}`);
  const text = await response.text();
  expect(text).toBe("Hello World! This is a test message.");
});

// Test delayed chunks for deflate (should succeed)
test("deflate with delayed chunks should succeed", async () => {
  using server = createDelayedChunksServer("deflate", 50);

  const response = await fetch(`http://localhost:${server.port}`);
  const text = await response.text();
  expect(text).toBe("Hello World! This is a test message.");
});

// Test mismatched Content-Encoding
test("mismatched Content-Encoding should fail gracefully", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      // Send gzip data but claim it's brotli
      const gzipped = Bun.gzipSync(Buffer.from("Hello World"));

      return new Response(gzipped, {
        headers: {
          "Content-Encoding": "br",
          "Content-Type": "text/plain",
        },
      });
    },
  });

  try {
    const response = await fetch(`http://localhost:${server.port}`);
    await response.text();
    expect.unreachable("Should have thrown decompression error");
  } catch (err: any) {
    expect(err.code || err.name || err.message).toMatch(/BrotliDecompressionError/);
  }
});

// Test sending zero-byte compressed body
test("zero-byte body with gzip Content-Encoding and Content-Length: 0", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      return new Response(new Uint8Array(0), {
        headers: {
          "Content-Encoding": "gzip",
          "Content-Type": "text/plain",
          "Content-Length": "0",
        },
      });
    },
  });

  // When Content-Length is 0, the decompressor is not invoked, so this succeeds
  const response = await fetch(`http://localhost:${server.port}`);
  const text = await response.text();
  expect(text).toBe("");
});

// Test sending invalid compressed data
test("invalid gzip data should fail", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      // Send random bytes claiming to be gzip
      const invalid = new Uint8Array([0xff, 0xff, 0xff, 0xff, 0xff]);

      return new Response(invalid, {
        headers: {
          "Content-Encoding": "gzip",
          "Content-Type": "text/plain",
        },
      });
    },
  });

  try {
    const response = await fetch(`http://localhost:${server.port}`);
    await response.text();
    expect.unreachable("Should have thrown decompression error");
  } catch (err: any) {
    expect(err.code || err.name || err.message).toMatch(/ZlibError/);
  }
});

// Test sending first chunk delayed with empty initial chunk
test("empty first chunk followed by valid gzip should succeed", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      const gzipped = Bun.gzipSync(Buffer.from("Hello World"));

      return new Response(
        new ReadableStream({
          async start(controller) {
            // Send empty chunk first
            controller.enqueue(new Uint8Array(0));
            await Bun.sleep(50);
            // Then send the actual compressed data
            controller.enqueue(gzipped);
            controller.close();
          },
        }),
        {
          headers: {
            "Content-Encoding": "gzip",
            "Transfer-Encoding": "chunked",
            "Content-Type": "text/plain",
          },
        },
      );
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  const text = await response.text();
  expect(text).toBe("Hello World");
});
```
**test/regression/issue/18413.test.ts** (new file, 97 lines)
```ts
import { serve } from "bun";
import { expect, test } from "bun:test";
import { Readable } from "node:stream";
import { createGzip } from "node:zlib";

/**
 * Regression test for issue #18413
 * "Decompression error: ShortRead - empty chunked gzip response breaks fetch()"
 *
 * The issue was in Bun's zlib.zig implementation, which was incorrectly returning
 * error.ShortRead when encountering empty gzip streams (when avail_in == 0).
 *
 * The fix is to call inflate() even when avail_in == 0, as this could be a valid
 * empty gzip stream with proper headers/trailers. If inflate returns BufError
 * with avail_in == 0, then we know we truly need more data and can return ShortRead.
 */

test("empty chunked gzip response should work", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      // Create an empty gzip stream
      const gzipStream = createGzip();
      gzipStream.end(); // End immediately without writing data

      // Convert to web stream
      const webStream = Readable.toWeb(gzipStream);

      return new Response(webStream, {
        headers: {
          "Content-Encoding": "gzip",
          "Transfer-Encoding": "chunked",
          "Content-Type": "text/plain",
        },
      });
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  expect(response.status).toBe(200);

  // This should not throw "Decompression error: ShortRead"
  const text = await response.text();
  expect(text).toBe(""); // Empty response
});

test("empty gzip response without chunked encoding", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      // Create an empty gzip buffer
      const emptyGzip = Bun.gzipSync(Buffer.alloc(0));

      return new Response(emptyGzip, {
        headers: {
          "Content-Encoding": "gzip",
          "Content-Type": "text/plain",
          "Content-Length": emptyGzip.length.toString(),
        },
      });
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  expect(response.status).toBe(200);

  const text = await response.text();
  expect(text).toBe("");
});

test("empty chunked response without gzip", async () => {
  using server = serve({
    port: 0,
    async fetch(req) {
      return new Response(
        new ReadableStream({
          start(controller) {
            // Just close immediately
            controller.close();
          },
        }),
        {
          headers: {
            "Transfer-Encoding": "chunked",
            "Content-Type": "text/plain",
          },
        },
      );
    },
  });

  const response = await fetch(`http://localhost:${server.port}`);
  expect(response.status).toBe(200);

  const text = await response.text();
  expect(text).toBe("");
});
```