mirror of
https://github.com/oven-sh/bun
synced 2026-02-02 15:08:46 +00:00
add brotli and zstd to CompressionStream and DecompressionStream types (#25374)
### What does this PR do?

- removes the `Unimplemented in Bun` comment on `CompressionStream` and `DecompressionStream`
- updates the types for `CompressionStream` and `DecompressionStream` to add a new internal `CompressionFormat` type to the constructor, which adds `brotli` and `zstd` to the union
- adds tests for brotli and zstd usage
- adds lib.dom.d.ts exclusions for brotli and zstd as these don't exist in the DOM version of CompressionFormat

fixes #25367

### How did you verify your code works?

typechecks and tests
This commit is contained in:
3
packages/bun-types/bun.d.ts
vendored
3
packages/bun-types/bun.d.ts
vendored
@@ -3887,6 +3887,9 @@ declare module "bun" {
|
||||
static readonly byteLength: 32;
|
||||
}
|
||||
|
||||
/** Extends the standard web formats with `brotli` and `zstd` support. */
|
||||
type CompressionFormat = "gzip" | "deflate" | "deflate-raw" | "brotli" | "zstd";
|
||||
|
||||
/** Compression options for `Bun.deflateSync` and `Bun.gzipSync` */
|
||||
interface ZlibCompressionOptions {
|
||||
/**
|
||||
|
||||
42
packages/bun-types/globals.d.ts
vendored
42
packages/bun-types/globals.d.ts
vendored
@@ -83,6 +83,24 @@ declare var WritableStream: Bun.__internal.UseLibDomIfAvailable<
|
||||
}
|
||||
>;
|
||||
|
||||
interface CompressionStream extends Bun.__internal.LibEmptyOrNodeStreamWebCompressionStream {}
|
||||
declare var CompressionStream: Bun.__internal.UseLibDomIfAvailable<
|
||||
"CompressionStream",
|
||||
{
|
||||
prototype: CompressionStream;
|
||||
new (format: Bun.CompressionFormat): CompressionStream;
|
||||
}
|
||||
>;
|
||||
|
||||
interface DecompressionStream extends Bun.__internal.LibEmptyOrNodeStreamWebDecompressionStream {}
|
||||
declare var DecompressionStream: Bun.__internal.UseLibDomIfAvailable<
|
||||
"DecompressionStream",
|
||||
{
|
||||
prototype: DecompressionStream;
|
||||
new (format: Bun.CompressionFormat): DecompressionStream;
|
||||
}
|
||||
>;
|
||||
|
||||
interface Worker extends Bun.__internal.LibWorkerOrBunWorker {}
|
||||
declare var Worker: Bun.__internal.UseLibDomIfAvailable<
|
||||
"Worker",
|
||||
@@ -278,30 +296,6 @@ declare var Event: {
|
||||
new (type: string, eventInitDict?: Bun.EventInit): Event;
|
||||
};
|
||||
|
||||
/**
|
||||
* Unimplemented in Bun
|
||||
*/
|
||||
interface CompressionStream extends Bun.__internal.LibEmptyOrNodeStreamWebCompressionStream {}
|
||||
/**
|
||||
* Unimplemented in Bun
|
||||
*/
|
||||
declare var CompressionStream: Bun.__internal.UseLibDomIfAvailable<
|
||||
"CompressionStream",
|
||||
typeof import("node:stream/web").CompressionStream
|
||||
>;
|
||||
|
||||
/**
|
||||
* Unimplemented in Bun
|
||||
*/
|
||||
interface DecompressionStream extends Bun.__internal.LibEmptyOrNodeStreamWebCompressionStream {}
|
||||
/**
|
||||
* Unimplemented in Bun
|
||||
*/
|
||||
declare var DecompressionStream: Bun.__internal.UseLibDomIfAvailable<
|
||||
"DecompressionStream",
|
||||
typeof import("node:stream/web").DecompressionStream
|
||||
>;
|
||||
|
||||
interface EventTarget {
|
||||
/**
|
||||
* Adds a new handler for the `type` event. Any given `listener` is added only once per `type` and per `capture` option value.
|
||||
|
||||
@@ -525,6 +525,26 @@ describe("@types/bun integration test", () => {
|
||||
"line": "streams.ts:49:19",
|
||||
"message": "Property 'blob' does not exist on type 'ReadableStream<Uint8Array<ArrayBufferLike>>'.",
|
||||
},
|
||||
{
|
||||
code: 2345,
|
||||
line: "streams.ts:63:66",
|
||||
message: "Argument of type '\"brotli\"' is not assignable to parameter of type 'CompressionFormat'.",
|
||||
},
|
||||
{
|
||||
code: 2345,
|
||||
line: "streams.ts:63:113",
|
||||
message: "Argument of type '\"brotli\"' is not assignable to parameter of type 'CompressionFormat'.",
|
||||
},
|
||||
{
|
||||
code: 2345,
|
||||
line: "streams.ts:64:66",
|
||||
message: "Argument of type '\"zstd\"' is not assignable to parameter of type 'CompressionFormat'.",
|
||||
},
|
||||
{
|
||||
code: 2345,
|
||||
line: "streams.ts:64:111",
|
||||
message: "Argument of type '\"zstd\"' is not assignable to parameter of type 'CompressionFormat'.",
|
||||
},
|
||||
{
|
||||
code: 2353,
|
||||
line: "websocket.ts:25:5",
|
||||
|
||||
@@ -60,6 +60,8 @@ expectType(node_stream.blob()).is<Promise<Blob>>();
|
||||
Bun.file("./foo.csv").stream().pipeThrough(new TextDecoderStream()).pipeThrough(new TextEncoderStream());
|
||||
|
||||
Bun.file("./foo.csv").stream().pipeThrough(new CompressionStream("gzip")).pipeThrough(new DecompressionStream("gzip"));
|
||||
Bun.file("./foo.csv").stream().pipeThrough(new CompressionStream("brotli")).pipeThrough(new DecompressionStream("brotli"));
|
||||
Bun.file("./foo.csv").stream().pipeThrough(new CompressionStream("zstd")).pipeThrough(new DecompressionStream("zstd"));
|
||||
|
||||
Bun.file("./foo.csv")
|
||||
.stream()
|
||||
|
||||
251
test/js/web/streams/compression.test.ts
Normal file
251
test/js/web/streams/compression.test.ts
Normal file
@@ -0,0 +1,251 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
|
||||
describe("CompressionStream and DecompressionStream", () => {
|
||||
describe("brotli", () => {
|
||||
test("compresses data with brotli", async () => {
|
||||
const input = "Hello, Bun! This is a test string for brotli compression.";
|
||||
const encoder = new TextEncoder();
|
||||
const data = encoder.encode(input);
|
||||
|
||||
const compressionStream = new CompressionStream("brotli");
|
||||
const writer = compressionStream.writable.getWriter();
|
||||
writer.write(data);
|
||||
writer.close();
|
||||
|
||||
const compressedChunks: Uint8Array[] = [];
|
||||
const reader = compressionStream.readable.getReader();
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
compressedChunks.push(value);
|
||||
}
|
||||
|
||||
expect(compressedChunks.length).toBeGreaterThan(0);
|
||||
const totalLength = compressedChunks.reduce((acc, chunk) => acc + chunk.length, 0);
|
||||
expect(totalLength).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test("decompresses brotli data", async () => {
|
||||
const input = "Hello, Bun! This is a test string for brotli decompression.";
|
||||
const encoder = new TextEncoder();
|
||||
const decoder = new TextDecoder();
|
||||
const data = encoder.encode(input);
|
||||
|
||||
// First compress
|
||||
const compressionStream = new CompressionStream("brotli");
|
||||
const writer = compressionStream.writable.getWriter();
|
||||
writer.write(data);
|
||||
writer.close();
|
||||
|
||||
const compressedChunks: Uint8Array[] = [];
|
||||
const reader = compressionStream.readable.getReader();
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
compressedChunks.push(value);
|
||||
}
|
||||
|
||||
// Concatenate compressed chunks
|
||||
const totalLength = compressedChunks.reduce((acc, chunk) => acc + chunk.length, 0);
|
||||
const compressed = new Uint8Array(totalLength);
|
||||
let offset = 0;
|
||||
for (const chunk of compressedChunks) {
|
||||
compressed.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
|
||||
// Then decompress
|
||||
const decompressionStream = new DecompressionStream("brotli");
|
||||
const decompWriter = decompressionStream.writable.getWriter();
|
||||
decompWriter.write(compressed);
|
||||
decompWriter.close();
|
||||
|
||||
const decompressedChunks: Uint8Array[] = [];
|
||||
const decompReader = decompressionStream.readable.getReader();
|
||||
while (true) {
|
||||
const { done, value } = await decompReader.read();
|
||||
if (done) break;
|
||||
decompressedChunks.push(value);
|
||||
}
|
||||
|
||||
const decompressedLength = decompressedChunks.reduce((acc, chunk) => acc + chunk.length, 0);
|
||||
const decompressed = new Uint8Array(decompressedLength);
|
||||
offset = 0;
|
||||
for (const chunk of decompressedChunks) {
|
||||
decompressed.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
|
||||
const output = decoder.decode(decompressed);
|
||||
expect(output).toBe(input);
|
||||
});
|
||||
|
||||
test("round-trip compression with brotli", async () => {
|
||||
const testData = [
|
||||
"Simple string",
|
||||
Buffer.alloc(1000, "A").toString(),
|
||||
"Mixed 123 !@# symbols",
|
||||
"",
|
||||
JSON.stringify({ nested: { object: "value" } }),
|
||||
];
|
||||
|
||||
for (const input of testData) {
|
||||
const encoder = new TextEncoder();
|
||||
const decoder = new TextDecoder();
|
||||
const data = encoder.encode(input);
|
||||
|
||||
// Compress and decompress
|
||||
const compressed = await new Response(
|
||||
new Blob([data]).stream().pipeThrough(new CompressionStream("brotli")),
|
||||
).arrayBuffer();
|
||||
|
||||
const decompressed = await new Response(
|
||||
new Blob([compressed]).stream().pipeThrough(new DecompressionStream("brotli")),
|
||||
).arrayBuffer();
|
||||
|
||||
const output = decoder.decode(decompressed);
|
||||
expect(output).toBe(input);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("zstd", () => {
|
||||
test("compresses data with zstd", async () => {
|
||||
const input = "Hello, Bun! This is a test string for zstd compression.";
|
||||
const encoder = new TextEncoder();
|
||||
const data = encoder.encode(input);
|
||||
|
||||
const compressionStream = new CompressionStream("zstd");
|
||||
const writer = compressionStream.writable.getWriter();
|
||||
writer.write(data);
|
||||
writer.close();
|
||||
|
||||
const compressedChunks: Uint8Array[] = [];
|
||||
const reader = compressionStream.readable.getReader();
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
compressedChunks.push(value);
|
||||
}
|
||||
|
||||
expect(compressedChunks.length).toBeGreaterThan(0);
|
||||
const totalLength = compressedChunks.reduce((acc, chunk) => acc + chunk.length, 0);
|
||||
expect(totalLength).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test("decompresses zstd data", async () => {
|
||||
const input = "Hello, Bun! This is a test string for zstd decompression.";
|
||||
const encoder = new TextEncoder();
|
||||
const decoder = new TextDecoder();
|
||||
const data = encoder.encode(input);
|
||||
|
||||
// First compress
|
||||
const compressionStream = new CompressionStream("zstd");
|
||||
const writer = compressionStream.writable.getWriter();
|
||||
writer.write(data);
|
||||
writer.close();
|
||||
|
||||
const compressedChunks: Uint8Array[] = [];
|
||||
const reader = compressionStream.readable.getReader();
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
compressedChunks.push(value);
|
||||
}
|
||||
|
||||
// Concatenate compressed chunks
|
||||
const totalLength = compressedChunks.reduce((acc, chunk) => acc + chunk.length, 0);
|
||||
const compressed = new Uint8Array(totalLength);
|
||||
let offset = 0;
|
||||
for (const chunk of compressedChunks) {
|
||||
compressed.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
|
||||
// Then decompress
|
||||
const decompressionStream = new DecompressionStream("zstd");
|
||||
const decompWriter = decompressionStream.writable.getWriter();
|
||||
decompWriter.write(compressed);
|
||||
decompWriter.close();
|
||||
|
||||
const decompressedChunks: Uint8Array[] = [];
|
||||
const decompReader = decompressionStream.readable.getReader();
|
||||
while (true) {
|
||||
const { done, value } = await decompReader.read();
|
||||
if (done) break;
|
||||
decompressedChunks.push(value);
|
||||
}
|
||||
|
||||
const decompressedLength = decompressedChunks.reduce((acc, chunk) => acc + chunk.length, 0);
|
||||
const decompressed = new Uint8Array(decompressedLength);
|
||||
offset = 0;
|
||||
for (const chunk of decompressedChunks) {
|
||||
decompressed.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
|
||||
const output = decoder.decode(decompressed);
|
||||
expect(output).toBe(input);
|
||||
});
|
||||
|
||||
test("round-trip compression with zstd", async () => {
|
||||
const testData = [
|
||||
"Simple string",
|
||||
Buffer.alloc(1000, "A").toString(),
|
||||
"Mixed 123 !@# symbols",
|
||||
"",
|
||||
JSON.stringify({ nested: { object: "value" } }),
|
||||
];
|
||||
|
||||
for (const input of testData) {
|
||||
const encoder = new TextEncoder();
|
||||
const decoder = new TextDecoder();
|
||||
const data = encoder.encode(input);
|
||||
|
||||
// Compress and decompress
|
||||
const compressed = await new Response(
|
||||
new Blob([data]).stream().pipeThrough(new CompressionStream("zstd")),
|
||||
).arrayBuffer();
|
||||
|
||||
const decompressed = await new Response(
|
||||
new Blob([compressed]).stream().pipeThrough(new DecompressionStream("zstd")),
|
||||
).arrayBuffer();
|
||||
|
||||
const output = decoder.decode(decompressed);
|
||||
expect(output).toBe(input);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("all formats", () => {
|
||||
test("works with all compression formats", async () => {
|
||||
const formats: Array<"gzip" | "deflate" | "deflate-raw" | "brotli" | "zstd"> = [
|
||||
"gzip",
|
||||
"deflate",
|
||||
"deflate-raw",
|
||||
"brotli",
|
||||
"zstd",
|
||||
];
|
||||
|
||||
const input = "Test data for all compression formats!";
|
||||
const encoder = new TextEncoder();
|
||||
const decoder = new TextDecoder();
|
||||
const data = encoder.encode(input);
|
||||
|
||||
for (const format of formats) {
|
||||
const compressed = await new Response(
|
||||
new Blob([data]).stream().pipeThrough(new CompressionStream(format)),
|
||||
).arrayBuffer();
|
||||
|
||||
const decompressed = await new Response(
|
||||
new Blob([compressed]).stream().pipeThrough(new DecompressionStream(format)),
|
||||
).arrayBuffer();
|
||||
|
||||
const output = decoder.decode(decompressed);
|
||||
expect(output).toBe(input);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user