### What does this PR do?

- removes the `Unimplemented in Bun` comment on `CompressionStream` and `DecompressionStream`
- updates the types for `CompressionStream` and `DecompressionStream` to add a new internal `CompressionFormat` type to the constructor, which adds `brotli` and `zstd` to the union
- adds tests for brotli and zstd usage
- adds lib.dom.d.ts exclusions for brotli and zstd, as these don't exist in the DOM version of `CompressionFormat`

fixes #25367

### How did you verify your code works?

typechecks and tests
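For context, the widened union means the two new formats typecheck directly on the constructors. A minimal sketch of the shape of the change, assuming a standalone `CompressionFormat` union (the actual declaration in Bun's type definitions may be named and organized differently):

```ts
// Hypothetical sketch of the widened union described above; the real
// declaration lives in Bun's type definitions and may differ in layout.
type CompressionFormat = "gzip" | "deflate" | "deflate-raw" | "brotli" | "zstd";

// With the widened union, both of these constructor calls typecheck in Bun,
// while lib.dom.d.ts keeps only gzip / deflate / deflate-raw:
new CompressionStream("brotli");
new DecompressionStream("zstd");
```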
252 lines
8.3 KiB
TypeScript
import { describe, expect, test } from "bun:test";

describe("CompressionStream and DecompressionStream", () => {
  describe("brotli", () => {
    test("compresses data with brotli", async () => {
      const input = "Hello, Bun! This is a test string for brotli compression.";
      const encoder = new TextEncoder();
      const data = encoder.encode(input);

      const compressionStream = new CompressionStream("brotli");
      const writer = compressionStream.writable.getWriter();
      writer.write(data);
      writer.close();

      const compressedChunks: Uint8Array[] = [];
      const reader = compressionStream.readable.getReader();

      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        compressedChunks.push(value);
      }

      expect(compressedChunks.length).toBeGreaterThan(0);
      const totalLength = compressedChunks.reduce((acc, chunk) => acc + chunk.length, 0);
      expect(totalLength).toBeGreaterThan(0);
    });

    test("decompresses brotli data", async () => {
      const input = "Hello, Bun! This is a test string for brotli decompression.";
      const encoder = new TextEncoder();
      const decoder = new TextDecoder();
      const data = encoder.encode(input);

      // First compress
      const compressionStream = new CompressionStream("brotli");
      const writer = compressionStream.writable.getWriter();
      writer.write(data);
      writer.close();

      const compressedChunks: Uint8Array[] = [];
      const reader = compressionStream.readable.getReader();
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        compressedChunks.push(value);
      }

      // Concatenate compressed chunks
      const totalLength = compressedChunks.reduce((acc, chunk) => acc + chunk.length, 0);
      const compressed = new Uint8Array(totalLength);
      let offset = 0;
      for (const chunk of compressedChunks) {
        compressed.set(chunk, offset);
        offset += chunk.length;
      }

      // Then decompress
      const decompressionStream = new DecompressionStream("brotli");
      const decompWriter = decompressionStream.writable.getWriter();
      decompWriter.write(compressed);
      decompWriter.close();

      const decompressedChunks: Uint8Array[] = [];
      const decompReader = decompressionStream.readable.getReader();
      while (true) {
        const { done, value } = await decompReader.read();
        if (done) break;
        decompressedChunks.push(value);
      }

      const decompressedLength = decompressedChunks.reduce((acc, chunk) => acc + chunk.length, 0);
      const decompressed = new Uint8Array(decompressedLength);
      offset = 0;
      for (const chunk of decompressedChunks) {
        decompressed.set(chunk, offset);
        offset += chunk.length;
      }

      const output = decoder.decode(decompressed);
      expect(output).toBe(input);
    });

    test("round-trip compression with brotli", async () => {
      const testData = [
        "Simple string",
        Buffer.alloc(1000, "A").toString(),
        "Mixed 123 !@# symbols",
        "",
        JSON.stringify({ nested: { object: "value" } }),
      ];

      for (const input of testData) {
        const encoder = new TextEncoder();
        const decoder = new TextDecoder();
        const data = encoder.encode(input);

        // Compress and decompress
        const compressed = await new Response(
          new Blob([data]).stream().pipeThrough(new CompressionStream("brotli")),
        ).arrayBuffer();

        const decompressed = await new Response(
          new Blob([compressed]).stream().pipeThrough(new DecompressionStream("brotli")),
        ).arrayBuffer();

        const output = decoder.decode(decompressed);
        expect(output).toBe(input);
      }
    });
  });

  describe("zstd", () => {
    test("compresses data with zstd", async () => {
      const input = "Hello, Bun! This is a test string for zstd compression.";
      const encoder = new TextEncoder();
      const data = encoder.encode(input);

      const compressionStream = new CompressionStream("zstd");
      const writer = compressionStream.writable.getWriter();
      writer.write(data);
      writer.close();

      const compressedChunks: Uint8Array[] = [];
      const reader = compressionStream.readable.getReader();

      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        compressedChunks.push(value);
      }

      expect(compressedChunks.length).toBeGreaterThan(0);
      const totalLength = compressedChunks.reduce((acc, chunk) => acc + chunk.length, 0);
      expect(totalLength).toBeGreaterThan(0);
    });

    test("decompresses zstd data", async () => {
      const input = "Hello, Bun! This is a test string for zstd decompression.";
      const encoder = new TextEncoder();
      const decoder = new TextDecoder();
      const data = encoder.encode(input);

      // First compress
      const compressionStream = new CompressionStream("zstd");
      const writer = compressionStream.writable.getWriter();
      writer.write(data);
      writer.close();

      const compressedChunks: Uint8Array[] = [];
      const reader = compressionStream.readable.getReader();
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        compressedChunks.push(value);
      }

      // Concatenate compressed chunks
      const totalLength = compressedChunks.reduce((acc, chunk) => acc + chunk.length, 0);
      const compressed = new Uint8Array(totalLength);
      let offset = 0;
      for (const chunk of compressedChunks) {
        compressed.set(chunk, offset);
        offset += chunk.length;
      }

      // Then decompress
      const decompressionStream = new DecompressionStream("zstd");
      const decompWriter = decompressionStream.writable.getWriter();
      decompWriter.write(compressed);
      decompWriter.close();

      const decompressedChunks: Uint8Array[] = [];
      const decompReader = decompressionStream.readable.getReader();
      while (true) {
        const { done, value } = await decompReader.read();
        if (done) break;
        decompressedChunks.push(value);
      }

      const decompressedLength = decompressedChunks.reduce((acc, chunk) => acc + chunk.length, 0);
      const decompressed = new Uint8Array(decompressedLength);
      offset = 0;
      for (const chunk of decompressedChunks) {
        decompressed.set(chunk, offset);
        offset += chunk.length;
      }

      const output = decoder.decode(decompressed);
      expect(output).toBe(input);
    });

    test("round-trip compression with zstd", async () => {
      const testData = [
        "Simple string",
        Buffer.alloc(1000, "A").toString(),
        "Mixed 123 !@# symbols",
        "",
        JSON.stringify({ nested: { object: "value" } }),
      ];

      for (const input of testData) {
        const encoder = new TextEncoder();
        const decoder = new TextDecoder();
        const data = encoder.encode(input);

        // Compress and decompress
        const compressed = await new Response(
          new Blob([data]).stream().pipeThrough(new CompressionStream("zstd")),
        ).arrayBuffer();

        const decompressed = await new Response(
          new Blob([compressed]).stream().pipeThrough(new DecompressionStream("zstd")),
        ).arrayBuffer();

        const output = decoder.decode(decompressed);
        expect(output).toBe(input);
      }
    });
  });

  describe("all formats", () => {
    test("works with all compression formats", async () => {
      const formats: Array<"gzip" | "deflate" | "deflate-raw" | "brotli" | "zstd"> = [
        "gzip",
        "deflate",
        "deflate-raw",
        "brotli",
        "zstd",
      ];

      const input = "Test data for all compression formats!";
      const encoder = new TextEncoder();
      const decoder = new TextDecoder();
      const data = encoder.encode(input);

      for (const format of formats) {
        const compressed = await new Response(
          new Blob([data]).stream().pipeThrough(new CompressionStream(format)),
        ).arrayBuffer();

        const decompressed = await new Response(
          new Blob([compressed]).stream().pipeThrough(new DecompressionStream(format)),
        ).arrayBuffer();

        const output = decoder.decode(decompressed);
        expect(output).toBe(input);
      }
    });
  });
});