feat(archive): change API to constructor-based with S3 support (#25940)

## Summary
- Change Archive API from `Bun.Archive.from(data)` to `new Bun.Archive(data, options?)` (see the migration sketch below)
- Change compression options from `{ gzip: true }` to `{ compress: "gzip", level?: number }`
- Default to no compression when no options are provided
- Use `{ compress: "gzip" }` to enable gzip compression (level 6 by default)
- Add Archive support for S3 and local file writes via `Bun.write()`
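
A minimal before/after migration sketch for the points above. The old options shape is taken from the summary; whether `from` accepted options as a second argument is an assumption, and `files` is an illustrative variable:

```typescript
const files = { "hello.txt": "Hello, World!" };

// Before (removed in this PR; second argument assumed):
//   const archive = Bun.Archive.from(files);
//   const gzipped = Bun.Archive.from(files, { gzip: true });

// After:
const archive = new Bun.Archive(files);                       // uncompressed tar
const gzipped = new Bun.Archive(files, { compress: "gzip" }); // gzip, level 6 by default
```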

## New API

```typescript
// Create archive - defaults to uncompressed tar
const archive = new Bun.Archive({
  "hello.txt": "Hello, World!",
  "data.json": JSON.stringify({ foo: "bar" }),
});

// Enable gzip compression
const compressed = new Bun.Archive(files, { compress: "gzip" });

// Gzip with custom level (1-12)
const maxCompression = new Bun.Archive(files, { compress: "gzip", level: 12 });

// Write to local file
await Bun.write("archive.tar", archive);           // uncompressed by default
await Bun.write("archive.tar.gz", compressed);     // gzipped

// Write to S3
await client.write("archive.tar.gz", compressed);          // S3Client.write()
await Bun.write("s3://bucket/archive.tar.gz", compressed); // S3 URL
await s3File.write(compressed);                            // s3File.write()

// Get bytes/blob (uses compression setting from constructor)
const bytes = await archive.bytes();
const blob = await archive.blob();
```
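
Reading an archive back mirrors the S3 tests further down: construct an `Archive` from raw bytes and enumerate entries via `files()`. A sketch grounded in those tests (entry objects are assumed Blob-like, matching the `.text()`/`.bytes()` calls there):

```typescript
// Round-trip: serialize an archive, then parse the bytes back
const archive = new Bun.Archive({ "hello.txt": "Hello, World!" });
const bytes = await archive.bytes();

const parsed = new Bun.Archive(bytes);
const entries = await parsed.files();                // Map of path -> entry
console.log(entries.size);                           // 1
console.log(await entries.get("hello.txt")!.text()); // "Hello, World!"
```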

## TypeScript Types

```typescript
type ArchiveCompression = "gzip";

type ArchiveOptions = {
  compress?: "gzip";
  level?: number;  // 1-12, default 6 when gzip enabled
};
```
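
For context, a sketch of how the constructor and methods might be declared. `ArchiveEntries` and the exact accepted input types are assumptions inferred from the examples and tests in this PR, not the shipped declarations:

```typescript
// Hypothetical shape, inferred from usage above and in the tests below.
type ArchiveEntries = Record<string, string | Uint8Array | Blob>;

declare class Archive {
  // Accepts a path -> content map to build an archive, or raw bytes to parse one.
  constructor(data: ArchiveEntries | Uint8Array | ArrayBuffer, options?: ArchiveOptions);
  bytes(): Promise<Uint8Array>;        // honors the compression set in the constructor
  blob(): Promise<Blob>;
  files(): Promise<Map<string, Blob>>; // path -> entry, as used in the S3 tests
}
```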

## Test plan
- [x] 98 archive tests pass
- [x] S3 integration tests updated to new API
- [x] TypeScript types updated
- [x] Documentation updated with new examples

🤖 Generated with [Claude Code](https://claude.com/claude-code)

---------

Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Commit 959169dfaf (parent 461ad886bd) by Dylan Conway, committed via GitHub on 2026-01-12 14:54:21 -08:00.
8 changed files with 779 additions and 336 deletions.

@@ -1509,3 +1509,128 @@ describe.concurrent("s3 missing credentials", () => {
    });
  });
});

// Archive + S3 integration tests
describe.skipIf(!minioCredentials)("Archive with S3", () => {
  const credentials = minioCredentials!;

  it("writes archive to S3 via S3Client.write()", async () => {
    const client = new Bun.S3Client(credentials);
    const archive = new Bun.Archive({
      "hello.txt": "Hello from Archive!",
      "data.json": JSON.stringify({ test: true }),
    });
    const key = randomUUIDv7() + ".tar";
    await client.write(key, archive);

    // Verify by downloading and reading back
    const downloaded = await client.file(key).bytes();
    const readArchive = new Bun.Archive(downloaded);
    const files = await readArchive.files();
    expect(files.size).toBe(2);
    expect(await files.get("hello.txt")!.text()).toBe("Hello from Archive!");
    expect(await files.get("data.json")!.text()).toBe(JSON.stringify({ test: true }));

    // Cleanup
    await client.unlink(key);
  });

  it("writes archive to S3 via Bun.write() with s3:// URL", async () => {
    const archive = new Bun.Archive({
      "file1.txt": "content1",
      "dir/file2.txt": "content2",
    });
    const key = randomUUIDv7() + ".tar";
    const s3Url = `s3://${credentials.bucket}/${key}`;
    await Bun.write(s3Url, archive, {
      ...credentials,
    });

    // Verify by downloading
    const s3File = Bun.file(s3Url, credentials);
    const downloaded = await s3File.bytes();
    const readArchive = new Bun.Archive(downloaded);
    const files = await readArchive.files();
    expect(files.size).toBe(2);
    expect(await files.get("file1.txt")!.text()).toBe("content1");
    expect(await files.get("dir/file2.txt")!.text()).toBe("content2");

    // Cleanup
    await s3File.delete();
  });

  it("writes archive with binary content to S3", async () => {
    const client = new Bun.S3Client(credentials);
    const binaryData = new Uint8Array([0x00, 0x01, 0x02, 0xff, 0xfe, 0xfd, 0x80, 0x7f]);
    const archive = new Bun.Archive({
      "binary.bin": binaryData,
    });
    const key = randomUUIDv7() + ".tar";
    await client.write(key, archive);

    // Verify binary data is preserved
    const downloaded = await client.file(key).bytes();
    const readArchive = new Bun.Archive(downloaded);
    const files = await readArchive.files();
    const extractedBinary = await files.get("binary.bin")!.bytes();
    expect(extractedBinary).toEqual(binaryData);

    // Cleanup
    await client.unlink(key);
  });

  it("writes large archive to S3", async () => {
    const client = new Bun.S3Client(credentials);

    // Create archive with multiple files
    const entries: Record<string, string> = {};
    for (let i = 0; i < 50; i++) {
      entries[`file${i.toString().padStart(3, "0")}.txt`] = `Content for file ${i}`;
    }
    const archive = new Bun.Archive(entries);
    const key = randomUUIDv7() + ".tar";
    await client.write(key, archive);

    // Verify
    const downloaded = await client.file(key).bytes();
    const readArchive = new Bun.Archive(downloaded);
    const files = await readArchive.files();
    expect(files.size).toBe(50);
    expect(await files.get("file000.txt")!.text()).toBe("Content for file 0");
    expect(await files.get("file049.txt")!.text()).toBe("Content for file 49");

    // Cleanup
    await client.unlink(key);
  });

  it("writes archive via s3File.write()", async () => {
    const client = new Bun.S3Client(credentials);
    const archive = new Bun.Archive({
      "test.txt": "Hello via s3File.write()!",
    });
    const key = randomUUIDv7() + ".tar";
    const s3File = client.file(key);
    await s3File.write(archive);

    // Verify
    const downloaded = await s3File.bytes();
    const readArchive = new Bun.Archive(downloaded);
    const files = await readArchive.files();
    expect(files.size).toBe(1);
    expect(await files.get("test.txt")!.text()).toBe("Hello via s3File.write()!");

    // Cleanup
    await s3File.delete();
  });
});
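
These tests cover uncompressed `.tar` round-trips only; a gzip variant would look like the sketch below. Whether the constructor transparently detects gzipped input when reading back is an assumption not confirmed by this PR:

```typescript
// Hypothetical gzip round-trip, not part of this PR's test suite.
it("writes gzipped archive to S3", async () => {
  const client = new Bun.S3Client(credentials);
  const archive = new Bun.Archive({ "hello.txt": "Hello!" }, { compress: "gzip" });
  const key = randomUUIDv7() + ".tar.gz";
  await client.write(key, archive);

  const downloaded = await client.file(key).bytes();
  // Assumption: reading back decompresses gzip input transparently.
  const readArchive = new Bun.Archive(downloaded);
  const files = await readArchive.files();
  expect(await files.get("hello.txt")!.text()).toBe("Hello!");

  // Cleanup
  await client.unlink(key);
});
```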