Mirror of https://github.com/oven-sh/bun, synced 2026-02-02 15:08:46 +00:00
feat(archive): change API to constructor-based with S3 support (#25940)
## Summary

- Change the Archive API from `Bun.Archive.from(data)` to `new Bun.Archive(data, options?)`
- Change compression options from `{ gzip: true }` to `{ compress: "gzip", level?: number }`
- Default to no compression when no options are provided
- Use `{ compress: "gzip" }` to enable gzip compression (level 6 by default)
- Add Archive support for S3 and local file writes via `Bun.write()`
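
Migration at a glance (old calls as they appear in the docs diff below; `files` here stands in for any `Record<string, string | Blob>`):

```typescript
// Before this change (removed API):
// const archive = Bun.Archive.from(files);
// const gzippedBytes = await archive.bytes("gzip");

// After this change:
const archive = new Bun.Archive(files, { compress: "gzip" });
const gzippedBytes = await archive.bytes();
```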
## New API
```typescript
// Create archive - defaults to uncompressed tar
const archive = new Bun.Archive({
  "hello.txt": "Hello, World!",
  "data.json": JSON.stringify({ foo: "bar" }),
});

// Enable gzip compression
const compressed = new Bun.Archive(files, { compress: "gzip" });

// Gzip with custom level (1-12)
const maxCompression = new Bun.Archive(files, { compress: "gzip", level: 12 });

// Write to local file
await Bun.write("archive.tar", archive); // uncompressed by default
await Bun.write("archive.tar.gz", compressed); // gzipped

// Write to S3
await client.write("archive.tar.gz", compressed); // S3Client.write()
await Bun.write("s3://bucket/archive.tar.gz", compressed); // S3 URL
await s3File.write(compressed); // s3File.write()

// Get bytes/blob (uses compression setting from constructor)
const bytes = await archive.bytes();
const blob = await archive.blob();
```
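
Reading existing tar/tar.gz data goes through the same constructor, and gzip input is detected automatically on read (per the docs change in this diff); a minimal sketch:

```typescript
// Wrap existing archive bytes; gzip is auto-detected when reading
const tarball = await Bun.file("package.tar.gz").bytes();
const existing = new Bun.Archive(tarball);
const entryCount = await existing.extract("./output");
console.log(`Extracted ${entryCount} entries`);
```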
## TypeScript Types
```typescript
type ArchiveCompression = "gzip";

type ArchiveOptions = {
  compress?: "gzip";
  level?: number; // 1-12, default 6 when gzip enabled
};
```
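
The static `Bun.Archive.write()` helper keeps its one-shot form, but its third argument changes from a compression string to the same options object (see the updated `static write` signature in the diff below):

```typescript
// Create and write in one operation with the new options object
await Bun.Archive.write(
  "bundle.tar.gz",
  { "src/index.ts": "console.log('Hello');" },
  { compress: "gzip" },
);
```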
## Test plan
- [x] 98 archive tests pass
- [x] S3 integration tests updated to new API
- [x] TypeScript types updated
- [x] Documentation updated with new examples
🤖 Generated with [Claude Code](https://claude.com/claude-code)
---------
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
@@ -10,21 +10,21 @@ Bun provides a fast, native implementation for working with tar archives through
 **Create an archive from files:**
 
 ```ts
-const archive = Bun.Archive.from({
+const archive = new Bun.Archive({
   "hello.txt": "Hello, World!",
   "data.json": JSON.stringify({ foo: "bar" }),
   "nested/file.txt": "Nested content",
 });
 
 // Write to disk
-await Bun.Archive.write("bundle.tar", archive);
+await Bun.write("bundle.tar", archive);
 ```
 
 **Extract an archive:**
 
 ```ts
 const tarball = await Bun.file("package.tar.gz").bytes();
-const archive = Bun.Archive.from(tarball);
+const archive = new Bun.Archive(tarball);
 const entryCount = await archive.extract("./output");
 console.log(`Extracted ${entryCount} entries`);
 ```
@@ -33,7 +33,7 @@ console.log(`Extracted ${entryCount} entries`);
 
 ```ts
 const tarball = await Bun.file("package.tar.gz").bytes();
-const archive = Bun.Archive.from(tarball);
+const archive = new Bun.Archive(tarball);
 const files = await archive.files();
 
 for (const [path, file] of files) {
@@ -43,10 +43,11 @@ for (const [path, file] of files) {
 
 ## Creating Archives
 
-Use `Bun.Archive.from()` to create an archive from an object where keys are file paths and values are file contents:
+Use `new Bun.Archive()` to create an archive from an object where keys are file paths and values are file contents. By default, archives are uncompressed:
 
 ```ts
-const archive = Bun.Archive.from({
+// Creates an uncompressed tar archive (default)
+const archive = new Bun.Archive({
   "README.md": "# My Project",
   "src/index.ts": "console.log('Hello');",
   "package.json": JSON.stringify({ name: "my-project" }),
@@ -64,7 +65,7 @@ File contents can be:
 const data = "binary data";
 const arrayBuffer = new ArrayBuffer(8);
 
-const archive = Bun.Archive.from({
+const archive = new Bun.Archive({
   "text.txt": "Plain text",
   "blob.bin": new Blob([data]),
   "bytes.bin": new Uint8Array([1, 2, 3, 4]),
@@ -74,18 +75,19 @@ const archive = Bun.Archive.from({
 
 ### Writing Archives to Disk
 
-Use `Bun.Archive.write()` to create and write an archive in one operation:
+Use `Bun.write()` to write an archive to disk:
 
 ```ts
-// Write uncompressed tar
-await Bun.Archive.write("output.tar", {
+// Write uncompressed tar (default)
+const archive = new Bun.Archive({
   "file1.txt": "content1",
   "file2.txt": "content2",
 });
+await Bun.write("output.tar", archive);
 
 // Write gzipped tar
-const files = { "src/index.ts": "console.log('Hello');" };
-await Bun.Archive.write("output.tar.gz", files, "gzip");
+const compressed = new Bun.Archive({ "src/index.ts": "console.log('Hello');" }, { compress: "gzip" });
+await Bun.write("output.tar.gz", compressed);
 ```
 
 ### Getting Archive Bytes
@@ -93,8 +95,7 @@ await Bun.Archive.write("output.tar.gz", files, "gzip");
 Get the archive data as bytes or a Blob:
 
 ```ts
-const files = { "hello.txt": "Hello, World!" };
-const archive = Bun.Archive.from(files);
+const archive = new Bun.Archive({ "hello.txt": "Hello, World!" });
 
 // As Uint8Array
 const bytes = await archive.bytes();
@@ -102,9 +103,10 @@ const bytes = await archive.bytes();
 // As Blob
 const blob = await archive.blob();
 
-// With gzip compression
-const gzippedBytes = await archive.bytes("gzip");
-const gzippedBlob = await archive.blob("gzip");
+// With gzip compression (set at construction)
+const gzipped = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip" });
+const gzippedBytes = await gzipped.bytes();
+const gzippedBlob = await gzipped.blob();
 ```
 
 ## Extracting Archives
@@ -116,13 +118,13 @@ Create an archive from existing tar/tar.gz data:
 ```ts
 // From a file
 const tarball = await Bun.file("package.tar.gz").bytes();
-const archiveFromFile = Bun.Archive.from(tarball);
+const archiveFromFile = new Bun.Archive(tarball);
 ```
 
 ```ts
 // From a fetch response
 const response = await fetch("https://example.com/archive.tar.gz");
-const archiveFromFetch = Bun.Archive.from(await response.blob());
+const archiveFromFetch = new Bun.Archive(await response.blob());
 ```
 
 ### Extracting to Disk
@@ -131,7 +133,7 @@ Use `.extract()` to write all files to a directory:
 
 ```ts
 const tarball = await Bun.file("package.tar.gz").bytes();
-const archive = Bun.Archive.from(tarball);
+const archive = new Bun.Archive(tarball);
 const count = await archive.extract("./extracted");
 console.log(`Extracted ${count} entries`);
 ```
@@ -148,7 +150,7 @@ Use glob patterns to extract only specific files. Patterns are matched against a
 
 ```ts
 const tarball = await Bun.file("package.tar.gz").bytes();
-const archive = Bun.Archive.from(tarball);
+const archive = new Bun.Archive(tarball);
 
 // Extract only TypeScript files
 const tsCount = await archive.extract("./extracted", { glob: "**/*.ts" });
@@ -181,7 +183,7 @@ Use `.files()` to get archive contents as a `Map` of `File` objects without extr
 
 ```ts
 const tarball = await Bun.file("package.tar.gz").bytes();
-const archive = Bun.Archive.from(tarball);
+const archive = new Bun.Archive(tarball);
 const files = await archive.files();
 
 for (const [path, file] of files) {
@@ -206,7 +208,7 @@ Archive operations can fail due to corrupted data, I/O errors, or invalid paths.
 ```ts
 try {
   const tarball = await Bun.file("package.tar.gz").bytes();
-  const archive = Bun.Archive.from(tarball);
+  const archive = new Bun.Archive(tarball);
   const count = await archive.extract("./output");
   console.log(`Extracted ${count} entries`);
 } catch (e: unknown) {
@@ -227,7 +229,7 @@ try {
 
 Common error scenarios:
 
-- **Corrupted/truncated archives** - `Archive.from()` loads the archive data; errors may be deferred until read/extract operations
+- **Corrupted/truncated archives** - `new Archive()` loads the archive data; errors may be deferred until read/extract operations
 - **Permission denied** - `extract()` throws if the target directory is not writable
 - **Disk full** - `extract()` throws if there's insufficient space
 - **Invalid paths** - Operations throw for malformed file paths
@@ -239,7 +241,7 @@ The count returned by `extract()` includes all successfully written entries (fil
 For additional security with untrusted archives, you can enumerate and validate paths before extraction:
 
 ```ts
-const archive = Bun.Archive.from(untrustedData);
+const archive = new Bun.Archive(untrustedData);
 const files = await archive.files();
 
 // Optional: Custom validation for additional checks
@@ -298,26 +300,28 @@ See [Bun.Glob](/docs/api/glob) for the full glob syntax including escaping and a
 
 ## Compression
 
-Bun.Archive supports gzip compression for both reading and writing:
+Bun.Archive creates uncompressed tar archives by default. Use `{ compress: "gzip" }` to enable gzip compression:
 
 ```ts
+// Default: uncompressed tar
+const archive = new Bun.Archive({ "hello.txt": "Hello, World!" });
+
 // Reading: automatically detects gzip
 const gzippedTarball = await Bun.file("archive.tar.gz").bytes();
-const archive = Bun.Archive.from(gzippedTarball);
+const readArchive = new Bun.Archive(gzippedTarball);
 
-// Writing: specify compression
-const files = { "hello.txt": "Hello, World!" };
-await Bun.Archive.write("output.tar.gz", files, "gzip");
+// Enable gzip compression
+const compressed = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip" });
 
-// Getting bytes: specify compression
-const gzippedBytes = await archive.bytes("gzip");
+// Gzip with custom level (1-12)
+const maxCompression = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip", level: 12 });
 ```
 
-The compression argument accepts:
+The options accept:
 
-- `"gzip"` - Enable gzip compression
-- `true` - Same as `"gzip"`
-- `false` or `undefined` - No compression
+- No options or `undefined` - Uncompressed tar (default)
+- `{ compress: "gzip" }` - Enable gzip compression at level 6
+- `{ compress: "gzip", level: number }` - Gzip with custom level 1-12 (1 = fastest, 12 = smallest)
 
 ## Examples
 
@@ -339,15 +343,16 @@ for await (const path of glob.scan(".")) {
 // Add package.json
 files["package.json"] = await Bun.file("package.json").text();
 
-// Create compressed archive
-await Bun.Archive.write("bundle.tar.gz", files, "gzip");
+// Create compressed archive and write to disk
+const archive = new Bun.Archive(files, { compress: "gzip" });
+await Bun.write("bundle.tar.gz", archive);
 ```
 
 ### Extract and Process npm Package
 
 ```ts
 const response = await fetch("https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz");
-const archive = Bun.Archive.from(await response.blob());
+const archive = new Bun.Archive(await response.blob());
 
 // Get package.json
 const files = await archive.files("package/package.json");
@@ -365,7 +370,7 @@ if (packageJson) {
 import { readdir } from "node:fs/promises";
 import { join } from "node:path";
 
-async function archiveDirectory(dir: string): Promise<Bun.Archive> {
+async function archiveDirectory(dir: string, compress = false): Promise<Bun.Archive> {
   const files: Record<string, Blob> = {};
 
   async function walk(currentDir: string, prefix: string = "") {
@@ -384,11 +389,11 @@ async function archiveDirectory(dir: string): Promise<Bun.Archive> {
   }
 
   await walk(dir);
-  return Bun.Archive.from(files);
+  return new Bun.Archive(files, compress ? { compress: "gzip" } : undefined);
 }
 
-const archive = await archiveDirectory("./my-project");
-await Bun.Archive.write("my-project.tar.gz", archive, "gzip");
+const archive = await archiveDirectory("./my-project", true);
+await Bun.write("my-project.tar.gz", archive);
 ```
 
 ## Reference
@@ -396,14 +401,19 @@ await Bun.Archive.write("my-project.tar.gz", archive, "gzip");
 > **Note**: The following type signatures are simplified for documentation purposes. See [`packages/bun-types/bun.d.ts`](https://github.com/oven-sh/bun/blob/main/packages/bun-types/bun.d.ts) for the full type definitions.
 
 ```ts
-type ArchiveCompression = "gzip" | boolean;
-
 type ArchiveInput =
   | Record<string, string | Blob | Bun.ArrayBufferView | ArrayBufferLike>
   | Blob
   | Bun.ArrayBufferView
   | ArrayBufferLike;
 
+type ArchiveOptions = {
+  /** Compression algorithm. Currently only "gzip" is supported. */
+  compress?: "gzip";
+  /** Compression level 1-12 (default 6 when gzip is enabled). */
+  level?: number;
+};
+
 interface ArchiveExtractOptions {
   /** Glob pattern(s) to filter extraction. Supports negative patterns with "!" prefix. */
   glob?: string | readonly string[];
@@ -412,13 +422,11 @@ interface ArchiveExtractOptions {
 class Archive {
   /**
    * Create an Archive from input data
    * @param data - Files to archive (as object) or existing archive data (as bytes/blob)
+   * @param options - Compression options. Uncompressed by default.
+   *   Pass { compress: "gzip" } to enable compression.
    */
-  static from(data: ArchiveInput): Archive;
-
-  /**
-   * Write an archive directly to disk
-   */
-  static write(path: string, data: ArchiveInput | Archive, compress?: ArchiveCompression): Promise<void>;
+  constructor(data: ArchiveInput, options?: ArchiveOptions);
 
   /**
    * Extract archive to a directory
@@ -427,14 +435,14 @@ class Archive {
   extract(path: string, options?: ArchiveExtractOptions): Promise<number>;
 
   /**
-   * Get archive as a Blob
+   * Get archive as a Blob (uses compression setting from constructor)
    */
-  blob(compress?: ArchiveCompression): Promise<Blob>;
+  blob(): Promise<Blob>;
 
   /**
-   * Get archive as a Uint8Array
+   * Get archive as a Uint8Array (uses compression setting from constructor)
    */
-  bytes(compress?: ArchiveCompression): Promise<Uint8Array<ArrayBuffer>>;
+  bytes(): Promise<Uint8Array<ArrayBuffer>>;
 
   /**
    * Get archive contents as File objects (regular files only, no directories)
 
126 packages/bun-types/bun.d.ts (vendored)
@@ -750,7 +750,7 @@ declare module "bun" {
    */
   function write(
     destination: BunFile | S3File | PathLike,
-    input: Blob | NodeJS.TypedArray | ArrayBufferLike | string | BlobPart[],
+    input: Blob | NodeJS.TypedArray | ArrayBufferLike | string | BlobPart[] | Archive,
     options?: {
       /**
        * If writing to a PathLike, set the permissions of the file.
@@ -6975,15 +6975,44 @@ declare module "bun" {
 
   /**
    * Compression format for archive output.
    * - `"gzip"` - Compress with gzip
-   * - `true` - Same as `"gzip"`
-   * - `false` - Explicitly disable compression (no compression)
-   * - `undefined` - No compression (default behavior when omitted)
-   *
-   * Both `false` and `undefined` result in no compression; `false` can be used
-   * to explicitly indicate "no compression" in code where the intent should be clear.
+   * Currently only `"gzip"` is supported.
    */
-  type ArchiveCompression = "gzip" | boolean;
+  type ArchiveCompression = "gzip";
 
+  /**
+   * Options for creating an Archive instance.
+   *
+   * By default, archives are not compressed. Use `{ compress: "gzip" }` to enable compression.
+   *
+   * @example
+   * ```ts
+   * // No compression (default)
+   * new Bun.Archive(data);
+   *
+   * // Enable gzip with default level (6)
+   * new Bun.Archive(data, { compress: "gzip" });
+   *
+   * // Specify compression level
+   * new Bun.Archive(data, { compress: "gzip", level: 9 });
+   * ```
+   */
+  interface ArchiveOptions {
+    /**
+     * Compression algorithm to use.
+     * Currently only "gzip" is supported.
+     * If not specified, no compression is applied.
+     */
+    compress?: ArchiveCompression;
+    /**
+     * Compression level (1-12). Only applies when `compress` is set.
+     * - 1: Fastest compression, lowest ratio
+     * - 6: Default balance of speed and ratio
+     * - 12: Best compression ratio, slowest
+     *
+     * @default 6
+     */
+    level?: number;
+  }
 
   /**
    * Options for extracting archive contents.
@@ -7031,7 +7060,7 @@ declare module "bun" {
    * @example
    * **Create an archive from an object:**
    * ```ts
-   * const archive = Bun.Archive.from({
+   * const archive = new Bun.Archive({
    *   "hello.txt": "Hello, World!",
    *   "data.json": JSON.stringify({ foo: "bar" }),
    *   "binary.bin": new Uint8Array([1, 2, 3, 4]),
@@ -7039,9 +7068,20 @@ declare module "bun" {
    * ```
    *
    * @example
+   * **Create a gzipped archive:**
+   * ```ts
+   * const archive = new Bun.Archive({
+   *   "hello.txt": "Hello, World!",
+   * }, { compress: "gzip" });
+   *
+   * // Or with a specific compression level (1-12)
+   * const archive = new Bun.Archive(data, { compress: "gzip", level: 9 });
+   * ```
+   *
+   * @example
    * **Extract an archive to disk:**
    * ```ts
-   * const archive = Bun.Archive.from(tarballBytes);
+   * const archive = new Bun.Archive(tarballBytes);
    * const entryCount = await archive.extract("./output");
    * console.log(`Extracted ${entryCount} entries`);
    * ```
@@ -7049,7 +7089,7 @@ declare module "bun" {
    * @example
    * **Get archive contents as a Map of File objects:**
    * ```ts
-   * const archive = Bun.Archive.from(tarballBytes);
+   * const archive = new Bun.Archive(tarballBytes);
    * const entries = await archive.files();
    * for (const [path, file] of entries) {
    *   console.log(path, await file.text());
@@ -7062,36 +7102,50 @@ declare module "bun" {
    * await Bun.Archive.write("bundle.tar.gz", {
    *   "src/index.ts": sourceCode,
    *   "package.json": packageJson,
-   * }, "gzip");
+   * }, { compress: "gzip" });
    * ```
    */
   export class Archive {
     /**
      * Create an `Archive` instance from input data.
      *
+     * By default, archives are not compressed. Use `{ compress: "gzip" }` to enable compression.
+     *
      * @param data - The input data for the archive:
      *   - **Object**: Creates a new tarball with the object's keys as file paths and values as file contents
      *   - **Blob/TypedArray/ArrayBuffer**: Wraps existing archive data (tar or tar.gz)
      *
-     * @returns A new `Archive` instance
+     * @param options - Optional archive options including compression settings.
+     *   Defaults to no compression if omitted.
      *
      * @example
-     * **From an object (creates new tarball):**
+     * **From an object (creates uncompressed tarball):**
      * ```ts
-     * const archive = Bun.Archive.from({
+     * const archive = new Bun.Archive({
      *   "hello.txt": "Hello, World!",
      *   "nested/file.txt": "Nested content",
      * });
      * ```
      *
      * @example
+     * **With gzip compression:**
+     * ```ts
+     * const archive = new Bun.Archive(data, { compress: "gzip" });
+     * ```
+     *
+     * @example
+     * **With explicit gzip compression level:**
+     * ```ts
+     * const archive = new Bun.Archive(data, { compress: "gzip", level: 12 });
+     * ```
+     *
+     * @example
      * **From existing archive data:**
      * ```ts
      * const response = await fetch("https://example.com/package.tar.gz");
-     * const archive = Bun.Archive.from(await response.blob());
+     * const archive = new Bun.Archive(await response.blob());
      * ```
      */
-    static from(data: ArchiveInput): Archive;
+    constructor(data: ArchiveInput, options?: ArchiveOptions);
 
     /**
      * Create and write an archive directly to disk in one operation.
@@ -7100,8 +7154,8 @@ declare module "bun" {
      * as it streams the data directly to disk.
      *
      * @param path - The file path to write the archive to
-     * @param data - The input data for the archive (same as `Archive.from()`)
-     * @param compress - Optional compression: `"gzip"`, `true` for gzip, or `false`/`undefined` for none
+     * @param data - The input data for the archive (same as `new Archive()`)
+     * @param options - Optional archive options including compression settings
      *
      * @returns A promise that resolves when the write is complete
      *
@@ -7117,10 +7171,10 @@ declare module "bun" {
      * @example
      * **Write gzipped tarball:**
      * ```ts
-     * await Bun.Archive.write("output.tar.gz", files, "gzip");
+     * await Bun.Archive.write("output.tar.gz", files, { compress: "gzip" });
      * ```
      */
-    static write(path: string, data: ArchiveInput | Archive, compress?: ArchiveCompression): Promise<void>;
+    static write(path: string, data: ArchiveInput | Archive, options?: ArchiveOptions): Promise<void>;
 
     /**
      * Extract the archive contents to a directory on disk.
@@ -7136,7 +7190,7 @@ declare module "bun" {
      * @example
      * **Extract all entries:**
      * ```ts
-     * const archive = Bun.Archive.from(tarballBytes);
+     * const archive = new Bun.Archive(tarballBytes);
      * const count = await archive.extract("./extracted");
      * console.log(`Extracted ${count} entries`);
      * ```
@@ -7166,42 +7220,48 @@ declare module "bun" {
     /**
      * Get the archive contents as a `Blob`.
      *
-     * @param compress - Optional compression: `"gzip"`, `true` for gzip, or `false`/`undefined` for none
+     * Uses the compression settings specified when the Archive was created.
      *
      * @returns A promise that resolves with the archive data as a Blob
      *
      * @example
-     * **Get uncompressed tarball:**
+     * **Get tarball as Blob:**
      * ```ts
+     * const archive = new Bun.Archive(data);
      * const blob = await archive.blob();
      * ```
      *
      * @example
-     * **Get gzipped tarball:**
+     * **Get gzipped tarball as Blob:**
      * ```ts
-     * const gzippedBlob = await archive.blob("gzip");
+     * const archive = new Bun.Archive(data, { compress: "gzip" });
+     * const gzippedBlob = await archive.blob();
      * ```
      */
-    blob(compress?: ArchiveCompression): Promise<Blob>;
+    blob(): Promise<Blob>;
 
     /**
      * Get the archive contents as a `Uint8Array`.
      *
-     * @param compress - Optional compression: `"gzip"`, `true` for gzip, or `false`/`undefined` for none
+     * Uses the compression settings specified when the Archive was created.
      *
      * @returns A promise that resolves with the archive data as a Uint8Array
      *
      * @example
-     * **Get uncompressed tarball bytes:**
+     * **Get tarball bytes:**
      * ```ts
+     * const archive = new Bun.Archive(data);
      * const bytes = await archive.bytes();
      * ```
      *
      * @example
      * **Get gzipped tarball bytes:**
      * ```ts
-     * const gzippedBytes = await archive.bytes("gzip");
+     * const archive = new Bun.Archive(data, { compress: "gzip" });
+     * const gzippedBytes = await archive.bytes();
      * ```
      */
-    bytes(compress?: ArchiveCompression): Promise<Uint8Array<ArrayBuffer>>;
+    bytes(): Promise<Uint8Array<ArrayBuffer>>;
 
     /**
      * Get the archive contents as a `Map` of `File` objects.
 
18 packages/bun-types/s3.d.ts (vendored)
@@ -609,7 +609,17 @@ declare module "bun" {
      * });
      */
     write(
-      data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile | S3File | Blob,
+      data:
+        | string
+        | ArrayBufferView
+        | ArrayBuffer
+        | SharedArrayBuffer
+        | Request
+        | Response
+        | BunFile
+        | S3File
+        | Blob
+        | Archive,
       options?: S3Options,
     ): Promise<number>;
 
@@ -920,7 +930,8 @@ declare module "bun" {
         | BunFile
         | S3File
         | Blob
-        | File,
+        | File
+        | Archive,
       options?: S3Options,
     ): Promise<number>;
 
@@ -970,7 +981,8 @@ declare module "bun" {
         | BunFile
         | S3File
         | Blob
-        | File,
+        | File
+        | Archive,
       options?: S3Options,
     ): Promise<number>;
 
@@ -8,10 +8,6 @@ export default [
     configurable: false,
     JSType: "0b11101110",
     klass: {
-      from: {
-        fn: "from",
-        length: 1,
-      },
       write: {
         fn: "write",
         length: 2,
 
@@ -5,8 +5,19 @@ pub const toJS = js.toJS;
 pub const fromJS = js.fromJS;
 pub const fromJSDirect = js.fromJSDirect;
 
+/// Compression options for the archive
+pub const Compression = union(enum) {
+    none,
+    gzip: struct {
+        /// Compression level: 1 (fastest) to 12 (maximum compression). Default is 6.
+        level: u8 = 6,
+    },
+};
+
 /// The underlying data for the archive - uses Blob.Store for thread-safe ref counting
 store: *jsc.WebCore.Blob.Store,
+/// Compression settings for this archive
+compress: Compression = .none,
 
 pub fn finalize(this: *Archive) void {
     jsc.markBinding(@src());
@@ -65,47 +76,95 @@ fn countFilesInArchive(data: []const u8) u32 {
     return count;
 }
 
-/// Constructor: new Archive() - throws an error since users should use Archive.from()
-pub fn constructor(globalThis: *jsc.JSGlobalObject, _: *jsc.CallFrame) bun.JSError!*Archive {
-    return globalThis.throwInvalidArguments("Archive cannot be constructed directly. Use Archive.from() instead.", .{});
-}
-
-/// Static method: Archive.from(data)
+/// Constructor: new Archive(data, options?)
 /// Creates an Archive from either:
 /// - An object { [path: string]: Blob | string | ArrayBufferView | ArrayBufferLike }
 /// - A Blob, ArrayBufferView, or ArrayBufferLike (assumes it's already a valid archive)
-pub fn from(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!jsc.JSValue {
-    const arg = callframe.argumentsAsArray(1)[0];
-    if (arg == .zero) {
-        return globalThis.throwInvalidArguments("Archive.from requires an argument", .{});
+/// Options:
+/// - compress: "gzip" - Enable gzip compression
+/// - level: number (1-12) - Compression level (default 6)
+/// When no options are provided, no compression is applied
+pub fn constructor(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!*Archive {
+    const data_arg, const options_arg = callframe.argumentsAsArray(2);
+    if (data_arg == .zero) {
+        return globalThis.throwInvalidArguments("new Archive() requires an argument", .{});
     }
 
+    // Parse compression options
+    const compress = try parseCompressionOptions(globalThis, options_arg);
+
     // For Blob/Archive, ref the existing store (zero-copy)
-    if (arg.as(jsc.WebCore.Blob)) |blob_ptr| {
+    if (data_arg.as(jsc.WebCore.Blob)) |blob_ptr| {
         if (blob_ptr.store) |store| {
             store.ref();
-            return bun.new(Archive, .{ .store = store }).toJS(globalThis);
+            return bun.new(Archive, .{ .store = store, .compress = compress });
         }
     }
 
     // For ArrayBuffer/TypedArray, copy the data
-    if (arg.asArrayBuffer(globalThis)) |array_buffer| {
+    if (data_arg.asArrayBuffer(globalThis)) |array_buffer| {
         const data = try bun.default_allocator.dupe(u8, array_buffer.slice());
-        return createArchive(globalThis, data);
+        return createArchive(data, compress);
     }
 
     // For plain objects, build a tarball
-    if (arg.isObject()) {
-        const data = try buildTarballFromObject(globalThis, arg);
-        return createArchive(globalThis, data);
+    if (data_arg.isObject()) {
+        const data = try buildTarballFromObject(globalThis, data_arg);
+        return createArchive(data, compress);
     }
 
     return globalThis.throwInvalidArguments("Expected an object, Blob, TypedArray, or ArrayBuffer", .{});
 }
 
-fn createArchive(globalThis: *jsc.JSGlobalObject, data: []u8) jsc.JSValue {
+/// Parse compression options from JS value
+/// Returns .none if no compression specified, caller must handle defaults
+fn parseCompressionOptions(globalThis: *jsc.JSGlobalObject, options_arg: jsc.JSValue) bun.JSError!Compression {
+    // No options provided means no compression (caller handles defaults)
+    if (options_arg.isUndefinedOrNull()) {
+        return .none;
+    }
+
+    if (!options_arg.isObject()) {
+        return globalThis.throwInvalidArguments("Archive: options must be an object", .{});
+    }
+
+    // Check for compress option
+    if (try options_arg.getTruthy(globalThis, "compress")) |compress_val| {
+        // compress must be "gzip"
+        if (!compress_val.isString()) {
+            return globalThis.throwInvalidArguments("Archive: compress option must be a string", .{});
+        }
+
+        const compress_str = try compress_val.toSlice(globalThis, bun.default_allocator);
+        defer compress_str.deinit();
+
+        if (!bun.strings.eqlComptime(compress_str.slice(), "gzip")) {
+            return globalThis.throwInvalidArguments("Archive: compress option must be \"gzip\"", .{});
+        }
+
+        // Parse level option (1-12, default 6)
+        var level: u8 = 6;
+        if (try options_arg.getTruthy(globalThis, "level")) |level_val| {
+            if (!level_val.isNumber()) {
+                return globalThis.throwInvalidArguments("Archive: level must be a number", .{});
+            }
+            const level_num = level_val.toInt64();
+            if (level_num < 1 or level_num > 12) {
+                return globalThis.throwInvalidArguments("Archive: level must be between 1 and 12", .{});
+            }
+            level = @intCast(level_num);
+        }
+
+        return .{ .gzip = .{ .level = level } };
+    }
+
+    // No compress option specified in options object means no compression
+    return .none;
+}
+
+fn createArchive(data: []u8, compress: Compression) *Archive {
     const store = jsc.WebCore.Blob.Store.init(data, bun.default_allocator);
-    return bun.new(Archive, .{ .store = store }).toJS(globalThis);
+    return bun.new(Archive, .{ .store = store, .compress = compress });
 }
 
 /// Shared helper that builds tarball bytes from a JS object
@@ -212,12 +271,15 @@ fn getEntryData(globalThis: *jsc.JSGlobalObject, value: jsc.JSValue, allocator:
     return value.toSlice(globalThis, allocator);
 }
 
-/// Static method: Archive.write(path, data, compress?)
-/// Creates and writes an archive to disk in one operation
+/// Static method: Archive.write(path, data, options?)
+/// Creates and writes an archive to disk in one operation.
+/// For Archive instances, uses the archive's compression settings unless overridden by options.
+/// Options:
+/// - gzip: { level?: number } - Override compression settings
 pub fn write(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!jsc.JSValue {
-    const path_arg, const data_arg, const compress_arg = callframe.argumentsAsArray(3);
+    const path_arg, const data_arg, const options_arg = callframe.argumentsAsArray(3);
     if (data_arg == .zero) {
-        return globalThis.throwInvalidArguments("Archive.write requires at least 2 arguments (path, data)", .{});
+        return globalThis.throwInvalidArguments("Archive.write requires 2 arguments (path, data)", .{});
     }
 
     // Get the path
@@ -228,61 +290,37 @@ pub fn write(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSE
     const path_slice = try path_arg.toSlice(globalThis, bun.default_allocator);
     defer path_slice.deinit();
 
-    // Determine compression
-    const use_gzip = try parseCompressArg(globalThis, compress_arg);
+    // Parse options for compression override
+    const options_compress = try parseCompressionOptions(globalThis, options_arg);
 
-    // Try to use store reference (zero-copy) for Archive/Blob
+    // For Archive instances, use options override or archive's compression settings
     if (fromJS(data_arg)) |archive| {
-        return startWriteTask(globalThis, .{ .store = archive.store }, path_slice.slice(), use_gzip);
+        const compress = if (options_compress != .none) options_compress else archive.compress;
+        return startWriteTask(globalThis, .{ .store = archive.store }, path_slice.slice(), compress);
     }
 
+    // For Blobs, use store reference with options compression
     if (data_arg.as(jsc.WebCore.Blob)) |blob_ptr| {
         if (blob_ptr.store) |store| {
-            return startWriteTask(globalThis, .{ .store = store }, path_slice.slice(), use_gzip);
+            return startWriteTask(globalThis, .{ .store = store }, path_slice.slice(), options_compress);
         }
     }
 
-    // Fall back to copying data for ArrayBuffer/TypedArray/objects
-    const archive_data = try getArchiveData(globalThis, data_arg);
-    return startWriteTask(globalThis, .{ .owned = archive_data }, path_slice.slice(), use_gzip);
-}
-
-/// Get archive data from a value, returning owned bytes
-fn getArchiveData(globalThis: *jsc.JSGlobalObject, arg: jsc.JSValue) bun.JSError![]u8 {
-    // Check if it's a typed array, ArrayBuffer, or similar
-    if (arg.asArrayBuffer(globalThis)) |array_buffer| {
-        return bun.default_allocator.dupe(u8, array_buffer.slice());
-    }
-
-    // Check if it's an object with entries (plain object) - build tarball
-    if (arg.isObject()) {
-        return buildTarballFromObject(globalThis, arg);
+    // For ArrayBuffer/TypedArray, copy the data with options compression
+    if (data_arg.asArrayBuffer(globalThis)) |array_buffer| {
+        const data = try bun.default_allocator.dupe(u8, array_buffer.slice());
+        return startWriteTask(globalThis, .{ .owned = data }, path_slice.slice(), options_compress);
+    }
+
+    // For plain objects, build a tarball with options compression
+    if (data_arg.isObject()) {
+        const data = try buildTarballFromObject(globalThis, data_arg);
+        return startWriteTask(globalThis, .{ .owned = data }, path_slice.slice(), options_compress);
     }
 
     return globalThis.throwInvalidArguments("Expected an object, Blob, TypedArray, ArrayBuffer, or Archive", .{});
 }
 
-fn parseCompressArg(globalThis: *jsc.JSGlobalObject, arg: jsc.JSValue) bun.JSError!bool {
-    if (arg.isUndefinedOrNull()) {
-        return false;
-    }
-
-    if (arg.isBoolean()) {
-        return arg.toBoolean();
-    }
-
-    if (arg.isString()) {
-        const str = try arg.toSlice(globalThis, bun.default_allocator);
-        defer str.deinit();
-        if (std.mem.eql(u8, str.slice(), "gzip")) {
-            return true;
-        }
-        return globalThis.throwInvalidArguments("Archive: compress argument must be 'gzip', a boolean, or undefined", .{});
-    }
-
-    return globalThis.throwInvalidArguments("Archive: compress argument must be 'gzip', a boolean, or undefined", .{});
-}
-
 /// Instance method: archive.extract(path, options?)
 /// Extracts the archive to the given path
 /// Options:
@@ -379,20 +417,16 @@ fn freePatterns(patterns: []const []const u8) void {
     bun.default_allocator.free(patterns);
 }
 
-/// Instance method: archive.blob(compress?)
-/// Returns Promise<Blob> with the archive data
-pub fn blob(this: *Archive, globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!jsc.JSValue {
-    const compress_arg = callframe.argumentsAsArray(1)[0];
-    const use_gzip = try parseCompressArg(globalThis, compress_arg);
-    return startBlobTask(globalThis, this.store, use_gzip, .blob);
+/// Instance method: archive.blob()
+/// Returns Promise<Blob> with the archive data (compressed if gzip was set in options)
+pub fn blob(this: *Archive, globalThis: *jsc.JSGlobalObject, _: *jsc.CallFrame) bun.JSError!jsc.JSValue {
+    return startBlobTask(globalThis, this.store, this.compress, .blob);
 }
 
-/// Instance method: archive.bytes(compress?)
-/// Returns Promise<Uint8Array> with the archive data
-pub fn bytes(this: *Archive, globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!jsc.JSValue {
-    const compress_arg = callframe.argumentsAsArray(1)[0];
-    const use_gzip = try parseCompressArg(globalThis, compress_arg);
-    return startBlobTask(globalThis, this.store, use_gzip, .bytes);
+/// Instance method: archive.bytes()
+/// Returns Promise<Uint8Array> with the archive data (compressed if gzip was set in options)
+pub fn bytes(this: *Archive, globalThis: *jsc.JSGlobalObject, _: *jsc.CallFrame) bun.JSError!jsc.JSValue {
+    return startBlobTask(globalThis, this.store, this.compress, .bytes);
 }
 
 /// Instance method: archive.files(glob?)
@@ -578,15 +612,17 @@ const BlobContext = struct {
     };
 
     store: *jsc.WebCore.Blob.Store,
-    use_gzip: bool,
+    compress: Compression,
     output_type: OutputType,
     result: Result = .{ .uncompressed = {} },
 
     fn run(this: *BlobContext) Result {
-        if (this.use_gzip) {
-            return .{ .compressed = compressGzip(this.store.sharedView()) catch |e| return .{ .err = e } };
+        switch (this.compress) {
+            .gzip => |opts| {
+                return .{ .compressed = compressGzip(this.store.sharedView(), opts.level) catch |e| return .{ .err = e } };
+            },
+            .none => return .{ .uncompressed = {} },
         }
-        return .{ .uncompressed = {} };
     }
 
     fn runFromJS(this: *BlobContext, globalThis: *jsc.JSGlobalObject) bun.JSError!PromiseResult {
@@ -617,13 +653,13 @@ const BlobContext = struct {
 
 pub const BlobTask = AsyncTask(BlobContext);
 
-fn startBlobTask(globalThis: *jsc.JSGlobalObject, store: *jsc.WebCore.Blob.Store, use_gzip: bool, output_type: BlobContext.OutputType) bun.JSError!jsc.JSValue {
+fn startBlobTask(globalThis: *jsc.JSGlobalObject, store: *jsc.WebCore.Blob.Store, compress: Compression, output_type: BlobContext.OutputType) bun.JSError!jsc.JSValue {
     store.ref();
     errdefer store.deref();
 
     const task = try BlobTask.create(globalThis, .{
         .store = store,
-        .use_gzip = use_gzip,
+        .compress = compress,
         .output_type = output_type,
     });
 
@@ -646,7 +682,7 @@ const WriteContext = struct {
 
     data: Data,
     path: [:0]const u8,
-    use_gzip: bool,
+    compress: Compression,
     result: Result = .{ .success = {} },
 
     fn run(this: *WriteContext) Result {
@@ -654,11 +690,11 @@ const WriteContext = struct {
         .owned => |d| d,
         .store => |s| s.sharedView(),
     };
-    const data_to_write = if (this.use_gzip)
-        compressGzip(source_data) catch |e| return .{ .err = e }
-    else
-        source_data;
-    defer if (this.use_gzip) bun.default_allocator.free(data_to_write);
+    const data_to_write = switch (this.compress) {
+        .gzip => |opts| compressGzip(source_data, opts.level) catch |e| return .{ .err = e },
+        .none => source_data,
+    };
+    defer if (this.compress != .none) bun.default_allocator.free(data_to_write);
 
     const file = switch (bun.sys.File.openat(.cwd(), this.path, bun.O.CREAT | bun.O.WRONLY | bun.O.TRUNC, 0o644)) {
         .err => |err| return .{ .sys_err = err.clone(bun.default_allocator) },
@@ -699,7 +735,7 @@ fn startWriteTask(
     globalThis: *jsc.JSGlobalObject,
     data: WriteContext.Data,
     path: []const u8,
-    use_gzip: bool,
+    compress: Compression,
 ) bun.JSError!jsc.JSValue {
     const path_z = try bun.default_allocator.dupeZ(u8, path);
     errdefer bun.default_allocator.free(path_z);
@@ -714,7 +750,7 @@ fn startWriteTask(
     const task = try WriteTask.create(globalThis, .{
         .data = data,
         .path = path_z,
-        .use_gzip = use_gzip,
+        .compress = compress,
     });
 
     const promise_js = task.promise.value();
@@ -869,10 +905,10 @@ fn startFilesTask(globalThis: *jsc.JSGlobalObject, store: *jsc.WebCore.Blob.Stor
 // Helpers
 // ============================================================================
 
-fn compressGzip(data: []const u8) ![]u8 {
+fn compressGzip(data: []const u8, level: u8) ![]u8 {
     libdeflate.load();
 
-    const compressor = libdeflate.Compressor.alloc(6) orelse return error.GzipInitFailed;
+    const compressor = libdeflate.Compressor.alloc(@intCast(level)) orelse return error.GzipInitFailed;
     defer compressor.deinit();
 
     const max_size = compressor.maxBytesNeeded(data, .gzip);
 
@@ -1484,6 +1484,12 @@ pub fn writeFileInternal(globalThis: *jsc.JSGlobalObject, path_or_blob_: *PathOr
         }
     }
 
+    // Check for Archive - allows Bun.write() and S3 writes to accept Archive instances
+    if (data.as(Archive)) |archive| {
+        archive.store.ref();
+        break :brk Blob.initWithStore(archive.store, globalThis);
+    }
+
     break :brk try Blob.get(
         globalThis,
         data,
@@ -4828,6 +4834,7 @@ const NewReadFileHandler = read_file.NewReadFileHandler;
 
 const string = []const u8;
 
+const Archive = @import("../api/Archive.zig");
 const Environment = @import("../../env.zig");
 const S3File = @import("./S3File.zig");
 const std = @import("std");
 
File diff suppressed because it is too large
@@ -1509,3 +1509,128 @@ describe.concurrent("s3 missing credentials", () => {
 });
 });
 });
 
+// Archive + S3 integration tests
+describe.skipIf(!minioCredentials)("Archive with S3", () => {
+  const credentials = minioCredentials!;
+
+  it("writes archive to S3 via S3Client.write()", async () => {
+    const client = new Bun.S3Client(credentials);
+    const archive = new Bun.Archive({
+      "hello.txt": "Hello from Archive!",
+      "data.json": JSON.stringify({ test: true }),
+    });
+
+    const key = randomUUIDv7() + ".tar";
+    await client.write(key, archive);
+
+    // Verify by downloading and reading back
+    const downloaded = await client.file(key).bytes();
+    const readArchive = new Bun.Archive(downloaded);
+    const files = await readArchive.files();
+
+    expect(files.size).toBe(2);
+    expect(await files.get("hello.txt")!.text()).toBe("Hello from Archive!");
+    expect(await files.get("data.json")!.text()).toBe(JSON.stringify({ test: true }));
+
+    // Cleanup
+    await client.unlink(key);
+  });
+
+  it("writes archive to S3 via Bun.write() with s3:// URL", async () => {
+    const archive = new Bun.Archive({
+      "file1.txt": "content1",
+      "dir/file2.txt": "content2",
+    });
+
+    const key = randomUUIDv7() + ".tar";
+    const s3Url = `s3://${credentials.bucket}/${key}`;
+
+    await Bun.write(s3Url, archive, {
+      ...credentials,
+    });
+
+    // Verify by downloading
+    const s3File = Bun.file(s3Url, credentials);
+    const downloaded = await s3File.bytes();
+    const readArchive = new Bun.Archive(downloaded);
+    const files = await readArchive.files();
+
+    expect(files.size).toBe(2);
+    expect(await files.get("file1.txt")!.text()).toBe("content1");
+    expect(await files.get("dir/file2.txt")!.text()).toBe("content2");
+
+    // Cleanup
+    await s3File.delete();
+  });
+
+  it("writes archive with binary content to S3", async () => {
+    const client = new Bun.S3Client(credentials);
+    const binaryData = new Uint8Array([0x00, 0x01, 0x02, 0xff, 0xfe, 0xfd, 0x80, 0x7f]);
+    const archive = new Bun.Archive({
+      "binary.bin": binaryData,
+    });
+
+    const key = randomUUIDv7() + ".tar";
+    await client.write(key, archive);
+
+    // Verify binary data is preserved
+    const downloaded = await client.file(key).bytes();
+    const readArchive = new Bun.Archive(downloaded);
+    const files = await readArchive.files();
+    const extractedBinary = await files.get("binary.bin")!.bytes();
+
+    expect(extractedBinary).toEqual(binaryData);
+
+    // Cleanup
+    await client.unlink(key);
+  });
+
+  it("writes large archive to S3", async () => {
+    const client = new Bun.S3Client(credentials);
+
+    // Create archive with multiple files
+    const entries: Record<string, string> = {};
+    for (let i = 0; i < 50; i++) {
+      entries[`file${i.toString().padStart(3, "0")}.txt`] = `Content for file ${i}`;
+    }
+    const archive = new Bun.Archive(entries);
+
+    const key = randomUUIDv7() + ".tar";
+    await client.write(key, archive);
+
+    // Verify
+    const downloaded = await client.file(key).bytes();
+    const readArchive = new Bun.Archive(downloaded);
+    const files = await readArchive.files();
+
+    expect(files.size).toBe(50);
+    expect(await files.get("file000.txt")!.text()).toBe("Content for file 0");
+    expect(await files.get("file049.txt")!.text()).toBe("Content for file 49");
+
+    // Cleanup
+    await client.unlink(key);
+  });
+
+  it("writes archive via s3File.write()", async () => {
+    const client = new Bun.S3Client(credentials);
+    const archive = new Bun.Archive({
+      "test.txt": "Hello via s3File.write()!",
+    });
+
+    const key = randomUUIDv7() + ".tar";
+    const s3File = client.file(key);
+    await s3File.write(archive);
+
+    // Verify
+    const downloaded = await s3File.bytes();
+    const readArchive = new Bun.Archive(downloaded);
+    const files = await readArchive.files();
+
+    expect(files.size).toBe(1);
+    expect(await files.get("test.txt")!.text()).toBe("Hello via s3File.write()!");
+
+    // Cleanup
+    await s3File.delete();
+  });
+});