diff --git a/docs/runtime/archive.mdx b/docs/runtime/archive.mdx index a88a473711..e612ad17bb 100644 --- a/docs/runtime/archive.mdx +++ b/docs/runtime/archive.mdx @@ -10,21 +10,21 @@ Bun provides a fast, native implementation for working with tar archives through **Create an archive from files:** ```ts -const archive = Bun.Archive.from({ +const archive = new Bun.Archive({ "hello.txt": "Hello, World!", "data.json": JSON.stringify({ foo: "bar" }), "nested/file.txt": "Nested content", }); // Write to disk -await Bun.Archive.write("bundle.tar", archive); +await Bun.write("bundle.tar", archive); ``` **Extract an archive:** ```ts const tarball = await Bun.file("package.tar.gz").bytes(); -const archive = Bun.Archive.from(tarball); +const archive = new Bun.Archive(tarball); const entryCount = await archive.extract("./output"); console.log(`Extracted ${entryCount} entries`); ``` @@ -33,7 +33,7 @@ console.log(`Extracted ${entryCount} entries`); ```ts const tarball = await Bun.file("package.tar.gz").bytes(); -const archive = Bun.Archive.from(tarball); +const archive = new Bun.Archive(tarball); const files = await archive.files(); for (const [path, file] of files) { @@ -43,10 +43,11 @@ for (const [path, file] of files) { ## Creating Archives -Use `Bun.Archive.from()` to create an archive from an object where keys are file paths and values are file contents: +Use `new Bun.Archive()` to create an archive from an object where keys are file paths and values are file contents. By default, archives are uncompressed: ```ts -const archive = Bun.Archive.from({ +// Creates an uncompressed tar archive (default) +const archive = new Bun.Archive({ "README.md": "# My Project", "src/index.ts": "console.log('Hello');", "package.json": JSON.stringify({ name: "my-project" }), @@ -64,7 +65,7 @@ File contents can be: const data = "binary data"; const arrayBuffer = new ArrayBuffer(8); -const archive = Bun.Archive.from({ +const archive = new Bun.Archive({ "text.txt": "Plain text", "blob.bin": new Blob([data]), "bytes.bin": new Uint8Array([1, 2, 3, 4]), @@ -74,18 +75,19 @@ const archive = Bun.Archive.from({ ### Writing Archives to Disk -Use `Bun.Archive.write()` to create and write an archive in one operation: +Use `Bun.write()` to write an archive to disk: ```ts -// Write uncompressed tar -await Bun.Archive.write("output.tar", { +// Write uncompressed tar (default) +const archive = new Bun.Archive({ "file1.txt": "content1", "file2.txt": "content2", }); +await Bun.write("output.tar", archive); // Write gzipped tar -const files = { "src/index.ts": "console.log('Hello');" }; -await Bun.Archive.write("output.tar.gz", files, "gzip"); +const compressed = new Bun.Archive({ "src/index.ts": "console.log('Hello');" }, { compress: "gzip" }); +await Bun.write("output.tar.gz", compressed); ``` ### Getting Archive Bytes @@ -93,8 +95,7 @@ await Bun.Archive.write("output.tar.gz", files, "gzip"); Get the archive data as bytes or a Blob: ```ts -const files = { "hello.txt": "Hello, World!" }; -const archive = Bun.Archive.from(files); +const archive = new Bun.Archive({ "hello.txt": "Hello, World!" }); // As Uint8Array const bytes = await archive.bytes(); @@ -102,9 +103,10 @@ const bytes = await archive.bytes(); // As Blob const blob = await archive.blob(); -// With gzip compression -const gzippedBytes = await archive.bytes("gzip"); -const gzippedBlob = await archive.blob("gzip"); +// With gzip compression (set at construction) +const gzipped = new Bun.Archive({ "hello.txt": "Hello, World!" 
}, { compress: "gzip" }); +const gzippedBytes = await gzipped.bytes(); +const gzippedBlob = await gzipped.blob(); ``` ## Extracting Archives @@ -116,13 +118,13 @@ Create an archive from existing tar/tar.gz data: ```ts // From a file const tarball = await Bun.file("package.tar.gz").bytes(); -const archiveFromFile = Bun.Archive.from(tarball); +const archiveFromFile = new Bun.Archive(tarball); ``` ```ts // From a fetch response const response = await fetch("https://example.com/archive.tar.gz"); -const archiveFromFetch = Bun.Archive.from(await response.blob()); +const archiveFromFetch = new Bun.Archive(await response.blob()); ``` ### Extracting to Disk @@ -131,7 +133,7 @@ Use `.extract()` to write all files to a directory: ```ts const tarball = await Bun.file("package.tar.gz").bytes(); -const archive = Bun.Archive.from(tarball); +const archive = new Bun.Archive(tarball); const count = await archive.extract("./extracted"); console.log(`Extracted ${count} entries`); ``` @@ -148,7 +150,7 @@ Use glob patterns to extract only specific files. Patterns are matched against a ```ts const tarball = await Bun.file("package.tar.gz").bytes(); -const archive = Bun.Archive.from(tarball); +const archive = new Bun.Archive(tarball); // Extract only TypeScript files const tsCount = await archive.extract("./extracted", { glob: "**/*.ts" }); @@ -181,7 +183,7 @@ Use `.files()` to get archive contents as a `Map` of `File` objects without extr ```ts const tarball = await Bun.file("package.tar.gz").bytes(); -const archive = Bun.Archive.from(tarball); +const archive = new Bun.Archive(tarball); const files = await archive.files(); for (const [path, file] of files) { @@ -206,7 +208,7 @@ Archive operations can fail due to corrupted data, I/O errors, or invalid paths. ```ts try { const tarball = await Bun.file("package.tar.gz").bytes(); - const archive = Bun.Archive.from(tarball); + const archive = new Bun.Archive(tarball); const count = await archive.extract("./output"); console.log(`Extracted ${count} entries`); } catch (e: unknown) { @@ -227,7 +229,7 @@ try { Common error scenarios: -- **Corrupted/truncated archives** - `Archive.from()` loads the archive data; errors may be deferred until read/extract operations +- **Corrupted/truncated archives** - `new Archive()` loads the archive data; errors may be deferred until read/extract operations - **Permission denied** - `extract()` throws if the target directory is not writable - **Disk full** - `extract()` throws if there's insufficient space - **Invalid paths** - Operations throw for malformed file paths @@ -239,7 +241,7 @@ The count returned by `extract()` includes all successfully written entries (fil For additional security with untrusted archives, you can enumerate and validate paths before extraction: ```ts -const archive = Bun.Archive.from(untrustedData); +const archive = new Bun.Archive(untrustedData); const files = await archive.files(); // Optional: Custom validation for additional checks @@ -298,26 +300,28 @@ See [Bun.Glob](/docs/api/glob) for the full glob syntax including escaping and a ## Compression -Bun.Archive supports gzip compression for both reading and writing: +Bun.Archive creates uncompressed tar archives by default. Use `{ compress: "gzip" }` to enable gzip compression: ```ts +// Default: uncompressed tar +const archive = new Bun.Archive({ "hello.txt": "Hello, World!" 
});
+
 // Reading: automatically detects gzip
 const gzippedTarball = await Bun.file("archive.tar.gz").bytes();
-const archive = Bun.Archive.from(gzippedTarball);
+const readArchive = new Bun.Archive(gzippedTarball);
 
-// Writing: specify compression
-const files = { "hello.txt": "Hello, World!" };
-await Bun.Archive.write("output.tar.gz", files, "gzip");
+// Enable gzip compression
+const compressed = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip" });
 
-// Getting bytes: specify compression
-const gzippedBytes = await archive.bytes("gzip");
+// Gzip with custom level (1-12)
+const maxCompression = new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip", level: 12 });
 ```
 
-The compression argument accepts:
+The `options` argument accepts:
 
-- `"gzip"` - Enable gzip compression
-- `true` - Same as `"gzip"`
-- `false` or `undefined` - No compression
+- No options or `undefined` - Uncompressed tar (default)
+- `{ compress: "gzip" }` - Enable gzip compression at level 6
+- `{ compress: "gzip", level: number }` - Gzip with a custom level 1-12 (1 = fastest, 12 = smallest)
 
 ## Examples
 
@@ -339,15 +343,16 @@ for await (const path of glob.scan(".")) {
 // Add package.json
 files["package.json"] = await Bun.file("package.json").text();
 
-// Create compressed archive
-await Bun.Archive.write("bundle.tar.gz", files, "gzip");
+// Create compressed archive and write to disk
+const archive = new Bun.Archive(files, { compress: "gzip" });
+await Bun.write("bundle.tar.gz", archive);
 ```
 
 ### Extract and Process npm Package
 
 ```ts
 const response = await fetch("https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz");
-const archive = Bun.Archive.from(await response.blob());
+const archive = new Bun.Archive(await response.blob());
 
 // Get package.json
 const files = await archive.files("package/package.json");
@@ -365,7 +370,7 @@ if (packageJson) {
 import { readdir } from "node:fs/promises";
 import { join } from "node:path";
 
-async function archiveDirectory(dir: string): Promise<Bun.Archive> {
+async function archiveDirectory(dir: string, compress = false): Promise<Bun.Archive> {
   const files: Record<string, string> = {};
 
   async function walk(currentDir: string, prefix: string = "") {
@@ -384,11 +389,11 @@ async function archiveDirectory(dir: string): Promise<Bun.Archive> {
   }
 
   await walk(dir);
-  return Bun.Archive.from(files);
+  return new Bun.Archive(files, compress ? { compress: "gzip" } : undefined);
 }
 
-const archive = await archiveDirectory("./my-project");
-await Bun.Archive.write("my-project.tar.gz", archive, "gzip");
+const archive = await archiveDirectory("./my-project", true);
+await Bun.write("my-project.tar.gz", archive);
 ```
 
 ## Reference
 
@@ -396,14 +401,19 @@ await Bun.Archive.write("my-project.tar.gz", archive, "gzip");
 > **Note**: The following type signatures are simplified for documentation purposes. See [`packages/bun-types/bun.d.ts`](https://github.com/oven-sh/bun/blob/main/packages/bun-types/bun.d.ts) for the full type definitions.
 
 ```ts
-type ArchiveCompression = "gzip" | boolean;
-
 type ArchiveInput =
   | Record<string, string | Blob | Bun.ArrayBufferView | ArrayBufferLike>
   | Blob
   | Bun.ArrayBufferView
   | ArrayBufferLike;
 
+type ArchiveOptions = {
+  /** Compression algorithm. Currently only "gzip" is supported. */
+  compress?: "gzip";
+  /** Compression level 1-12 (default 6 when gzip is enabled). */
+  level?: number;
+};
+
 interface ArchiveExtractOptions {
   /** Glob pattern(s) to filter extraction. Supports negative patterns with "!" prefix.
*/
   glob?: string | readonly string[];
 }
 
@@ -412,13 +422,11 @@
 class Archive {
   /**
    * Create an Archive from input data
+   * @param data - Files to archive (as object) or existing archive data (as bytes/blob)
+   * @param options - Compression options. Uncompressed by default.
+   *                  Pass { compress: "gzip" } to enable compression.
    */
-  static from(data: ArchiveInput): Archive;
-
-  /**
-   * Write an archive directly to disk
-   */
-  static write(path: string, data: ArchiveInput | Archive, compress?: ArchiveCompression): Promise<void>;
+  constructor(data: ArchiveInput, options?: ArchiveOptions);
 
   /**
    * Extract archive to a directory
@@ -427,14 +435,14 @@
    */
   extract(path: string, options?: ArchiveExtractOptions): Promise<number>;
 
   /**
-   * Get archive as a Blob
+   * Get archive as a Blob (uses compression setting from constructor)
    */
-  blob(compress?: ArchiveCompression): Promise<Blob>;
+  blob(): Promise<Blob>;
 
   /**
-   * Get archive as a Uint8Array
+   * Get archive as a Uint8Array (uses compression setting from constructor)
    */
-  bytes(compress?: ArchiveCompression): Promise<Uint8Array<ArrayBuffer>>;
+  bytes(): Promise<Uint8Array<ArrayBuffer>>;
 
   /**
    * Get archive contents as File objects (regular files only, no directories)
diff --git a/packages/bun-types/bun.d.ts b/packages/bun-types/bun.d.ts
index 38493f9b77..543d973cc8 100644
--- a/packages/bun-types/bun.d.ts
+++ b/packages/bun-types/bun.d.ts
@@ -750,7 +750,7 @@ declare module "bun" {
    */
   function write(
     destination: BunFile | S3File | PathLike,
-    input: Blob | NodeJS.TypedArray | ArrayBufferLike | string | BlobPart[],
+    input: Blob | NodeJS.TypedArray | ArrayBufferLike | string | BlobPart[] | Archive,
     options?: {
       /**
        * If writing to a PathLike, set the permissions of the file.
@@ -6975,15 +6975,44 @@ declare module "bun" {
   /**
    * Compression format for archive output.
-   * - `"gzip"` - Compress with gzip
-   * - `true` - Same as `"gzip"`
-   * - `false` - Explicitly disable compression (no compression)
-   * - `undefined` - No compression (default behavior when omitted)
-   *
-   * Both `false` and `undefined` result in no compression; `false` can be used
-   * to explicitly indicate "no compression" in code where the intent should be clear.
+   * Currently only `"gzip"` is supported.
    */
-  type ArchiveCompression = "gzip" | boolean;
+  type ArchiveCompression = "gzip";
+
+  /**
+   * Options for creating an Archive instance.
+   *
+   * By default, archives are not compressed. Use `{ compress: "gzip" }` to enable compression.
+   *
+   * @example
+   * ```ts
+   * // No compression (default)
+   * new Bun.Archive(data);
+   *
+   * // Enable gzip with default level (6)
+   * new Bun.Archive(data, { compress: "gzip" });
+   *
+   * // Specify compression level
+   * new Bun.Archive(data, { compress: "gzip", level: 9 });
+   * ```
+   */
+  interface ArchiveOptions {
+    /**
+     * Compression algorithm to use.
+     * Currently only "gzip" is supported.
+     * If not specified, no compression is applied.
+     */
+    compress?: ArchiveCompression;
+    /**
+     * Compression level (1-12). Only applies when `compress` is set.
+     * - 1: Fastest compression, lowest ratio
+     * - 6: Default balance of speed and ratio
+     * - 12: Best compression ratio, slowest
+     *
+     * @default 6
+     */
+    level?: number;
+  }
 
   /**
    * Options for extracting archive contents.
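As a quick illustration of the `ArchiveOptions` contract above, here is a minimal sketch assuming the constructor behaves as typed in this diff; the `0x1f 0x8b` comparison is just the standard gzip magic-number check used for illustration, not part of the API:

```ts
// Uncompressed by default; compression is opt-in at construction time.
const plain = new Bun.Archive({ "a.txt": "hello" });
const zipped = new Bun.Archive({ "a.txt": "hello" }, { compress: "gzip", level: 9 });

const plainBytes = await plain.bytes();
const zippedBytes = await zipped.bytes();

// gzip streams always begin with the magic bytes 0x1f 0x8b.
console.log(plainBytes[0] === 0x1f && plainBytes[1] === 0x8b); // false (plain tar)
console.log(zippedBytes[0] === 0x1f && zippedBytes[1] === 0x8b); // true
```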
@@ -7031,7 +7060,7 @@ declare module "bun" { * @example * **Create an archive from an object:** * ```ts - * const archive = Bun.Archive.from({ + * const archive = new Bun.Archive({ * "hello.txt": "Hello, World!", * "data.json": JSON.stringify({ foo: "bar" }), * "binary.bin": new Uint8Array([1, 2, 3, 4]), @@ -7039,9 +7068,20 @@ declare module "bun" { * ``` * * @example + * **Create a gzipped archive:** + * ```ts + * const archive = new Bun.Archive({ + * "hello.txt": "Hello, World!", + * }, { compress: "gzip" }); + * + * // Or with a specific compression level (1-12) + * const archive = new Bun.Archive(data, { compress: "gzip", level: 9 }); + * ``` + * + * @example * **Extract an archive to disk:** * ```ts - * const archive = Bun.Archive.from(tarballBytes); + * const archive = new Bun.Archive(tarballBytes); * const entryCount = await archive.extract("./output"); * console.log(`Extracted ${entryCount} entries`); * ``` @@ -7049,7 +7089,7 @@ declare module "bun" { * @example * **Get archive contents as a Map of File objects:** * ```ts - * const archive = Bun.Archive.from(tarballBytes); + * const archive = new Bun.Archive(tarballBytes); * const entries = await archive.files(); * for (const [path, file] of entries) { * console.log(path, await file.text()); @@ -7062,36 +7102,50 @@ declare module "bun" { * await Bun.Archive.write("bundle.tar.gz", { * "src/index.ts": sourceCode, * "package.json": packageJson, - * }, "gzip"); + * }, { compress: "gzip" }); * ``` */ export class Archive { /** * Create an `Archive` instance from input data. * + * By default, archives are not compressed. Use `{ compress: "gzip" }` to enable compression. + * * @param data - The input data for the archive: * - **Object**: Creates a new tarball with the object's keys as file paths and values as file contents * - **Blob/TypedArray/ArrayBuffer**: Wraps existing archive data (tar or tar.gz) - * - * @returns A new `Archive` instance + * @param options - Optional archive options including compression settings. + * Defaults to no compression if omitted. * * @example - * **From an object (creates new tarball):** + * **From an object (creates uncompressed tarball):** * ```ts - * const archive = Bun.Archive.from({ + * const archive = new Bun.Archive({ * "hello.txt": "Hello, World!", * "nested/file.txt": "Nested content", * }); * ``` * * @example + * **With gzip compression:** + * ```ts + * const archive = new Bun.Archive(data, { compress: "gzip" }); + * ``` + * + * @example + * **With explicit gzip compression level:** + * ```ts + * const archive = new Bun.Archive(data, { compress: "gzip", level: 12 }); + * ``` + * + * @example * **From existing archive data:** * ```ts * const response = await fetch("https://example.com/package.tar.gz"); - * const archive = Bun.Archive.from(await response.blob()); + * const archive = new Bun.Archive(await response.blob()); * ``` */ - static from(data: ArchiveInput): Archive; + constructor(data: ArchiveInput, options?: ArchiveOptions); /** * Create and write an archive directly to disk in one operation. @@ -7100,8 +7154,8 @@ declare module "bun" { * as it streams the data directly to disk. 
     *
      * @param path - The file path to write the archive to
-     * @param data - The input data for the archive (same as `Archive.from()`)
-     * @param compress - Optional compression: `"gzip"`, `true` for gzip, or `false`/`undefined` for none
+     * @param data - The input data for the archive (same as `new Archive()`)
+     * @param options - Optional archive options including compression settings
      *
      * @returns A promise that resolves when the write is complete
      *
      * @example
@@ -7117,10 +7171,10 @@
      * @example
      * **Write gzipped tarball:**
      * ```ts
-     * await Bun.Archive.write("output.tar.gz", files, "gzip");
+     * await Bun.Archive.write("output.tar.gz", files, { compress: "gzip" });
      * ```
      */
-    static write(path: string, data: ArchiveInput | Archive, compress?: ArchiveCompression): Promise<void>;
+    static write(path: string, data: ArchiveInput | Archive, options?: ArchiveOptions): Promise<void>;
 
     /**
      * Extract the archive contents to a directory on disk.
@@ -7136,7 +7190,7 @@
      * @example
      * **Extract all entries:**
      * ```ts
-     * const archive = Bun.Archive.from(tarballBytes);
+     * const archive = new Bun.Archive(tarballBytes);
      * const count = await archive.extract("./extracted");
      * console.log(`Extracted ${count} entries`);
      * ```
@@ -7166,42 +7220,48 @@
     /**
      * Get the archive contents as a `Blob`.
      *
-     * @param compress - Optional compression: `"gzip"`, `true` for gzip, or `false`/`undefined` for none
+     * Uses the compression settings specified when the Archive was created.
+     *
      * @returns A promise that resolves with the archive data as a Blob
      *
      * @example
-     * **Get uncompressed tarball:**
+     * **Get tarball as Blob:**
      * ```ts
+     * const archive = new Bun.Archive(data);
      * const blob = await archive.blob();
      * ```
      *
      * @example
-     * **Get gzipped tarball:**
+     * **Get gzipped tarball as Blob:**
      * ```ts
-     * const gzippedBlob = await archive.blob("gzip");
+     * const archive = new Bun.Archive(data, { compress: "gzip" });
+     * const gzippedBlob = await archive.blob();
      * ```
      */
-    blob(compress?: ArchiveCompression): Promise<Blob>;
+    blob(): Promise<Blob>;
 
     /**
      * Get the archive contents as a `Uint8Array`.
      *
-     * @param compress - Optional compression: `"gzip"`, `true` for gzip, or `false`/`undefined` for none
+     * Uses the compression settings specified when the Archive was created.
+     *
      * @returns A promise that resolves with the archive data as a Uint8Array
      *
      * @example
-     * **Get uncompressed tarball bytes:**
+     * **Get tarball bytes:**
     * ```ts
+     * const archive = new Bun.Archive(data);
      * const bytes = await archive.bytes();
      * ```
      *
      * @example
      * **Get gzipped tarball bytes:**
      * ```ts
-     * const gzippedBytes = await archive.bytes("gzip");
+     * const archive = new Bun.Archive(data, { compress: "gzip" });
+     * const gzippedBytes = await archive.bytes();
      * ```
      */
-    bytes(compress?: ArchiveCompression): Promise<Uint8Array<ArrayBuffer>>;
+    bytes(): Promise<Uint8Array<ArrayBuffer>>;
 
     /**
      * Get the archive contents as a `Map` of `File` objects.
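Taken together with the s3.d.ts changes below, the widened input unions mean an `Archive` can be passed anywhere a `Blob`-like body is accepted. A hedged sketch of the intended call sites, assuming the overloads land as typed; the bucket key is a placeholder, and the default `Bun.s3` client is assumed to be configured through the usual S3 environment variables:

```ts
const archive = new Bun.Archive({ "report.txt": "done" });

// Local disk, via the widened Bun.write() input union above
await Bun.write("report.tar", archive);

// S3, via the widened S3File.write() union below (placeholder key)
const remote = Bun.s3.file("backups/report.tar");
await remote.write(archive);
```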
diff --git a/packages/bun-types/s3.d.ts b/packages/bun-types/s3.d.ts index 381df89bac..d992764dca 100644 --- a/packages/bun-types/s3.d.ts +++ b/packages/bun-types/s3.d.ts @@ -609,7 +609,17 @@ declare module "bun" { * }); */ write( - data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer | Request | Response | BunFile | S3File | Blob, + data: + | string + | ArrayBufferView + | ArrayBuffer + | SharedArrayBuffer + | Request + | Response + | BunFile + | S3File + | Blob + | Archive, options?: S3Options, ): Promise; @@ -920,7 +930,8 @@ declare module "bun" { | BunFile | S3File | Blob - | File, + | File + | Archive, options?: S3Options, ): Promise; @@ -970,7 +981,8 @@ declare module "bun" { | BunFile | S3File | Blob - | File, + | File + | Archive, options?: S3Options, ): Promise; diff --git a/src/bun.js/api/Archive.classes.ts b/src/bun.js/api/Archive.classes.ts index 64c35caa44..45afe3a617 100644 --- a/src/bun.js/api/Archive.classes.ts +++ b/src/bun.js/api/Archive.classes.ts @@ -8,10 +8,6 @@ export default [ configurable: false, JSType: "0b11101110", klass: { - from: { - fn: "from", - length: 1, - }, write: { fn: "write", length: 2, diff --git a/src/bun.js/api/Archive.zig b/src/bun.js/api/Archive.zig index f8a0803f26..bd1c65d6f9 100644 --- a/src/bun.js/api/Archive.zig +++ b/src/bun.js/api/Archive.zig @@ -5,8 +5,19 @@ pub const toJS = js.toJS; pub const fromJS = js.fromJS; pub const fromJSDirect = js.fromJSDirect; +/// Compression options for the archive +pub const Compression = union(enum) { + none, + gzip: struct { + /// Compression level: 1 (fastest) to 12 (maximum compression). Default is 6. + level: u8 = 6, + }, +}; + /// The underlying data for the archive - uses Blob.Store for thread-safe ref counting store: *jsc.WebCore.Blob.Store, +/// Compression settings for this archive +compress: Compression = .none, pub fn finalize(this: *Archive) void { jsc.markBinding(@src()); @@ -65,47 +76,95 @@ fn countFilesInArchive(data: []const u8) u32 { return count; } -/// Constructor: new Archive() - throws an error since users should use Archive.from() -pub fn constructor(globalThis: *jsc.JSGlobalObject, _: *jsc.CallFrame) bun.JSError!*Archive { - return globalThis.throwInvalidArguments("Archive cannot be constructed directly. Use Archive.from() instead.", .{}); -} - -/// Static method: Archive.from(data) +/// Constructor: new Archive(data, options?) 
/// Creates an Archive from either: /// - An object { [path: string]: Blob | string | ArrayBufferView | ArrayBufferLike } /// - A Blob, ArrayBufferView, or ArrayBufferLike (assumes it's already a valid archive) -pub fn from(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!jsc.JSValue { - const arg = callframe.argumentsAsArray(1)[0]; - if (arg == .zero) { - return globalThis.throwInvalidArguments("Archive.from requires an argument", .{}); +/// Options: +/// - compress: "gzip" - Enable gzip compression +/// - level: number (1-12) - Compression level (default 6) +/// When no options are provided, no compression is applied +pub fn constructor(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!*Archive { + const data_arg, const options_arg = callframe.argumentsAsArray(2); + if (data_arg == .zero) { + return globalThis.throwInvalidArguments("new Archive() requires an argument", .{}); } + // Parse compression options + const compress = try parseCompressionOptions(globalThis, options_arg); + // For Blob/Archive, ref the existing store (zero-copy) - if (arg.as(jsc.WebCore.Blob)) |blob_ptr| { + if (data_arg.as(jsc.WebCore.Blob)) |blob_ptr| { if (blob_ptr.store) |store| { store.ref(); - return bun.new(Archive, .{ .store = store }).toJS(globalThis); + return bun.new(Archive, .{ .store = store, .compress = compress }); } } // For ArrayBuffer/TypedArray, copy the data - if (arg.asArrayBuffer(globalThis)) |array_buffer| { + if (data_arg.asArrayBuffer(globalThis)) |array_buffer| { const data = try bun.default_allocator.dupe(u8, array_buffer.slice()); - return createArchive(globalThis, data); + return createArchive(data, compress); } // For plain objects, build a tarball - if (arg.isObject()) { - const data = try buildTarballFromObject(globalThis, arg); - return createArchive(globalThis, data); + if (data_arg.isObject()) { + const data = try buildTarballFromObject(globalThis, data_arg); + return createArchive(data, compress); } return globalThis.throwInvalidArguments("Expected an object, Blob, TypedArray, or ArrayBuffer", .{}); } -fn createArchive(globalThis: *jsc.JSGlobalObject, data: []u8) jsc.JSValue { +/// Parse compression options from JS value +/// Returns .none if no compression specified, caller must handle defaults +fn parseCompressionOptions(globalThis: *jsc.JSGlobalObject, options_arg: jsc.JSValue) bun.JSError!Compression { + // No options provided means no compression (caller handles defaults) + if (options_arg.isUndefinedOrNull()) { + return .none; + } + + if (!options_arg.isObject()) { + return globalThis.throwInvalidArguments("Archive: options must be an object", .{}); + } + + // Check for compress option + if (try options_arg.getTruthy(globalThis, "compress")) |compress_val| { + // compress must be "gzip" + if (!compress_val.isString()) { + return globalThis.throwInvalidArguments("Archive: compress option must be a string", .{}); + } + + const compress_str = try compress_val.toSlice(globalThis, bun.default_allocator); + defer compress_str.deinit(); + + if (!bun.strings.eqlComptime(compress_str.slice(), "gzip")) { + return globalThis.throwInvalidArguments("Archive: compress option must be \"gzip\"", .{}); + } + + // Parse level option (1-12, default 6) + var level: u8 = 6; + if (try options_arg.getTruthy(globalThis, "level")) |level_val| { + if (!level_val.isNumber()) { + return globalThis.throwInvalidArguments("Archive: level must be a number", .{}); + } + const level_num = level_val.toInt64(); + if (level_num < 1 or level_num > 12) { + return 
globalThis.throwInvalidArguments("Archive: level must be between 1 and 12", .{});
+            }
+            level = @intCast(level_num);
+        }
+
+        return .{ .gzip = .{ .level = level } };
+    }
+
+    // No compress option specified in options object means no compression
+    return .none;
+}
+
+fn createArchive(data: []u8, compress: Compression) *Archive {
     const store = jsc.WebCore.Blob.Store.init(data, bun.default_allocator);
-    return bun.new(Archive, .{ .store = store }).toJS(globalThis);
+    return bun.new(Archive, .{ .store = store, .compress = compress });
 }
 
 /// Shared helper that builds tarball bytes from a JS object
@@ -212,12 +271,15 @@ fn getEntryData(globalThis: *jsc.JSGlobalObject, value: jsc.JSValue, allocator:
     return value.toSlice(globalThis, allocator);
 }
 
-/// Static method: Archive.write(path, data, compress?)
-/// Creates and writes an archive to disk in one operation
+/// Static method: Archive.write(path, data, options?)
+/// Creates and writes an archive to disk in one operation.
+/// For Archive instances, uses the archive's compression settings unless overridden by options.
+/// Options:
+/// - compress: "gzip", level?: number (1-12, default 6) - Compression settings
 pub fn write(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!jsc.JSValue {
-    const path_arg, const data_arg, const compress_arg = callframe.argumentsAsArray(3);
+    const path_arg, const data_arg, const options_arg = callframe.argumentsAsArray(3);
     if (data_arg == .zero) {
-        return globalThis.throwInvalidArguments("Archive.write requires at least 2 arguments (path, data)", .{});
+        return globalThis.throwInvalidArguments("Archive.write requires 2 arguments (path, data)", .{});
     }
 
     // Get the path
@@ -228,61 +290,37 @@ pub fn write(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSE
     const path_slice = try path_arg.toSlice(globalThis, bun.default_allocator);
     defer path_slice.deinit();
 
-    // Determine compression
-    const use_gzip = try parseCompressArg(globalThis, compress_arg);
+    // Parse options for compression override
+    const options_compress = try parseCompressionOptions(globalThis, options_arg);
 
-    // Try to use store reference (zero-copy) for Archive/Blob
+    // For Archive instances, use options override or archive's compression settings
     if (fromJS(data_arg)) |archive| {
-        return startWriteTask(globalThis, .{ .store = archive.store }, path_slice.slice(), use_gzip);
+        const compress = if (options_compress != .none) options_compress else archive.compress;
+        return startWriteTask(globalThis, .{ .store = archive.store }, path_slice.slice(), compress);
     }
 
+    // For Blobs, use store reference with options compression
     if (data_arg.as(jsc.WebCore.Blob)) |blob_ptr| {
         if (blob_ptr.store) |store| {
-            return startWriteTask(globalThis, .{ .store = store }, path_slice.slice(), use_gzip);
+            return startWriteTask(globalThis, .{ .store = store }, path_slice.slice(), options_compress);
         }
     }
 
-    // Fall back to copying data for ArrayBuffer/TypedArray/objects
-    const archive_data = try getArchiveData(globalThis, data_arg);
-    return startWriteTask(globalThis, .{ .owned = archive_data }, path_slice.slice(), use_gzip);
-}
-
-/// Get archive data from a value, returning owned bytes
-fn getArchiveData(globalThis: *jsc.JSGlobalObject, arg: jsc.JSValue) bun.JSError![]u8 {
-    // Check if it's a typed array, ArrayBuffer, or similar
-    if (arg.asArrayBuffer(globalThis)) |array_buffer| {
-        return bun.default_allocator.dupe(u8, array_buffer.slice());
+    // For ArrayBuffer/TypedArray, copy the data with options compression
+    if 
(data_arg.asArrayBuffer(globalThis)) |array_buffer| { + const data = try bun.default_allocator.dupe(u8, array_buffer.slice()); + return startWriteTask(globalThis, .{ .owned = data }, path_slice.slice(), options_compress); } - // Check if it's an object with entries (plain object) - build tarball - if (arg.isObject()) { - return buildTarballFromObject(globalThis, arg); + // For plain objects, build a tarball with options compression + if (data_arg.isObject()) { + const data = try buildTarballFromObject(globalThis, data_arg); + return startWriteTask(globalThis, .{ .owned = data }, path_slice.slice(), options_compress); } return globalThis.throwInvalidArguments("Expected an object, Blob, TypedArray, ArrayBuffer, or Archive", .{}); } -fn parseCompressArg(globalThis: *jsc.JSGlobalObject, arg: jsc.JSValue) bun.JSError!bool { - if (arg.isUndefinedOrNull()) { - return false; - } - - if (arg.isBoolean()) { - return arg.toBoolean(); - } - - if (arg.isString()) { - const str = try arg.toSlice(globalThis, bun.default_allocator); - defer str.deinit(); - if (std.mem.eql(u8, str.slice(), "gzip")) { - return true; - } - return globalThis.throwInvalidArguments("Archive: compress argument must be 'gzip', a boolean, or undefined", .{}); - } - - return globalThis.throwInvalidArguments("Archive: compress argument must be 'gzip', a boolean, or undefined", .{}); -} - /// Instance method: archive.extract(path, options?) /// Extracts the archive to the given path /// Options: @@ -379,20 +417,16 @@ fn freePatterns(patterns: []const []const u8) void { bun.default_allocator.free(patterns); } -/// Instance method: archive.blob(compress?) -/// Returns Promise with the archive data -pub fn blob(this: *Archive, globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!jsc.JSValue { - const compress_arg = callframe.argumentsAsArray(1)[0]; - const use_gzip = try parseCompressArg(globalThis, compress_arg); - return startBlobTask(globalThis, this.store, use_gzip, .blob); +/// Instance method: archive.blob() +/// Returns Promise with the archive data (compressed if gzip was set in options) +pub fn blob(this: *Archive, globalThis: *jsc.JSGlobalObject, _: *jsc.CallFrame) bun.JSError!jsc.JSValue { + return startBlobTask(globalThis, this.store, this.compress, .blob); } -/// Instance method: archive.bytes(compress?) -/// Returns Promise with the archive data -pub fn bytes(this: *Archive, globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!jsc.JSValue { - const compress_arg = callframe.argumentsAsArray(1)[0]; - const use_gzip = try parseCompressArg(globalThis, compress_arg); - return startBlobTask(globalThis, this.store, use_gzip, .bytes); +/// Instance method: archive.bytes() +/// Returns Promise with the archive data (compressed if gzip was set in options) +pub fn bytes(this: *Archive, globalThis: *jsc.JSGlobalObject, _: *jsc.CallFrame) bun.JSError!jsc.JSValue { + return startBlobTask(globalThis, this.store, this.compress, .bytes); } /// Instance method: archive.files(glob?) 
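Because the hunk above binds compression at construction, `bytes()` and `blob()` take no arguments and simply honor the instance setting. A minimal round-trip sketch assuming the constructor semantics in this patch; `Bun.gunzipSync` is Bun's existing gzip helper, unrelated to this change:

```ts
const gz = new Bun.Archive({ "a.txt": "hi" }, { compress: "gzip" });
const compressed = await gz.bytes(); // gzipped output, per the constructor option

// Inflate back to a plain tarball and re-open it as an Archive.
const tar = Bun.gunzipSync(compressed);
const reopened = new Bun.Archive(tar);
console.log((await reopened.files()).size); // expected: 1
```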
@@ -578,15 +612,17 @@ const BlobContext = struct { }; store: *jsc.WebCore.Blob.Store, - use_gzip: bool, + compress: Compression, output_type: OutputType, result: Result = .{ .uncompressed = {} }, fn run(this: *BlobContext) Result { - if (this.use_gzip) { - return .{ .compressed = compressGzip(this.store.sharedView()) catch |e| return .{ .err = e } }; + switch (this.compress) { + .gzip => |opts| { + return .{ .compressed = compressGzip(this.store.sharedView(), opts.level) catch |e| return .{ .err = e } }; + }, + .none => return .{ .uncompressed = {} }, } - return .{ .uncompressed = {} }; } fn runFromJS(this: *BlobContext, globalThis: *jsc.JSGlobalObject) bun.JSError!PromiseResult { @@ -617,13 +653,13 @@ const BlobContext = struct { pub const BlobTask = AsyncTask(BlobContext); -fn startBlobTask(globalThis: *jsc.JSGlobalObject, store: *jsc.WebCore.Blob.Store, use_gzip: bool, output_type: BlobContext.OutputType) bun.JSError!jsc.JSValue { +fn startBlobTask(globalThis: *jsc.JSGlobalObject, store: *jsc.WebCore.Blob.Store, compress: Compression, output_type: BlobContext.OutputType) bun.JSError!jsc.JSValue { store.ref(); errdefer store.deref(); const task = try BlobTask.create(globalThis, .{ .store = store, - .use_gzip = use_gzip, + .compress = compress, .output_type = output_type, }); @@ -646,7 +682,7 @@ const WriteContext = struct { data: Data, path: [:0]const u8, - use_gzip: bool, + compress: Compression, result: Result = .{ .success = {} }, fn run(this: *WriteContext) Result { @@ -654,11 +690,11 @@ const WriteContext = struct { .owned => |d| d, .store => |s| s.sharedView(), }; - const data_to_write = if (this.use_gzip) - compressGzip(source_data) catch |e| return .{ .err = e } - else - source_data; - defer if (this.use_gzip) bun.default_allocator.free(data_to_write); + const data_to_write = switch (this.compress) { + .gzip => |opts| compressGzip(source_data, opts.level) catch |e| return .{ .err = e }, + .none => source_data, + }; + defer if (this.compress != .none) bun.default_allocator.free(data_to_write); const file = switch (bun.sys.File.openat(.cwd(), this.path, bun.O.CREAT | bun.O.WRONLY | bun.O.TRUNC, 0o644)) { .err => |err| return .{ .sys_err = err.clone(bun.default_allocator) }, @@ -699,7 +735,7 @@ fn startWriteTask( globalThis: *jsc.JSGlobalObject, data: WriteContext.Data, path: []const u8, - use_gzip: bool, + compress: Compression, ) bun.JSError!jsc.JSValue { const path_z = try bun.default_allocator.dupeZ(u8, path); errdefer bun.default_allocator.free(path_z); @@ -714,7 +750,7 @@ fn startWriteTask( const task = try WriteTask.create(globalThis, .{ .data = data, .path = path_z, - .use_gzip = use_gzip, + .compress = compress, }); const promise_js = task.promise.value(); @@ -869,10 +905,10 @@ fn startFilesTask(globalThis: *jsc.JSGlobalObject, store: *jsc.WebCore.Blob.Stor // Helpers // ============================================================================ -fn compressGzip(data: []const u8) ![]u8 { +fn compressGzip(data: []const u8, level: u8) ![]u8 { libdeflate.load(); - const compressor = libdeflate.Compressor.alloc(6) orelse return error.GzipInitFailed; + const compressor = libdeflate.Compressor.alloc(@intCast(level)) orelse return error.GzipInitFailed; defer compressor.deinit(); const max_size = compressor.maxBytesNeeded(data, .gzip); diff --git a/src/bun.js/webcore/Blob.zig b/src/bun.js/webcore/Blob.zig index 52296ccc5a..8e06b05c47 100644 --- a/src/bun.js/webcore/Blob.zig +++ b/src/bun.js/webcore/Blob.zig @@ -1484,6 +1484,12 @@ pub fn writeFileInternal(globalThis: 
*jsc.JSGlobalObject, path_or_blob_: *PathOr } } + // Check for Archive - allows Bun.write() and S3 writes to accept Archive instances + if (data.as(Archive)) |archive| { + archive.store.ref(); + break :brk Blob.initWithStore(archive.store, globalThis); + } + break :brk try Blob.get( globalThis, data, @@ -4828,6 +4834,7 @@ const NewReadFileHandler = read_file.NewReadFileHandler; const string = []const u8; +const Archive = @import("../api/Archive.zig"); const Environment = @import("../../env.zig"); const S3File = @import("./S3File.zig"); const std = @import("std"); diff --git a/test/js/bun/archive.test.ts b/test/js/bun/archive.test.ts index 58882c07d4..50e751f8c1 100644 --- a/test/js/bun/archive.test.ts +++ b/test/js/bun/archive.test.ts @@ -3,9 +3,9 @@ import { tempDir } from "harness"; import { join } from "path"; describe("Bun.Archive", () => { - describe("Archive.from", () => { + describe("new Archive()", () => { test("creates archive from object with string values", async () => { - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "hello.txt": "Hello, World!", "data.json": JSON.stringify({ foo: "bar" }), }); @@ -14,7 +14,7 @@ describe("Bun.Archive", () => { }); test("creates archive from object with Blob values", async () => { - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "blob1.txt": new Blob(["Hello from Blob"]), "blob2.txt": new Blob(["Another Blob"]), }); @@ -24,7 +24,7 @@ describe("Bun.Archive", () => { test("creates archive from object with Uint8Array values", async () => { const encoder = new TextEncoder(); - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "bytes1.txt": encoder.encode("Hello from Uint8Array"), "bytes2.txt": encoder.encode("Another Uint8Array"), }); @@ -34,7 +34,7 @@ describe("Bun.Archive", () => { test("creates archive from object with ArrayBuffer values", async () => { const encoder = new TextEncoder(); - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "buffer1.txt": encoder.encode("Hello from ArrayBuffer").buffer, "buffer2.txt": encoder.encode("Another ArrayBuffer").buffer, }); @@ -44,7 +44,7 @@ describe("Bun.Archive", () => { test("creates archive from object with mixed value types", async () => { const encoder = new TextEncoder(); - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "string.txt": "String content", "blob.txt": new Blob(["Blob content"]), "uint8.txt": encoder.encode("Uint8Array content"), @@ -56,7 +56,7 @@ describe("Bun.Archive", () => { test("creates archive from Blob", async () => { // First create an archive with some content - const sourceArchive = Bun.Archive.from({ + const sourceArchive = new Bun.Archive({ "test.txt": "test content", }); @@ -64,35 +64,35 @@ describe("Bun.Archive", () => { expect(blob).toBeInstanceOf(Blob); // Create new archive from the blob - const archive = Bun.Archive.from(blob); + const archive = new Bun.Archive(blob); expect(archive).toBeInstanceOf(Bun.Archive); }); test("creates archive from ArrayBuffer", async () => { - const sourceArchive = Bun.Archive.from({ + const sourceArchive = new Bun.Archive({ "test.txt": "test content", }); const bytes = await sourceArchive.bytes(); const buffer = bytes.buffer; - const archive = Bun.Archive.from(buffer); + const archive = new Bun.Archive(buffer); expect(archive).toBeInstanceOf(Bun.Archive); }); test("creates archive from Uint8Array", async () => { - const sourceArchive = Bun.Archive.from({ + const sourceArchive = new Bun.Archive({ "test.txt": "test content", }); 
const bytes = await sourceArchive.bytes(); - const archive = Bun.Archive.from(bytes); + const archive = new Bun.Archive(bytes); expect(archive).toBeInstanceOf(Bun.Archive); }); test("creates archive with nested directory structure", async () => { - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "root.txt": "Root file", "dir1/file1.txt": "File in dir1", "dir1/dir2/file2.txt": "File in dir1/dir2", @@ -103,7 +103,7 @@ describe("Bun.Archive", () => { }); test("creates archive with empty string value", async () => { - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "empty.txt": "", }); @@ -113,38 +113,38 @@ describe("Bun.Archive", () => { test("throws with no arguments", () => { expect(() => { // @ts-expect-error - testing runtime behavior - Bun.Archive.from(); + new Bun.Archive(); }).toThrow(); }); test("throws with invalid input type (number)", () => { expect(() => { // @ts-expect-error - testing runtime behavior - Bun.Archive.from(123); + new Bun.Archive(123); }).toThrow(); }); test("throws with invalid input type (null)", () => { expect(() => { // @ts-expect-error - testing runtime behavior - Bun.Archive.from(null); + new Bun.Archive(null); }).toThrow(); }); test("converts non-string/buffer values to strings", async () => { // @ts-expect-error - testing runtime behavior - const archive = Bun.Archive.from({ "file.txt": 123 }); + const archive = new Bun.Archive({ "file.txt": 123 }, {}); // The archive should be created successfully - number is converted to string expect(archive).toBeDefined(); const bytes = await archive.bytes(); - // Should contain "123" somewhere in the tarball + // Should contain "123" somewhere in the tarball (use {} to get uncompressed tar) expect(new TextDecoder().decode(bytes)).toContain("123"); }); }); describe("archive.blob()", () => { test("returns a Blob", async () => { - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "hello.txt": "Hello, World!", }); @@ -154,7 +154,7 @@ describe("Bun.Archive", () => { }); test("returns consistent output for same input", async () => { - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "hello.txt": "Hello, World!", }); @@ -163,13 +163,22 @@ describe("Bun.Archive", () => { expect(blob1.size).toBe(blob2.size); }); - test("with gzip returns gzipped blob", async () => { - const archive = Bun.Archive.from({ - "hello.txt": "Hello, World!", - }); + test("with gzip option returns gzipped blob", async () => { + const regularArchive = new Bun.Archive( + { + "hello.txt": "Hello, World!", + }, + {}, // Empty options = no compression + ); + const gzipArchive = new Bun.Archive( + { + "hello.txt": "Hello, World!", + }, + { compress: "gzip" }, + ); - const regularBlob = await archive.blob(); - const gzippedBlob = await archive.blob("gzip"); + const regularBlob = await regularArchive.blob(); + const gzippedBlob = await gzipArchive.blob(); expect(gzippedBlob).toBeInstanceOf(Blob); // Gzipped should be different size @@ -178,32 +187,77 @@ describe("Bun.Archive", () => { test("gzip is smaller for larger repetitive data", async () => { const largeContent = Buffer.alloc(13000, "Hello, World!"); - const archive = Bun.Archive.from({ - "large.txt": largeContent, - }); + const regularArchive = new Bun.Archive( + { + "large.txt": largeContent, + }, + {}, // Empty options = no compression + ); + const gzipArchive = new Bun.Archive( + { + "large.txt": largeContent, + }, + { compress: "gzip" }, + ); - const regularBlob = await archive.blob(); - const gzippedBlob = await 
archive.blob("gzip"); + const regularBlob = await regularArchive.blob(); + const gzippedBlob = await gzipArchive.blob(); // For large repetitive data, gzip should be smaller expect(gzippedBlob.size).toBeLessThan(regularBlob.size); }); - test("throws with invalid compress argument", async () => { - const archive = Bun.Archive.from({ - "hello.txt": "Hello, World!", + test("gzip level affects compression ratio", async () => { + const largeContent = Buffer.alloc(50000, "Hello, World!"); + const level1Archive = new Bun.Archive({ "large.txt": largeContent }, { compress: "gzip", level: 1 }); + const level12Archive = new Bun.Archive({ "large.txt": largeContent }, { compress: "gzip", level: 12 }); + + const level1Blob = await level1Archive.blob(); + const level12Blob = await level12Archive.blob(); + + // Level 12 should produce smaller output than level 1 + expect(level12Blob.size).toBeLessThan(level1Blob.size); + }); + + test("defaults to no compression when no options provided", async () => { + const largeContent = Buffer.alloc(13000, "Hello, World!"); + + // No options = no compression + const defaultArchive = new Bun.Archive({ + "large.txt": largeContent, }); - await expect(async () => { - // @ts-expect-error - testing runtime behavior - await archive.blob("invalid"); + // Explicit empty options = also no compression + const emptyOptionsArchive = new Bun.Archive({ "large.txt": largeContent }, {}); + + // Explicit gzip compression + const compressedArchive = new Bun.Archive({ "large.txt": largeContent }, { compress: "gzip" }); + + const defaultBlob = await defaultArchive.blob(); + const emptyOptionsBlob = await emptyOptionsArchive.blob(); + const compressedBlob = await compressedArchive.blob(); + + // Default should match empty options (both uncompressed) + expect(defaultBlob.size).toBe(emptyOptionsBlob.size); + + // Compressed should be smaller than uncompressed + expect(compressedBlob.size).toBeLessThan(defaultBlob.size); + }); + + test("throws with invalid gzip level", () => { + expect(() => { + new Bun.Archive({ "hello.txt": "Hello, World!" }, { compress: "gzip", level: 0 }); + }).toThrow(); + + expect(() => { + new Bun.Archive({ "hello.txt": "Hello, World!" 
}, { compress: "gzip", level: 13 }); }).toThrow(); }); }); describe("archive.bytes()", () => { test("returns a Uint8Array", async () => { - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "hello.txt": "Hello, World!", }); @@ -213,7 +267,7 @@ describe("Bun.Archive", () => { }); test("returns consistent output for same input", async () => { - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "hello.txt": "Hello, World!", }); @@ -222,13 +276,22 @@ describe("Bun.Archive", () => { expect(bytes1.length).toBe(bytes2.length); }); - test("with gzip returns gzipped bytes", async () => { - const archive = Bun.Archive.from({ - "hello.txt": "Hello, World!", - }); + test("with gzip option returns gzipped bytes", async () => { + const regularArchive = new Bun.Archive( + { + "hello.txt": "Hello, World!", + }, + {}, // Empty options = no compression + ); + const gzipArchive = new Bun.Archive( + { + "hello.txt": "Hello, World!", + }, + { compress: "gzip" }, + ); - const regularBytes = await archive.bytes(); - const gzippedBytes = await archive.bytes("gzip"); + const regularBytes = await regularArchive.bytes(); + const gzippedBytes = await gzipArchive.bytes(); expect(gzippedBytes).toBeInstanceOf(Uint8Array); // Gzipped should be different size @@ -237,19 +300,28 @@ describe("Bun.Archive", () => { test("gzip is smaller for larger repetitive data", async () => { const largeContent = Buffer.alloc(13000, "Hello, World!"); - const archive = Bun.Archive.from({ - "large.txt": largeContent, - }); + const regularArchive = new Bun.Archive( + { + "large.txt": largeContent, + }, + {}, // Empty options = no compression + ); + const gzipArchive = new Bun.Archive( + { + "large.txt": largeContent, + }, + { compress: "gzip" }, + ); - const regularBytes = await archive.bytes(); - const gzippedBytes = await archive.bytes("gzip"); + const regularBytes = await regularArchive.bytes(); + const gzippedBytes = await gzipArchive.bytes(); // For large repetitive data, gzip should be smaller expect(gzippedBytes.length).toBeLessThan(regularBytes.length); }); test("bytes match blob content", async () => { - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "hello.txt": "Hello, World!", }); @@ -262,24 +334,13 @@ describe("Bun.Archive", () => { expect(bytes[i]).toBe(blobBytes[i]); } }); - - test("throws with invalid compress argument", async () => { - const archive = Bun.Archive.from({ - "hello.txt": "Hello, World!", - }); - - await expect(async () => { - // @ts-expect-error - testing runtime behavior - await archive.bytes("deflate"); - }).toThrow(); - }); }); describe("archive.extract()", () => { test("extracts to directory and returns file count", async () => { using dir = tempDir("archive-extract-test", {}); - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "hello.txt": "Hello, World!", "subdir/nested.txt": "Nested content", }); @@ -295,7 +356,7 @@ describe("Bun.Archive", () => { test("extracts nested directory structure", async () => { using dir = tempDir("archive-extract-nested", {}); - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "root.txt": "Root file", "dir1/file1.txt": "File in dir1", "dir1/dir2/file2.txt": "File in dir1/dir2", @@ -316,7 +377,7 @@ describe("Bun.Archive", () => { using dir = tempDir("archive-extract-binary", {}); const binaryData = new Uint8Array([0, 1, 2, 255, 254, 253, 128, 127]); - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "binary.bin": binaryData, }); @@ -333,13 +394,13 
@@ describe("Bun.Archive", () => { using dir = tempDir("archive-extract-from-blob", {}); // Create original archive - const sourceArchive = Bun.Archive.from({ + const sourceArchive = new Bun.Archive({ "test.txt": "test content", }); // Get as blob and create new archive const blob = await sourceArchive.blob(); - const archive = Bun.Archive.from(blob); + const archive = new Bun.Archive(blob); const count = await archive.extract(String(dir)); expect(count).toBeGreaterThan(0); @@ -352,13 +413,13 @@ describe("Bun.Archive", () => { using dir = tempDir("archive-extract-from-bytes", {}); // Create original archive - const sourceArchive = Bun.Archive.from({ + const sourceArchive = new Bun.Archive({ "test.txt": "test content", }); // Get as bytes and create new archive const bytes = await sourceArchive.bytes(); - const archive = Bun.Archive.from(bytes); + const archive = new Bun.Archive(bytes); const count = await archive.extract(String(dir)); expect(count).toBeGreaterThan(0); @@ -368,7 +429,7 @@ describe("Bun.Archive", () => { }); test("throws with missing path argument", async () => { - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "hello.txt": "Hello, World!", }); @@ -379,7 +440,7 @@ describe("Bun.Archive", () => { }); test("throws with non-string path argument", async () => { - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "hello.txt": "Hello, World!", }); @@ -393,7 +454,7 @@ describe("Bun.Archive", () => { using dir = tempDir("archive-extract-create-dir", {}); const newDir = join(String(dir), "new-subdir", "nested"); - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "hello.txt": "Hello, World!", }); @@ -410,7 +471,7 @@ describe("Bun.Archive", () => { "existing-file.txt": "I am a file", }); - const archive = Bun.Archive.from({ + const archive = new Bun.Archive({ "hello.txt": "Hello, World!", }); @@ -425,7 +486,7 @@ describe("Bun.Archive", () => { test("throws when extracting corrupted archive data", async () => { // Create garbage data that's not a valid archive const corruptedData = new Uint8Array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); - const archive = Bun.Archive.from(corruptedData); + const archive = new Bun.Archive(corruptedData); using dir = tempDir("archive-corrupted", {}); @@ -436,14 +497,14 @@ describe("Bun.Archive", () => { test("throws when extracting truncated archive", async () => { // Create a valid archive then truncate it - const validArchive = Bun.Archive.from({ + const validArchive = new Bun.Archive({ "file.txt": "Hello, World!", }); const bytes = await validArchive.bytes(); // Truncate to only first 10 bytes - definitely incomplete const truncated = bytes.slice(0, 10); - const archive = Bun.Archive.from(truncated); + const archive = new Bun.Archive(truncated); using dir = tempDir("archive-truncated", {}); @@ -459,7 +520,7 @@ describe("Bun.Archive", () => { randomBytes[i] = Math.floor(Math.random() * 256); } - const archive = Bun.Archive.from(randomBytes); + const archive = new Bun.Archive(randomBytes); using dir = tempDir("archive-random", {}); @@ -471,7 +532,7 @@ describe("Bun.Archive", () => { test("handles empty archive gracefully", async () => { // Empty data const emptyData = new Uint8Array(0); - const archive = Bun.Archive.from(emptyData); + const archive = new Bun.Archive(emptyData); using dir = tempDir("archive-empty", {}); @@ -487,7 +548,7 @@ describe("Bun.Archive", () => { describe("path safety", () => { test("normalizes paths with redundant separators", async () => { - const archive = 
Bun.Archive.from({
+      const archive = new Bun.Archive({
         "dir//subdir///file.txt": "content",
       });
@@ -500,7 +561,7 @@ describe("Bun.Archive", () => {
     });

     test("handles paths with dots correctly", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "dir/./file.txt": "content1",
         "dir/subdir/../file2.txt": "content2",
       });
@@ -516,7 +577,7 @@ describe("Bun.Archive", () => {
     test("handles very long filenames", async () => {
       // Create a filename that's quite long but within reasonable limits
       const longName = "a".repeat(200) + ".txt";
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         [longName]: "content",
       });
@@ -535,7 +596,7 @@ describe("Bun.Archive", () => {
     test("handles deeply nested paths", async () => {
       // Create a deeply nested path
       const deepPath = Array(50).fill("dir").join("/") + "/file.txt";
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         [deepPath]: "deep content",
       });
@@ -575,21 +636,29 @@ describe("Bun.Archive", () => {
       await Bun.Archive.write(
         archivePath,
-        {
-          "hello.txt": largeContent,
-        },
-        "gzip",
+        new Bun.Archive(
+          {
+            "hello.txt": largeContent,
+          },
+          { compress: "gzip" },
+        ),
       );

       // Verify file exists and is smaller than uncompressed
       const file = Bun.file(archivePath);
       expect(await file.exists()).toBe(true);

-      // Compare with uncompressed
+      // Compare with uncompressed (no options = no compression)
       const uncompressedPath = join(String(dir), "test.tar");
-      await Bun.Archive.write(uncompressedPath, {
-        "hello.txt": largeContent,
-      });
+      await Bun.Archive.write(
+        uncompressedPath,
+        new Bun.Archive(
+          {
+            "hello.txt": largeContent,
+          },
+          {}, // Empty options = no compression
+        ),
+      );
       expect(file.size).toBeLessThan(Bun.file(uncompressedPath).size);
     });
@@ -599,7 +668,7 @@ describe("Bun.Archive", () => {
       const archivePath = join(String(dir), "test.tar");

       // Create archive and get blob
-      const sourceArchive = Bun.Archive.from({
+      const sourceArchive = new Bun.Archive({
         "test.txt": "test content",
       });
       const blob = await sourceArchive.blob();
@@ -625,7 +694,7 @@ describe("Bun.Archive", () => {
       // Extract it
       const blob = await Bun.file(archivePath).bytes();
-      const archive = Bun.Archive.from(blob);
+      const archive = new Bun.Archive(blob);
       require("fs").mkdirSync(extractDir, { recursive: true });
       const count = await archive.extract(extractDir);
       expect(count).toBeGreaterThan(0);
@@ -649,29 +718,29 @@ describe("Bun.Archive", () => {
       }).toThrow();
     });

-    test("throws with invalid compress argument", async () => {
-      using dir = tempDir("archive-write-invalid-compress", {});
+    test("throws with invalid gzip option", async () => {
+      using dir = tempDir("archive-write-invalid-gzip", {});
       const archivePath = join(String(dir), "test.tar");

       await expect(async () => {
-        // @ts-expect-error - testing runtime behavior
-        await Bun.Archive.write(archivePath, { "file.txt": "content" }, "invalid");
+        await Bun.Archive.write(
+          archivePath,
+          new Bun.Archive({ "file.txt": "content" }, { compress: "gzip", level: 0 }),
+        );
       }).toThrow();
-    });
-  });

-  describe("new Archive()", () => {
-    test("throws when constructed directly", () => {
-      expect(() => {
-        // @ts-expect-error - testing runtime behavior
-        new Bun.Archive();
-      }).toThrow("Archive cannot be constructed directly");
+      await expect(async () => {
+        await Bun.Archive.write(
+          archivePath,
+          new Bun.Archive({ "file.txt": "content" }, { compress: "gzip", level: 13 }),
+        );
+      }).toThrow();
     });
   });

   describe("GC safety", () => {
     test("archive remains valid after GC", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "hello.txt": "Hello, World!",
       });
@@ -690,7 +759,7 @@ describe("Bun.Archive", () => {
         entries[`file${i}.txt`] = `Content for file ${i}`;
       }

-      const archive = Bun.Archive.from(entries);
+      const archive = new Bun.Archive(entries);

       // Force GC multiple times
       Bun.gc(true);
@@ -705,7 +774,7 @@ describe("Bun.Archive", () => {
     test("original data mutation doesn't affect archive", async () => {
       const data = new Uint8Array([1, 2, 3, 4, 5]);
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "data.bin": data,
       });
@@ -728,12 +797,12 @@ describe("Bun.Archive", () => {
     test("blob source mutation doesn't affect archive", async () => {
       const original = new Uint8Array([1, 2, 3, 4, 5]);
       const blob = new Blob([original]);
-      const sourceArchive = Bun.Archive.from({
+      const sourceArchive = new Bun.Archive({
         "data.bin": blob,
       });

       const archiveBlob = await sourceArchive.blob();
-      const archive = Bun.Archive.from(archiveBlob);
+      const archive = new Bun.Archive(archiveBlob);

       // Force GC
       Bun.gc(true);
@@ -757,7 +826,7 @@ describe("Bun.Archive", () => {
       using dir = tempDir("archive-gc-no-ref", {});

       // Create promise without keeping archive reference
-      const promise = Bun.Archive.from({
+      const promise = new Bun.Archive({
         "test.txt": "Hello from GC test!",
       }).extract(String(dir));
@@ -776,7 +845,7 @@ describe("Bun.Archive", () => {
     test("blob() works even if archive is not referenced", async () => {
       // Get blob promise without keeping archive reference
-      const promise = Bun.Archive.from({
+      const promise = new Bun.Archive({
         "file.txt": "Blob GC test content",
       }).blob();
@@ -791,7 +860,7 @@ describe("Bun.Archive", () => {
     test("bytes() works even if archive is not referenced", async () => {
       // Get bytes promise without keeping archive reference
-      const promise = Bun.Archive.from({
+      const promise = new Bun.Archive({
         "file.txt": "Bytes GC test content",
       }).bytes();
@@ -808,7 +877,7 @@ describe("Bun.Archive", () => {
   describe("large archives", () => {
     test("handles large file content", async () => {
       const largeContent = Buffer.alloc(1024 * 1024, "x"); // 1MB
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "large.txt": largeContent,
       });
@@ -825,7 +894,7 @@ describe("Bun.Archive", () => {
         entries[`file${i.toString().padStart(4, "0")}.txt`] = `Content ${i}`;
       }

-      const archive = Bun.Archive.from(entries);
+      const archive = new Bun.Archive(entries);

       using dir = tempDir("archive-many-files", {});
       const count = await archive.extract(String(dir));
@@ -835,7 +904,7 @@ describe("Bun.Archive", () => {
   describe("special characters", () => {
     test("handles filenames with spaces", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "file with spaces.txt": "content",
       });
@@ -849,7 +918,7 @@ describe("Bun.Archive", () => {
     test("handles special characters in filenames", async () => {
       // Note: Some unicode characters may not be supported by all tar formats
       // Using ASCII-only special characters
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "file-with-dash.txt": "content1",
         "file_with_underscore.txt": "content2",
         "file.with.dots.txt": "content3",
@@ -864,7 +933,7 @@ describe("Bun.Archive", () => {
     });

     test("handles unicode content", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "unicode.txt": "Hello, 世界! Привет! Γειά σου!",
       });
@@ -878,7 +947,7 @@ describe("Bun.Archive", () => {
   describe("archive.files()", () => {
     test("returns a Map of File objects", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "hello.txt": "Hello, World!",
         "data.json": JSON.stringify({ foo: "bar" }),
       });
@@ -899,14 +968,14 @@ describe("Bun.Archive", () => {
     });

     test("returns empty Map for empty archive", async () => {
-      const archive = Bun.Archive.from({});
+      const archive = new Bun.Archive({});
       const files = await archive.files();
       expect(files).toBeInstanceOf(Map);
       expect(files.size).toBe(0);
     });

     test("handles nested directory structure", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "root.txt": "Root file",
         "dir1/file1.txt": "File in dir1",
         "dir1/dir2/file2.txt": "File in dir1/dir2",
@@ -921,7 +990,7 @@ describe("Bun.Archive", () => {
     });

     test("filters files with glob pattern", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "file1.txt": "Text file 1",
         "file2.txt": "Text file 2",
         "file1.json": "JSON file 1",
@@ -937,7 +1006,7 @@ describe("Bun.Archive", () => {
     test("filters with ** glob pattern", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "file1.txt": "Text file 1",
         "subdir/file2.txt": "Text file 2",
         "subdir/deep/file3.txt": "Text file 3",
@@ -953,7 +1022,7 @@ describe("Bun.Archive", () => {
     test("filters with directory pattern", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "src/index.js": "source 1",
         "src/util.js": "source 2",
         "test/index.test.js": "test 1",
@@ -967,7 +1036,7 @@ describe("Bun.Archive", () => {
     test("returns empty Map when no files match glob", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "file1.txt": "Text file",
         "file2.json": "JSON file",
       });
@@ -979,7 +1048,7 @@ describe("Bun.Archive", () => {
     test("handles binary data correctly", async () => {
       const binaryData = new Uint8Array([0, 1, 2, 255, 254, 253, 128, 127]);
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "binary.bin": binaryData,
       });
@@ -997,7 +1066,7 @@ describe("Bun.Archive", () => {
     test("File objects have lastModified property", async () => {
       // Tar archives store mtime in seconds, so round down to nearest second
       const beforeTime = Math.floor(Date.now() / 1000) * 1000;
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "file.txt": "content",
       });
@@ -1010,7 +1079,7 @@ describe("Bun.Archive", () => {
     });

     test("throws with non-string glob argument", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "file.txt": "content",
       });
@@ -1021,12 +1090,15 @@ describe("Bun.Archive", () => {
     test("works with gzipped archive source", async () => {
-      const sourceArchive = Bun.Archive.from({
-        "hello.txt": "Hello from gzip!",
-      });
+      const sourceArchive = new Bun.Archive(
+        {
+          "hello.txt": "Hello from gzip!",
+        },
+        { compress: "gzip" },
+      );

-      const gzippedBlob = await sourceArchive.blob("gzip");
-      const archive = Bun.Archive.from(gzippedBlob);
+      const gzippedBlob = await sourceArchive.blob();
+      const archive = new Bun.Archive(gzippedBlob);

       const files = await archive.files();
       expect(files.size).toBe(1);
@@ -1034,7 +1106,7 @@ describe("Bun.Archive", () => {
     test("concurrent files() operations work correctly", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "file.txt": "content",
       });
@@ -1046,7 +1118,7 @@ describe("Bun.Archive", () => {
     test("files() works even if archive is not referenced (GC safety)", async () => {
-      const promise = Bun.Archive.from({
+      const promise = new Bun.Archive({
         "test.txt": "GC test content",
       }).files();
@@ -1069,7 +1141,7 @@ describe("Bun.Archive", () => {
       using dir = tempDir("sparse-small", {});

       const tarData = await Bun.file(join(fixturesDir, "small-hole.tar")).bytes();
-      const archive = Bun.Archive.from(tarData);
+      const archive = new Bun.Archive(tarData);
       await archive.extract(String(dir));

       const extracted = await Bun.file(join(String(dir), "small-hole.bin")).bytes();
@@ -1085,7 +1157,7 @@ describe("Bun.Archive", () => {
       using dir = tempDir("sparse-1block", {});

       const tarData = await Bun.file(join(fixturesDir, "one-block-hole.tar")).bytes();
-      const archive = Bun.Archive.from(tarData);
+      const archive = new Bun.Archive(tarData);
       await archive.extract(String(dir));

       const extracted = await Bun.file(join(String(dir), "one-block-hole.bin")).bytes();
@@ -1101,7 +1173,7 @@ describe("Bun.Archive", () => {
       using dir = tempDir("sparse-multi", {});

       const tarData = await Bun.file(join(fixturesDir, "multi-block-hole.tar")).bytes();
-      const archive = Bun.Archive.from(tarData);
+      const archive = new Bun.Archive(tarData);
       await archive.extract(String(dir));

       const extracted = await Bun.file(join(String(dir), "multi-block-hole.bin")).bytes();
@@ -1116,7 +1188,7 @@ describe("Bun.Archive", () => {
       using dir = tempDir("sparse-leading", {});

       const tarData = await Bun.file(join(fixturesDir, "leading-hole.tar")).bytes();
-      const archive = Bun.Archive.from(tarData);
+      const archive = new Bun.Archive(tarData);
       await archive.extract(String(dir));

       const extracted = await Bun.file(join(String(dir), "leading-hole.bin")).bytes();
@@ -1131,7 +1203,7 @@ describe("Bun.Archive", () => {
       using dir = tempDir("sparse-trailing", {});

       const tarData = await Bun.file(join(fixturesDir, "trailing-hole.tar")).bytes();
-      const archive = Bun.Archive.from(tarData);
+      const archive = new Bun.Archive(tarData);
       await archive.extract(String(dir));

       const extracted = await Bun.file(join(String(dir), "trailing-hole.bin")).bytes();
@@ -1146,7 +1218,7 @@ describe("Bun.Archive", () => {
       using dir = tempDir("sparse-large", {});

       const tarData = await Bun.file(join(fixturesDir, "large-hole.tar")).bytes();
-      const archive = Bun.Archive.from(tarData);
+      const archive = new Bun.Archive(tarData);
       await archive.extract(String(dir));

       const extracted = await Bun.file(join(String(dir), "large-hole.bin")).bytes();
@@ -1160,7 +1232,7 @@ describe("Bun.Archive", () => {
   describe("extract with glob patterns", () => {
     test("extracts only files matching glob pattern", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "src/index.ts": "export {}",
         "src/utils.ts": "export {}",
         "src/types.d.ts": "declare {}",
@@ -1183,7 +1255,7 @@ describe("Bun.Archive", () => {
     test("extracts files matching any of multiple glob patterns", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "src/index.ts": "export {}",
         "lib/utils.js": "module.exports = {}",
         "test/test.ts": "test()",
@@ -1201,7 +1273,7 @@ describe("Bun.Archive", () => {
     test("excludes files matching negative pattern", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "src/index.ts": "export {}",
         "src/index.test.ts": "test()",
         "src/utils.ts": "export {}",
@@ -1220,7 +1292,7 @@ describe("Bun.Archive", () => {
     test("excludes files matching any of multiple negative patterns", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "src/index.ts": "export {}",
         "src/index.test.ts": "test()",
         "__tests__/helper.ts": "helper",
@@ -1240,7 +1312,7 @@ describe("Bun.Archive", () => {
     test("combines positive and negative glob patterns", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "src/index.ts": "export {}",
         "src/index.test.ts": "test()",
         "src/utils.ts": "export {}",
@@ -1264,7 +1336,7 @@ describe("Bun.Archive", () => {
     test("extracts all files when no patterns are provided", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "file1.txt": "content1",
         "file2.txt": "content2",
       });
@@ -1278,7 +1350,7 @@ describe("Bun.Archive", () => {
     test("returns 0 when no files match glob pattern", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "file.txt": "content",
         "other.md": "markdown",
       });
@@ -1292,7 +1364,7 @@ describe("Bun.Archive", () => {
   describe("concurrent operations", () => {
     test("multiple extract operations run correctly", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "file.txt": "content",
       });
@@ -1316,7 +1388,7 @@ describe("Bun.Archive", () => {
     test("multiple blob operations run correctly", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "file.txt": "content",
       });
@@ -1327,7 +1399,7 @@ describe("Bun.Archive", () => {
     test("mixed operations run correctly", async () => {
-      const archive = Bun.Archive.from({
+      const archive = new Bun.Archive({
         "file.txt": "content",
       });
@@ -1340,4 +1412,131 @@ describe("Bun.Archive", () => {
       expect(count).toBeGreaterThan(0);
     });
   });
+
+  describe("Bun.write with Archive", () => {
+    test("writes archive to local file", async () => {
+      const archive = new Bun.Archive({
+        "hello.txt": "Hello, World!",
+        "data.json": JSON.stringify({ foo: "bar" }),
+      });
+
+      using dir = tempDir("archive-bunwrite", {});
+      const tarPath = join(String(dir), "test.tar");
+
+      const bytesWritten = await Bun.write(tarPath, archive);
+      expect(bytesWritten).toBeGreaterThan(0);
+
+      // Verify the file was written
+      expect(await Bun.file(tarPath).exists()).toBe(true);
+
+      // Read it back and verify contents
+      const readArchive = new Bun.Archive(await Bun.file(tarPath).bytes());
+      const files = await readArchive.files();
+      expect(files.size).toBe(2);
+      expect(files.get("hello.txt")).toBeDefined();
+      expect(await files.get("hello.txt")!.text()).toBe("Hello, World!");
+      expect(await files.get("data.json")!.text()).toBe(JSON.stringify({ foo: "bar" }));
+    });
+
+    test("writes archive with nested directories", async () => {
+      const archive = new Bun.Archive({
+        "root.txt": "root file",
+        "dir1/file1.txt": "file in dir1",
+        "dir1/dir2/file2.txt": "file in dir1/dir2",
+      });
+
+      using dir = tempDir("archive-bunwrite-nested", {});
+      const tarPath = join(String(dir), "nested.tar");
+
+      await Bun.write(tarPath, archive);
+
+      // Read it back
+      const readArchive = new Bun.Archive(await Bun.file(tarPath).bytes());
+      const files = await readArchive.files();
+      expect(files.size).toBe(3);
+      expect(await files.get("dir1/dir2/file2.txt")!.text()).toBe("file in dir1/dir2");
+    });
+
+    test("writes archive with binary content", async () => {
+      const binaryData = new Uint8Array([0x00, 0x01, 0x02, 0xff, 0xfe, 0xfd]);
+      const archive = new Bun.Archive({
+        "binary.bin": binaryData,
+      });
+
+      using dir = tempDir("archive-bunwrite-binary", {});
+      const tarPath = join(String(dir), "binary.tar");
+
+      await Bun.write(tarPath, archive);
+
+      // Read it back
+      const readArchive = new Bun.Archive(await Bun.file(tarPath).bytes());
+      const files = await readArchive.files();
+      const extractedBinary = await files.get("binary.bin")!.bytes();
+      expect(extractedBinary).toEqual(binaryData);
+    });
+
+    test("writes archive to Bun.file()", async () => {
+      const archive = new Bun.Archive({
+        "test.txt": "test content",
+      });
+
+      using dir = tempDir("archive-bunwrite-file", {});
+      const tarPath = join(String(dir), "test.tar");
+      const bunFile = Bun.file(tarPath);
+
+      await Bun.write(bunFile, archive);
+
+      expect(await bunFile.exists()).toBe(true);
+      const readArchive = new Bun.Archive(await bunFile.bytes());
+      const files = await readArchive.files();
+      expect(await files.get("test.txt")!.text()).toBe("test content");
+    });
+  });
+
+  describe("TypeScript types", () => {
+    test("valid archive options", () => {
+      const files = { "hello.txt": "Hello, World!" };
+
+      // Valid: no options (no compression)
+      new Bun.Archive(files);
+
+      // Valid: empty options (also no compression)
+      new Bun.Archive(files, {});
+
+      // Valid: explicit gzip compression
+      new Bun.Archive(files, { compress: "gzip" });
+
+      // Valid: gzip with level
+      new Bun.Archive(files, { compress: "gzip", level: 9 });
+    });
+
+    test("invalid archive options throw TypeScript errors", () => {
+      // This test verifies that invalid options produce TypeScript errors
+      // The @ts-expect-error directives are checked at compile time
+      // We use a never-executed function to avoid runtime errors for "zstd"
+      const _typeCheck = () => {
+        const files = { "hello.txt": "Hello, World!" };
+        // @ts-expect-error - invalid compression type (this throws at runtime)
+        new Bun.Archive(files, { compress: "zstd", level: 9 });
+      };
+      // Just verify the type checks pass - don't actually run the code
+      expect(_typeCheck).toBeDefined();
+    });
+
+    test("level without compress is TypeScript error but no runtime error", async () => {
+      const files = { "hello.txt": "Hello, World!" };
+
+      // @ts-expect-error - level without compress is a TypeScript error
+      const archive = new Bun.Archive(files, { level: 9 });
+
+      // Should not throw at runtime - level is silently ignored, no compression used
+      expect(archive).toBeInstanceOf(Bun.Archive);
+
+      // Verify it produces uncompressed output (same as empty options)
+      const uncompressedArchive = new Bun.Archive(files, {});
+      const bytes = await archive.bytes();
+      const uncompressedBytes = await uncompressedArchive.bytes();
+      expect(bytes.length).toBe(uncompressedBytes.length);
+    });
+  });
 });
diff --git a/test/js/bun/s3/s3.test.ts b/test/js/bun/s3/s3.test.ts
index 68c6ef034b..a947d12004 100644
--- a/test/js/bun/s3/s3.test.ts
+++ b/test/js/bun/s3/s3.test.ts
@@ -1509,3 +1509,128 @@ describe.concurrent("s3 missing credentials", () => {
     });
   });
 });
+
+// Archive + S3 integration tests
+describe.skipIf(!minioCredentials)("Archive with S3", () => {
+  const credentials = minioCredentials!;
+
+  it("writes archive to S3 via S3Client.write()", async () => {
+    const client = new Bun.S3Client(credentials);
+    const archive = new Bun.Archive({
+      "hello.txt": "Hello from Archive!",
+      "data.json": JSON.stringify({ test: true }),
+    });
+
+    const key = randomUUIDv7() + ".tar";
+    await client.write(key, archive);
+
+    // Verify by downloading and reading back
+    const downloaded = await client.file(key).bytes();
+    const readArchive = new Bun.Archive(downloaded);
+    const files = await readArchive.files();
+
+    expect(files.size).toBe(2);
+    expect(await files.get("hello.txt")!.text()).toBe("Hello from Archive!");
+    expect(await files.get("data.json")!.text()).toBe(JSON.stringify({ test: true }));
+
+    // Cleanup
+    await client.unlink(key);
+  });
+
+  it("writes archive to S3 via Bun.write() with s3:// URL", async () => {
+    const archive = new Bun.Archive({
+      "file1.txt": "content1",
+      "dir/file2.txt": "content2",
+    });
+
+    const key = randomUUIDv7() + ".tar";
+    const s3Url = `s3://${credentials.bucket}/${key}`;
+
+    await Bun.write(s3Url, archive, {
+      ...credentials,
+    });
+
+    // Verify by downloading
+    const s3File = Bun.file(s3Url, credentials);
+    const downloaded = await s3File.bytes();
+    const readArchive = new Bun.Archive(downloaded);
+    const files = await readArchive.files();
+
+    expect(files.size).toBe(2);
+    expect(await files.get("file1.txt")!.text()).toBe("content1");
+    expect(await files.get("dir/file2.txt")!.text()).toBe("content2");
+
+    // Cleanup
+    await s3File.delete();
+  });
+
+  it("writes archive with binary content to S3", async () => {
+    const client = new Bun.S3Client(credentials);
+    const binaryData = new Uint8Array([0x00, 0x01, 0x02, 0xff, 0xfe, 0xfd, 0x80, 0x7f]);
+    const archive = new Bun.Archive({
+      "binary.bin": binaryData,
+    });
+
+    const key = randomUUIDv7() + ".tar";
+    await client.write(key, archive);
+
+    // Verify binary data is preserved
+    const downloaded = await client.file(key).bytes();
+    const readArchive = new Bun.Archive(downloaded);
+    const files = await readArchive.files();
+    const extractedBinary = await files.get("binary.bin")!.bytes();
+
+    expect(extractedBinary).toEqual(binaryData);
+
+    // Cleanup
+    await client.unlink(key);
+  });
+
+  it("writes large archive to S3", async () => {
+    const client = new Bun.S3Client(credentials);
+
+    // Create archive with multiple files
+    const entries: Record<string, string> = {};
+    for (let i = 0; i < 50; i++) {
+      entries[`file${i.toString().padStart(3, "0")}.txt`] = `Content for file ${i}`;
+    }
+    const archive = new Bun.Archive(entries);
+
+    const key = randomUUIDv7() + ".tar";
+    await client.write(key, archive);
+
+    // Verify
+    const downloaded = await client.file(key).bytes();
+    const readArchive = new Bun.Archive(downloaded);
+    const files = await readArchive.files();
+
+    expect(files.size).toBe(50);
+    expect(await files.get("file000.txt")!.text()).toBe("Content for file 0");
+    expect(await files.get("file049.txt")!.text()).toBe("Content for file 49");
+
+    // Cleanup
+    await client.unlink(key);
+  });
+
+  it("writes archive via s3File.write()", async () => {
+    const client = new Bun.S3Client(credentials);
+    const archive = new Bun.Archive({
+      "test.txt": "Hello via s3File.write()!",
+    });
+
+    const key = randomUUIDv7() + ".tar";
+    const s3File = client.file(key);
+    await s3File.write(archive);
+
+    // Verify
+    const downloaded = await s3File.bytes();
+    const readArchive = new Bun.Archive(downloaded);
+    const files = await readArchive.files();
+
+    expect(files.size).toBe(1);
+    expect(await files.get("test.txt")!.text()).toBe("Hello via s3File.write()!");
+
+    // Cleanup
+    await s3File.delete();
+  });
+});