mirror of https://github.com/oven-sh/bun
synced 2026-02-04 16:08:53 +00:00

Compare commits
6 Commits: dylan/pyth ... claude/tar

| Author | SHA1 | Date |
|---|---|---|
|  | a06fb91e15 |  |
|  | d0803dbd21 |  |
|  | 58e7919674 |  |
|  | 2a1684648a |  |
|  | 8df01eeae6 |  |
|  | 64fd7ca062 |  |
bun.lock: 6 changes

@@ -33,6 +33,8 @@
    },
    "devDependencies": {
      "@types/react": "^19",
      "typescript": "5.9.2",
      "undici-types": "^7.16.0",
    },
    "peerDependencies": {
      "@types/react": "^19",
@@ -312,7 +314,7 @@

    "uglify-js": ["uglify-js@3.19.3", "", { "bin": { "uglifyjs": "bin/uglifyjs" } }, "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ=="],

    "undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
    "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],

    "universal-github-app-jwt": ["universal-github-app-jwt@1.2.0", "", { "dependencies": { "@types/jsonwebtoken": "^9.0.0", "jsonwebtoken": "^9.0.2" } }, "sha512-dncpMpnsKBk0eetwfN8D8OUHGfiDhhJ+mtsbMl+7PfW7mYjiH8LIcqRmYMtzYLgSh47HjfdBtrBwIQ/gizKR3g=="],

@@ -334,6 +336,8 @@

    "@octokit/webhooks/@octokit/webhooks-methods": ["@octokit/webhooks-methods@4.1.0", "", {}, "sha512-zoQyKw8h9STNPqtm28UGOYFE7O6D4Il8VJwhAtMHFt2C4L0VQT1qGKLeefUOqHNs1mNRYSadVv7x0z8U2yyeWQ=="],

    "@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],

    "camel-case/no-case": ["no-case@2.3.2", "", { "dependencies": { "lower-case": "^1.1.1" } }, "sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ=="],

    "change-case/camel-case": ["camel-case@4.1.2", "", { "dependencies": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw=="],
packages/bun-types/bun.d.ts (vendored): 180 changes

@@ -4020,6 +4020,186 @@ declare module "bun" {
   */
  function zstdDecompress(data: NodeJS.TypedArray | Buffer | string | ArrayBuffer): Promise<Buffer>;

  /**
   * Options for creating a tar archive
   */
  interface TarballOptions {
    /**
     * Files to include in the archive.
     *
     * Keys are the paths inside the tar archive.
     * Use POSIX-style forward slashes (`/`) for separators. Avoid absolute paths,
     * backslashes (`\`), and `..` segments; these may be rejected or normalized.
     *
     * Values are the file contents, which can be:
     * - `string`: text content
     * - `Blob` or `BunFile`: binary or text content
     * - `ArrayBuffer` / `TypedArray`: binary data
     *
     * @example
     * ```ts
     * {
     *   "README.md": Bun.file("./README.md"), // from BunFile
     *   "data.json": new Blob([JSON.stringify(data)]), // from Blob
     *   "binary.dat": new Uint8Array([1, 2, 3]) // from TypedArray
     * }
     * ```
     */
    files: Record<string, string | Blob | BunFile | ArrayBufferView | ArrayBuffer>;

    /**
     * Optional destination for the archive.
     *
     * - If omitted: returns the archive as a `Blob` (in-memory)
     * - If string path: writes to file and returns byte count
     *
     * **Note**: In-memory creation uses a pre-allocated buffer limited to 100MB.
     * For larger archives, use the `destination` option to write directly to disk.
     *
     * @example
     * ```ts
     * await Bun.tarball({ files: {...} }) // returns Blob
     * await Bun.tarball({ files: {...}, destination: "out.tar" }) // returns number
     * ```
     */
    destination?: string;

    /**
     * Optional compression.
     *
     * - `"gzip"`: use gzip with default level (6)
     * - `{ type: "gzip", level: 0-9 }`: use gzip with specific compression level
     *   - 0 = no compression (fastest)
     *   - 9 = maximum compression (slowest)
     *
     * @example
     * ```ts
     * await Bun.tarball({ files: {...}, compress: "gzip" })
     * await Bun.tarball({ files: {...}, compress: { type: "gzip", level: 9 } })
     * ```
     */
    compress?: "gzip" | { type: "gzip"; level?: 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 };
  }

  /**
   * Create a tar archive from files.
   *
   * When `destination` is specified, writes to a file and returns the number of bytes written.
   * When `destination` is omitted, returns the archive as a `Blob`.
   *
   * @example
   * ```ts
   * // Create tar.gz in memory
   * const blob = await Bun.tarball({
   *   files: {
   *     "README.md": Bun.file("./README.md"),
   *     "package.json": Bun.file("./package.json"),
   *     "src/index.ts": Bun.file("./src/index.ts"),
   *   },
   *   compress: "gzip",
   * });
   * await Bun.write("archive.tar.gz", blob);
   *
   * // Write directly to file
   * const bytes = await Bun.tarball({
   *   files: {
   *     "data.json": new Blob([JSON.stringify({ foo: "bar" })]),
   *   },
   *   destination: "./output.tar",
   *   compress: { type: "gzip", level: 9 },
   * });
   * console.log(`Wrote ${bytes} bytes`);
   * ```
   */
  function tarball(options: TarballOptions & { destination: string }): Promise<number>;
  function tarball(options: Omit<TarballOptions, "destination">): Promise<Blob>;

  interface ExtractOptions {
    /**
     * Glob pattern(s) to filter which files to extract.
     * Supports standard glob syntax including `*`, `?`, and `[...]`.
     * Multiple patterns can be provided as an array.
     * Patterns starting with `!` are treated as negations.
     *
     * @example
     * ```ts
     * // Extract only TypeScript files
     * await Bun.extract("archive.tar", { glob: "*.ts" })
     *
     * // Extract multiple patterns
     * await Bun.extract("archive.tar", { glob: ["src/*.js", "!src/test.js"] })
     * ```
     */
    glob?: string | string[];

    /**
     * Number of leading path components to skip when extracting.
     * Similar to `tar --strip-components`.
     * Must be a non-negative integer (0-128).
     *
     * @example
     * ```ts
     * // If archive contains "foo/bar/baz.txt", skipPathComponents: 2 extracts as "baz.txt"
     * await Bun.extract("archive.tar", { skipPathComponents: 2 })
     * ```
     */
    skipPathComponents?: number;

    /**
     * Directory to extract files into.
     * If omitted, returns files as an object of Blobs.
     *
     * @example
     * ```ts
     * await Bun.extract("archive.tar") // returns Record<string, Blob>
     * await Bun.extract("archive.tar", { destination: "./out" }) // returns number of files
     * ```
     */
    destination?: string;
  }

  /**
   * Extract files from a tar archive.
   *
   * When `destination` is specified, extracts to disk and returns the number of files extracted.
   * When `destination` is omitted, returns files as an object mapping paths to Blobs.
   *
   * **Limitations**:
   * - When passing a file path as a string, archives are limited to 100MB to prevent OOM.
   *   For larger archives, pass a Blob or Buffer directly instead.
   * - Symlinks in archives are currently **not extracted** and are silently skipped.
   *   This is a security measure to prevent symlink-based attacks.
   * - Future versions may add opt-in symlink support with safety checks.
   *
   * @param path Path to archive file (max 100MB), or archive data as Blob/Buffer/ArrayBufferView
   * @param options Extract options
   *
   * @example
   * ```ts
   * // Extract to memory
   * const files = await Bun.extract("archive.tar.gz");
   * console.log(await files["README.md"].text());
   *
   * // Extract to disk
   * const count = await Bun.extract("archive.tar", { destination: "./output" });
   * console.log(`Extracted ${count} files`);
   *
   * // With path stripping
   * await Bun.extract("archive.tar", {
   *   destination: "./src",
   *   skipPathComponents: 1,
   * });
   * ```
   */
  function extract(
    path: string | Blob | BunFile | ArrayBufferView | ArrayBuffer,
    options: ExtractOptions & { destination: string },
  ): Promise<number>;
  function extract(
    path: string | Blob | BunFile | ArrayBufferView | ArrayBuffer,
    options?: Omit<ExtractOptions, "destination">,
  ): Promise<Record<string, Blob>>;

  type Target =
    /**
     * For generating bundles that are intended to be run by the Bun runtime. In many cases,
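Taken together, the declarations above make the return type depend on `destination`: a byte or file count when writing to disk, a `Blob` or a map of `Blob`s when staying in memory. A small round-trip sketch based only on these declarations (the file names and glob pattern are illustrative, not part of this diff):

```ts
// Build an archive in memory, then read back only the TypeScript sources.
const archive = await Bun.tarball({
  files: {
    "src/index.ts": "export const answer = 42;\n",
    "src/notes.md": "# scratch\n",
  },
  compress: "gzip", // same as { type: "gzip", level: 6 }
});

// No `destination`, so the result is a Record<string, Blob>.
const files = await Bun.extract(archive, { glob: "src/*.ts" });
for (const [path, blob] of Object.entries(files)) {
  console.log(path, await blob.text());
}
```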
@@ -24,7 +24,9 @@
    "@types/react": "^19"
  },
  "devDependencies": {
    "@types/react": "^19"
    "@types/react": "^19",
    "typescript": "5.9.2",
    "undici-types": "^7.16.0"
  },
  "scripts": {
    "prebuild": "echo $(pwd)",
@@ -17,6 +17,7 @@ pub const BunObject = struct {
    pub const createParsedShellScript = toJSCallback(bun.shell.ParsedShellScript.createParsedShellScript);
    pub const createShellInterpreter = toJSCallback(bun.shell.Interpreter.createShellInterpreter);
    pub const deflateSync = toJSCallback(JSZlib.deflateSync);
    pub const extract = toJSCallback(Bun.extractTarball);
    pub const file = toJSCallback(WebCore.Blob.constructBunFile);
    pub const gunzipSync = toJSCallback(JSZlib.gunzipSync);
    pub const gzipSync = toJSCallback(JSZlib.gzipSync);
@@ -37,6 +38,7 @@ pub const BunObject = struct {
    pub const sleepSync = toJSCallback(Bun.sleepSync);
    pub const spawn = toJSCallback(host_fn.wrapStaticMethod(api.Subprocess, "spawn", false));
    pub const spawnSync = toJSCallback(host_fn.wrapStaticMethod(api.Subprocess, "spawnSync", false));
    pub const tarball = toJSCallback(Bun.createTarball);
    pub const udpSocket = toJSCallback(host_fn.wrapStaticMethod(api.UDPSocket, "udpSocket", false));
    pub const which = toJSCallback(Bun.which);
    pub const write = toJSCallback(jsc.WebCore.Blob.writeFile);
@@ -153,6 +155,7 @@ pub const BunObject = struct {
        @export(&BunObject.createParsedShellScript, .{ .name = callbackName("createParsedShellScript") });
        @export(&BunObject.createShellInterpreter, .{ .name = callbackName("createShellInterpreter") });
        @export(&BunObject.deflateSync, .{ .name = callbackName("deflateSync") });
        @export(&BunObject.extract, .{ .name = callbackName("extract") });
        @export(&BunObject.file, .{ .name = callbackName("file") });
        @export(&BunObject.gunzipSync, .{ .name = callbackName("gunzipSync") });
        @export(&BunObject.gzipSync, .{ .name = callbackName("gzipSync") });
@@ -173,6 +176,7 @@ pub const BunObject = struct {
        @export(&BunObject.sleepSync, .{ .name = callbackName("sleepSync") });
        @export(&BunObject.spawn, .{ .name = callbackName("spawn") });
        @export(&BunObject.spawnSync, .{ .name = callbackName("spawnSync") });
        @export(&BunObject.tarball, .{ .name = callbackName("tarball") });
        @export(&BunObject.udpSocket, .{ .name = callbackName("udpSocket") });
        @export(&BunObject.which, .{ .name = callbackName("which") });
        @export(&BunObject.write, .{ .name = callbackName("write") });
@@ -2012,6 +2016,233 @@ pub const JSZstd = struct {
        return job.promise.value();
    }
};

pub const Compression = TarballJobModule.Compression;
pub const TarballJob = TarballJobModule.TarballJob;
pub const ExtractJob = ExtractJobModule.ExtractJob;

pub fn createTarball(globalThis: *JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!JSValue {
    const arguments = callframe.arguments();
    if (arguments.len < 1) {
        return globalThis.throwInvalidArguments("Expected options object", .{});
    }

    const opts = arguments[0];
    if (!opts.isObject()) {
        return globalThis.throwInvalidArguments("Expected options to be an object", .{});
    }

    // Parse files
    const files_value = try opts.get(globalThis, "files") orelse {
        return globalThis.throwInvalidArguments("Missing required field: files", .{});
    };

    var file_list = try parseFileList(globalThis, files_value);
    errdefer file_list.deinit();

    if (file_list.entries.len == 0) {
        return globalThis.throwInvalidArguments("files object cannot be empty", .{});
    }

    // Parse optional destination
    const destination = if (try opts.getOptional(globalThis, "destination", ZigString.Slice)) |slice| blk: {
        defer slice.deinit();
        break :blk try bun.default_allocator.dupe(u8, slice.slice());
    } else null;
    errdefer if (destination) |d| bun.default_allocator.free(d);

    // Parse optional compression
    var compression: Compression = .{ .none = {} };

    if (try opts.get(globalThis, "compress")) |comp| {
        if (comp.isString()) {
            const str = try comp.toSlice(globalThis, bun.default_allocator);
            defer str.deinit();
            if (bun.strings.eqlComptime(str.slice(), "gzip")) {
                compression = .{ .gzip = 6 }; // default level
            } else {
                return globalThis.throwInvalidArguments("compress must be 'gzip' or object", .{});
            }
        } else if (comp.isObject()) {
            const type_val = try comp.get(globalThis, "type") orelse {
                return globalThis.throwInvalidArguments("compress.type is required", .{});
            };
            const type_str = try type_val.toSlice(globalThis, bun.default_allocator);
            defer type_str.deinit();
            if (!bun.strings.eqlComptime(type_str.slice(), "gzip")) {
                return globalThis.throwInvalidArguments("Only 'gzip' compression supported", .{});
            }

            var level: u8 = 6;
            if (try comp.get(globalThis, "level")) |level_val| {
                const num = try level_val.coerce(i32, globalThis);
                if (num < 0 or num > 9) {
                    return globalThis.throwInvalidArguments("compression level must be 0-9", .{});
                }
                level = @intCast(num);
            }
            compression = .{ .gzip = level };
        }
    }

    const vm = globalThis.bunVM();
    var job = TarballJob.create(vm, globalThis, file_list, destination, compression);
    return job.promise.value();
}

fn parseFileList(globalThis: *JSGlobalObject, files_obj: JSValue) !TarballJobModule.FileList {
    if (!files_obj.isObject()) {
        return globalThis.throwInvalidArguments("files must be an object", .{});
    }

    const allocator = bun.default_allocator;
    var entries = std.ArrayList(TarballJobModule.FileEntry).init(allocator);
    errdefer {
        for (entries.items) |*entry| entry.deinit(allocator);
        entries.deinit();
    }

    var iter = try jsc.JSPropertyIterator(.{
        .skip_empty_name = true,
        .include_value = true,
    }).init(globalThis, try files_obj.toObject(globalThis));
    defer iter.deinit();

    while (try iter.next()) |prop_name| {
        const value = iter.value;
        if (value.isUndefined()) continue;

        var entry = TarballJobModule.FileEntry{
            .archive_path = try prop_name.toOwnedSlice(allocator),
            .data = try jsc.Node.BlobOrStringOrBuffer.fromJSWithEncodingValueMaybeAsync(
                globalThis,
                allocator,
                value,
                .js_undefined,
                true,
            ) orelse {
                return globalThis.throwInvalidArguments("File values must be string, Blob, or Buffer", .{});
            },
        };
        errdefer entry.deinit(allocator);

        entries.append(entry) catch |err| {
            entry.deinit(allocator);
            return err;
        };
    }

    return .{
        .entries = try entries.toOwnedSlice(),
        .allocator = allocator,
    };
}

pub fn extractTarball(globalThis: *JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!JSValue {
    const arguments = callframe.arguments();
    if (arguments.len < 1) {
        return globalThis.throwInvalidArguments("Expected archive data", .{});
    }

    // Parse archive data
    const archive_value = arguments[0];
    const archive_data = blk: {
        if (archive_value.isString()) {
            // File path - limited to 100MB to prevent OOM
            const path_slice = try archive_value.toSlice(globalThis, bun.default_allocator);
            defer path_slice.deinit();
            const max_size = 100 * 1024 * 1024; // 100MB max
            break :blk std.fs.cwd().readFileAlloc(
                bun.default_allocator,
                path_slice.slice(),
                max_size,
            ) catch |err| {
                if (err == error.FileTooBig) {
                    return globalThis.throwValue(globalThis.createErrorInstance("Archive file exceeds 100MB limit. Use Blob or Buffer for larger files.", .{}));
                }
                return globalThis.throwValue(globalThis.createErrorInstance("Failed to read file: {s}", .{@errorName(err)}));
            };
        } else {
            // Blob, Buffer, or ArrayBufferView
            const blob_or_buf = try jsc.Node.BlobOrStringOrBuffer.fromJSWithEncodingValueMaybeAsync(
                globalThis,
                bun.default_allocator,
                archive_value,
                .js_undefined,
                true,
            ) orelse {
                return globalThis.throwInvalidArguments("Expected string, Blob, Buffer, or ArrayBufferView", .{});
            };
            defer blob_or_buf.deinit();
            break :blk try bun.default_allocator.dupe(u8, blob_or_buf.slice());
        }
    };
    errdefer bun.default_allocator.free(archive_data);

    // Parse options if provided
    var destination: ?[]const u8 = null;
    var glob_patterns: ?[][]const u8 = null;
    var skip_components: u32 = 0;
    errdefer if (destination) |d| bun.default_allocator.free(d);
    errdefer if (glob_patterns) |patterns| {
        for (patterns) |p| bun.default_allocator.free(p);
        bun.default_allocator.free(patterns);
    };

    if (arguments.len > 1 and arguments[1].isObject()) {
        const opts = arguments[1];

        if (try opts.getOptional(globalThis, "destination", ZigString.Slice)) |slice| {
            defer slice.deinit();
            destination = try bun.default_allocator.dupe(u8, slice.slice());
        }

        if (try opts.get(globalThis, "glob")) |glob_val| {
            if (glob_val.isString()) {
                const pattern_slice = try glob_val.toSlice(globalThis, bun.default_allocator);
                defer pattern_slice.deinit();

                const patterns = try bun.default_allocator.alloc([]const u8, 1);
                errdefer bun.default_allocator.free(patterns);

                patterns[0] = try bun.default_allocator.dupe(u8, pattern_slice.slice());
                glob_patterns = patterns;
            } else if (glob_val.jsType().isArray()) {
                var patterns = std.ArrayList([]const u8).init(bun.default_allocator);
                errdefer {
                    for (patterns.items) |p| bun.default_allocator.free(p);
                    patterns.deinit();
                }

                var iter = try glob_val.arrayIterator(globalThis);
                while (try iter.next()) |item| {
                    if (item.isString()) {
                        const pattern_slice = try item.toSlice(globalThis, bun.default_allocator);
                        defer pattern_slice.deinit();
                        const dup = try bun.default_allocator.dupe(u8, pattern_slice.slice());
                        errdefer bun.default_allocator.free(dup);
                        try patterns.append(dup);
                    }
                }

                if (patterns.items.len > 0) {
                    glob_patterns = try patterns.toOwnedSlice();
                }
            }
        }

        if (try opts.get(globalThis, "skipPathComponents")) |skip_val| {
            const skip_u32 = skip_val.toU32();
            if (skip_u32 > 128) {
                return globalThis.throwInvalidArguments("skipPathComponents must be between 0 and 128", .{});
            }
            skip_components = @intCast(skip_u32);
        }
    }

    const vm = globalThis.bunVM();
    const job = ExtractJob.create(vm, globalThis, archive_data, destination, glob_patterns, skip_components);
    return job.promise.value();
}

// const InternalTestingAPIs = struct {
//     pub fn BunInternalFunction__syntaxHighlighter(globalThis: *JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!JSValue {
@@ -2081,6 +2312,8 @@ pub fn createBunStdout(globalThis: *jsc.JSGlobalObject) callconv(.C) jsc.JSValue
}

const Braces = @import("../../shell/braces.zig");
const ExtractJobModule = @import("./ExtractJob.zig");
const TarballJobModule = @import("./TarballJob.zig");
const Which = @import("../../which.zig");
const options = @import("../../options.zig");
const std = @import("std");
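Note that option validation in `createTarball` and `extractTarball` runs on the JS thread before the work-pool job is created, so malformed options throw synchronously instead of rejecting the returned promise. A short sketch of the two range checks above, mirroring the assertions in the test files later in this diff:

```ts
import { expect, test } from "bun:test";

test("invalid options throw before any archive work is scheduled", () => {
  // createTarball rejects compression levels outside 0-9.
  expect(() =>
    Bun.tarball({
      files: { "a.txt": "hi" },
      // @ts-expect-error deliberately out of the declared 0-9 range
      compress: { type: "gzip", level: 100 },
    }),
  ).toThrow("compression level must be 0-9");

  // extractTarball caps skipPathComponents at 128.
  expect(() => Bun.extract(new Blob(["..."]), { skipPathComponents: 129 })).toThrow(
    "skipPathComponents must be between 0 and 128",
  );
});
```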
src/bun.js/api/ExtractJob.zig (new file, 309 lines)
@@ -0,0 +1,309 @@
|
||||
const MAX_MEMORY_SIZE = 100 * 1024 * 1024;
|
||||
|
||||
pub const ExtractJob = struct {
|
||||
archive_data: []const u8,
|
||||
destination: ?[]const u8,
|
||||
glob_patterns: ?[][]const u8,
|
||||
skip_components: u32,
|
||||
task: jsc.WorkPoolTask = .{ .callback = &runTask },
|
||||
promise: jsc.JSPromise.Strong = .{},
|
||||
vm: *jsc.VirtualMachine,
|
||||
files: std.StringArrayHashMap([]u8),
|
||||
file_count: usize = 0,
|
||||
error_message: ?[]const u8 = null,
|
||||
any_task: jsc.AnyTask,
|
||||
poll: Async.KeepAlive = .{},
|
||||
|
||||
pub fn create(
|
||||
vm: *jsc.VirtualMachine,
|
||||
globalObject: *JSGlobalObject,
|
||||
archive_data: []const u8,
|
||||
destination: ?[]const u8,
|
||||
glob_patterns: ?[][]const u8,
|
||||
skip_components: u32,
|
||||
) *ExtractJob {
|
||||
const job = bun.default_allocator.create(ExtractJob) catch bun.outOfMemory();
|
||||
job.* = .{
|
||||
.archive_data = archive_data,
|
||||
.destination = destination,
|
||||
.glob_patterns = glob_patterns,
|
||||
.skip_components = skip_components,
|
||||
.vm = vm,
|
||||
.files = std.StringArrayHashMap([]u8).init(bun.default_allocator),
|
||||
.any_task = undefined,
|
||||
};
|
||||
|
||||
job.promise = jsc.JSPromise.Strong.init(globalObject);
|
||||
job.any_task = jsc.AnyTask.New(@This(), &runFromJS).init(job);
|
||||
job.poll.ref(vm);
|
||||
jsc.WorkPool.schedule(&job.task);
|
||||
return job;
|
||||
}
|
||||
|
||||
pub fn runTask(task: *jsc.WorkPoolTask) void {
|
||||
const job: *ExtractJob = @fieldParentPtr("task", task);
|
||||
defer job.vm.enqueueTaskConcurrent(jsc.ConcurrentTask.create(job.any_task.task()));
|
||||
job.extractArchive() catch {
|
||||
job.error_message = "Failed to extract archive";
|
||||
};
|
||||
}
|
||||
|
||||
fn extractArchive(this: *ExtractJob) anyerror!void {
|
||||
if (this.destination) |dest| {
|
||||
if (this.glob_patterns == null and this.skip_components == 0) {
|
||||
const is_absolute = std.fs.path.isAbsolute(dest);
|
||||
var dir = if (is_absolute)
|
||||
try std.fs.openDirAbsolute(dest, .{})
|
||||
else
|
||||
try std.fs.cwd().openDir(dest, .{});
|
||||
defer dir.close();
|
||||
|
||||
this.file_count = try bun.libarchive.Archiver.extractToDir(
|
||||
this.archive_data,
|
||||
dir,
|
||||
null,
|
||||
void,
|
||||
{},
|
||||
.{ .depth_to_skip = 0 },
|
||||
);
|
||||
} else {
|
||||
try this.extractToDisk(dest);
|
||||
}
|
||||
} else {
|
||||
try this.extractToMemory();
|
||||
}
|
||||
}
|
||||
|
||||
fn extractToDisk(this: *ExtractJob, dest: []const u8) anyerror!void {
|
||||
const lib = bun.libarchive.lib;
|
||||
var reader: bun.libarchive.BufferReadStream = undefined;
|
||||
reader.init(this.archive_data);
|
||||
defer reader.deinit();
|
||||
|
||||
switch (reader.openRead()) {
|
||||
.ok => {},
|
||||
else => return error.CannotOpenArchive,
|
||||
}
|
||||
|
||||
const archive = reader.archive;
|
||||
var entry: *lib.Archive.Entry = undefined;
|
||||
var normalized_buf: bun.PathBuffer = undefined;
|
||||
|
||||
loop: while (true) {
|
||||
switch (archive.readNextHeader(&entry)) {
|
||||
.ok => {},
|
||||
.eof => break,
|
||||
.retry => continue,
|
||||
else => return error.ReadError,
|
||||
}
|
||||
|
||||
const pathname = entry.pathname();
|
||||
const kind = bun.sys.kindFromMode(entry.filetype());
|
||||
|
||||
const path_to_use = if (this.skip_components > 0) blk: {
|
||||
var tokenizer = std.mem.tokenizeScalar(u8, pathname, '/');
|
||||
for (0..this.skip_components) |_| {
|
||||
if (tokenizer.next() == null) continue :loop;
|
||||
}
|
||||
break :blk tokenizer.rest();
|
||||
} else bun.asByteSlice(pathname);
|
||||
|
||||
const normalized = bun.path.normalizeBuf(path_to_use, &normalized_buf, .auto);
|
||||
if (normalized.len == 0 or (normalized.len == 1 and normalized[0] == '.')) continue;
|
||||
if (std.fs.path.isAbsolute(normalized)) continue;
|
||||
|
||||
{
|
||||
var it = std.mem.splitScalar(u8, normalized, '/');
|
||||
while (it.next()) |segment| {
|
||||
if (std.mem.eql(u8, segment, "..")) continue :loop;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.glob_patterns) |patterns| {
|
||||
var matched = false;
|
||||
for (patterns) |pattern| {
|
||||
if (bun.glob.match(pattern, normalized).matches()) {
|
||||
matched = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!matched) continue;
|
||||
}
|
||||
|
||||
switch (kind) {
|
||||
.directory => {
|
||||
var path_buf: bun.PathBuffer = undefined;
|
||||
const dest_path = bun.path.joinAbsStringBufZ(dest, &path_buf, &.{normalized}, .auto);
|
||||
bun.makePath(std.fs.cwd(), bun.asByteSlice(dest_path)) catch {};
|
||||
},
|
||||
.file => {
|
||||
const size = entry.size();
|
||||
if (size < 0) continue;
|
||||
|
||||
var path_buf: bun.PathBuffer = undefined;
|
||||
const dest_path = bun.path.joinAbsStringBufZ(dest, &path_buf, &.{normalized}, .auto);
|
||||
const dirname = bun.path.dirname(dest_path, .auto);
|
||||
if (dirname.len > 0) bun.makePath(std.fs.cwd(), dirname) catch {};
|
||||
|
||||
const fd = bun.sys.open(dest_path, bun.O.CREAT | bun.O.WRONLY | bun.O.TRUNC, 0o644).unwrap() catch continue;
|
||||
defer fd.close();
|
||||
|
||||
if (size > 0) {
|
||||
switch (archive.readDataIntoFd(fd.cast())) {
|
||||
.ok => {},
|
||||
else => continue,
|
||||
}
|
||||
}
|
||||
this.file_count += 1;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn extractToMemory(this: *ExtractJob) anyerror!void {
|
||||
const lib = bun.libarchive.lib;
|
||||
const allocator = bun.default_allocator;
|
||||
|
||||
var reader: bun.libarchive.BufferReadStream = undefined;
|
||||
reader.init(this.archive_data);
|
||||
defer reader.deinit();
|
||||
|
||||
switch (reader.openRead()) {
|
||||
.ok => {},
|
||||
else => return error.CannotOpenArchive,
|
||||
}
|
||||
|
||||
const archive = reader.archive;
|
||||
var entry: *lib.Archive.Entry = undefined;
|
||||
var normalized_buf: bun.PathBuffer = undefined;
|
||||
var bytes_total: usize = 0;
|
||||
|
||||
loop: while (true) {
|
||||
switch (archive.readNextHeader(&entry)) {
|
||||
.ok => {},
|
||||
.eof => break,
|
||||
.retry => continue,
|
||||
else => return error.ReadError,
|
||||
}
|
||||
|
||||
const pathname = entry.pathname();
|
||||
const kind = bun.sys.kindFromMode(entry.filetype());
|
||||
if (kind != .file) continue;
|
||||
|
||||
const path_to_use = if (this.skip_components > 0) blk: {
|
||||
var tokenizer = std.mem.tokenizeScalar(u8, pathname, '/');
|
||||
for (0..this.skip_components) |_| {
|
||||
if (tokenizer.next() == null) continue :loop;
|
||||
}
|
||||
break :blk tokenizer.rest();
|
||||
} else bun.asByteSlice(pathname);
|
||||
|
||||
const normalized = bun.path.normalizeBuf(path_to_use, &normalized_buf, .auto);
|
||||
if (normalized.len == 0 or (normalized.len == 1 and normalized[0] == '.')) continue;
|
||||
if (std.fs.path.isAbsolute(normalized)) continue;
|
||||
|
||||
{
|
||||
var it = std.mem.splitScalar(u8, normalized, '/');
|
||||
while (it.next()) |segment| {
|
||||
if (std.mem.eql(u8, segment, "..")) continue :loop;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.glob_patterns) |patterns| {
|
||||
var matched = false;
|
||||
for (patterns) |pattern| {
|
||||
if (bun.glob.match(pattern, normalized).matches()) {
|
||||
matched = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!matched) continue;
|
||||
}
|
||||
|
||||
const size = entry.size();
|
||||
if (size < 0) continue;
|
||||
|
||||
const alloc_size: usize = @intCast(size);
|
||||
bytes_total += alloc_size;
|
||||
if (bytes_total > MAX_MEMORY_SIZE) return error.ArchiveTooLarge;
|
||||
|
||||
var buf = try allocator.alloc(u8, alloc_size);
|
||||
errdefer allocator.free(buf);
|
||||
|
||||
if (size > 0) {
|
||||
var total: usize = 0;
|
||||
while (total < buf.len) {
|
||||
const read = archive.readData(buf[total..]);
|
||||
if (read <= 0) {
|
||||
if (read < 0) return error.ReadError;
|
||||
break;
|
||||
}
|
||||
total += @intCast(read);
|
||||
}
|
||||
if (total < buf.len) {
|
||||
buf = allocator.realloc(buf, total) catch buf;
|
||||
}
|
||||
}
|
||||
|
||||
const key = try allocator.dupe(u8, normalized);
|
||||
errdefer allocator.free(key);
|
||||
try this.files.put(key, buf);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn runFromJS(this: *ExtractJob) void {
|
||||
const globalThis = this.vm.global;
|
||||
const promise = this.promise.swap();
|
||||
defer this.deinit();
|
||||
|
||||
if (this.error_message) |msg| {
|
||||
promise.reject(globalThis, globalThis.createErrorInstance("{s}", .{msg}));
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.destination) |_| {
|
||||
promise.resolve(globalThis, JSValue.jsNumber(this.file_count));
|
||||
} else {
|
||||
const result = JSValue.createEmptyObject(globalThis, this.files.count());
|
||||
var iter = this.files.iterator();
|
||||
while (iter.next()) |e| {
|
||||
const store = jsc.WebCore.Blob.Store.init(e.value_ptr.*, bun.default_allocator);
|
||||
const blob = jsc.WebCore.Blob.initWithStore(store, globalThis);
|
||||
result.put(globalThis, ZigString.fromUTF8(e.key_ptr.*), jsc.WebCore.Blob.new(blob).toJS(globalThis));
|
||||
}
|
||||
promise.resolve(globalThis, result);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn deinit(this: *ExtractJob) void {
|
||||
this.poll.unref(this.vm);
|
||||
if (this.destination) |d| bun.default_allocator.free(d);
|
||||
if (this.glob_patterns) |patterns| {
|
||||
for (patterns) |pattern| bun.default_allocator.free(pattern);
|
||||
bun.default_allocator.free(patterns);
|
||||
}
|
||||
bun.default_allocator.free(this.archive_data);
|
||||
|
||||
var iter = this.files.iterator();
|
||||
while (iter.next()) |e| {
|
||||
bun.default_allocator.free(e.key_ptr.*);
|
||||
if (this.destination == null and this.error_message != null) {
|
||||
bun.default_allocator.free(e.value_ptr.*);
|
||||
}
|
||||
}
|
||||
this.files.deinit();
|
||||
this.promise.deinit();
|
||||
bun.default_allocator.destroy(this);
|
||||
}
|
||||
};
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const bun = @import("bun");
|
||||
const Async = bun.Async;
|
||||
|
||||
const jsc = bun.jsc;
|
||||
const JSGlobalObject = jsc.JSGlobalObject;
|
||||
const JSValue = jsc.JSValue;
|
||||
const ZigString = jsc.ZigString;
|
||||
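ExtractJob's header loop drops entries that normalize to an absolute path or contain a `..` segment, and only regular files and directories are materialized, which is how symlinks end up silently skipped. A hedged sketch of how that behavior could be observed; it is not part of this diff and assumes a POSIX `ln` and `tar` on PATH, the same assumption the CLI-based tests below make:

```ts
import { expect, test } from "bun:test";
import { tempDir } from "harness";
import { join } from "path";

test("symlink entries are skipped on extract", async () => {
  using dir = tempDir("symlink-skip", { "src/real.txt": "data" });

  // Build an archive that contains a symlink next to a regular file.
  await Bun.$`cd ${dir} && ln -s real.txt src/link.txt && tar -cf with-link.tar src/`.quiet();

  const files = await Bun.extract(join(String(dir), "with-link.tar"));
  expect(files["src/real.txt"]).toBeInstanceOf(Blob); // regular file comes through
  expect(files["src/link.txt"]).toBeUndefined(); // symlink is silently dropped
});
```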
src/bun.js/api/TarballJob.zig (new file, 212 lines)
@@ -0,0 +1,212 @@
|
||||
const MAX_MEMORY_SIZE = 100 * 1024 * 1024;
|
||||
|
||||
pub const Compression = union(enum) {
|
||||
none: void,
|
||||
gzip: u8,
|
||||
};
|
||||
|
||||
pub const FileList = struct {
|
||||
entries: []FileEntry,
|
||||
allocator: std.mem.Allocator,
|
||||
|
||||
pub fn deinit(self: *@This()) void {
|
||||
for (self.entries) |*entry| entry.deinit(self.allocator);
|
||||
self.allocator.free(self.entries);
|
||||
}
|
||||
};
|
||||
|
||||
pub const FileEntry = struct {
|
||||
archive_path: []const u8,
|
||||
data: jsc.Node.BlobOrStringOrBuffer,
|
||||
|
||||
pub fn deinit(self: *@This(), allocator: std.mem.Allocator) void {
|
||||
allocator.free(self.archive_path);
|
||||
self.data.deinit();
|
||||
}
|
||||
};
|
||||
|
||||
fn writeEntry(
|
||||
archive: *bun.libarchive.lib.Archive,
|
||||
file_entry: FileEntry,
|
||||
allocator: std.mem.Allocator,
|
||||
) !void {
|
||||
const lib = bun.libarchive.lib;
|
||||
const entry = lib.Archive.Entry.new();
|
||||
defer entry.free();
|
||||
|
||||
const content = file_entry.data.slice();
|
||||
const path_z = try allocator.dupeZ(u8, file_entry.archive_path);
|
||||
defer allocator.free(path_z);
|
||||
|
||||
entry.setPathname(path_z);
|
||||
entry.setSize(@intCast(content.len));
|
||||
entry.setFiletype(@intFromEnum(lib.FileType.regular));
|
||||
entry.setPerm(0o644);
|
||||
entry.setMtime(@intCast(std.time.timestamp()), 0);
|
||||
|
||||
if (archive.writeHeader(entry) != .ok) return error.WriteHeaderError;
|
||||
|
||||
if (content.len > 0) {
|
||||
var offset: usize = 0;
|
||||
while (offset < content.len) {
|
||||
const written = archive.writeData(content[offset..]);
|
||||
if (written <= 0) return error.WriteDataError;
|
||||
offset += @intCast(written);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub const TarballJob = struct {
|
||||
files: FileList,
|
||||
destination: ?[]const u8 = null,
|
||||
compression: Compression = .none,
|
||||
task: jsc.WorkPoolTask = .{ .callback = &runTask },
|
||||
promise: jsc.JSPromise.Strong = .{},
|
||||
vm: *jsc.VirtualMachine,
|
||||
output_buffer: []u8 = &.{},
|
||||
bytes_written: usize = 0,
|
||||
error_message: ?[]const u8 = null,
|
||||
any_task: jsc.AnyTask,
|
||||
poll: Async.KeepAlive = .{},
|
||||
|
||||
pub const new = bun.TrivialNew(@This());
|
||||
|
||||
pub fn runTask(task: *jsc.WorkPoolTask) void {
|
||||
const job: *TarballJob = @fieldParentPtr("task", task);
|
||||
defer job.vm.enqueueTaskConcurrent(jsc.ConcurrentTask.create(job.any_task.task()));
|
||||
job.createArchive() catch {
|
||||
job.error_message = "Failed to create archive";
|
||||
};
|
||||
}
|
||||
|
||||
fn createArchive(this: *TarballJob) anyerror!void {
|
||||
const allocator = bun.default_allocator;
|
||||
const lib = bun.libarchive.lib;
|
||||
const archive = lib.Archive.writeNew();
|
||||
defer _ = archive.writeFinish();
|
||||
|
||||
if (archive.writeSetFormatUstar() != .ok) return error.ArchiveFormatError;
|
||||
|
||||
switch (this.compression) {
|
||||
.gzip => |level| {
|
||||
if (archive.writeAddFilterGzip() != .ok) return error.CompressionError;
|
||||
var level_buf: [64]u8 = undefined;
|
||||
const level_str = try std.fmt.bufPrintZ(&level_buf, "compression-level={d}", .{level});
|
||||
_ = archive.writeSetOptions(level_str);
|
||||
},
|
||||
.none => {},
|
||||
}
|
||||
|
||||
if (this.destination) |destination| {
|
||||
const path_z = try allocator.dupeZ(u8, destination);
|
||||
defer allocator.free(path_z);
|
||||
if (archive.writeOpenFilename(path_z) != .ok) return error.CannotOpenFile;
|
||||
} else {
|
||||
var estimated_size: usize = 0;
|
||||
for (this.files.entries) |entry| {
|
||||
estimated_size += 512;
|
||||
const blocks = (entry.data.slice().len + 511) / 512;
|
||||
estimated_size += blocks * 512;
|
||||
}
|
||||
const required_size = estimated_size;
|
||||
if (required_size > MAX_MEMORY_SIZE) return error.ArchiveTooLarge;
|
||||
|
||||
const buffer_size = @max((required_size + 1024) * 2, 16384);
|
||||
this.output_buffer = try allocator.alloc(u8, buffer_size);
|
||||
switch (archive.writeOpenMemory(this.output_buffer.ptr, this.output_buffer.len, &this.bytes_written)) {
|
||||
.ok => {},
|
||||
else => {
|
||||
allocator.free(this.output_buffer);
|
||||
this.output_buffer = &.{};
|
||||
return error.CannotOpenMemory;
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
for (this.files.entries) |file_entry| {
|
||||
try writeEntry(archive, file_entry, allocator);
|
||||
}
|
||||
|
||||
switch (archive.writeClose()) {
|
||||
.ok, .warn => {},
|
||||
else => return error.ArchiveCloseError,
|
||||
}
|
||||
|
||||
if (this.destination) |destination| {
|
||||
const file = (if (std.fs.path.isAbsolute(destination))
|
||||
std.fs.openFileAbsolute(destination, .{})
|
||||
else
|
||||
std.fs.cwd().openFile(destination, .{})) catch return error.CannotOpenFile;
|
||||
defer file.close();
|
||||
this.bytes_written = (file.stat() catch return error.CannotStatFile).size;
|
||||
} else {
|
||||
this.output_buffer = allocator.realloc(this.output_buffer, this.bytes_written) catch this.output_buffer;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn runFromJS(this: *TarballJob) void {
|
||||
defer this.deinit();
|
||||
if (this.vm.isShuttingDown()) return;
|
||||
|
||||
const globalThis = this.vm.global;
|
||||
const promise = this.promise.swap();
|
||||
|
||||
if (this.error_message) |err_msg| {
|
||||
promise.reject(globalThis, globalThis.createErrorInstance("{s}", .{err_msg}));
|
||||
return;
|
||||
}
|
||||
|
||||
const result_value = if (this.destination != null) blk: {
|
||||
break :blk jsc.JSValue.jsNumber(@as(f64, @floatFromInt(this.bytes_written)));
|
||||
} else blk: {
|
||||
const store = jsc.WebCore.Blob.Store.init(this.output_buffer, bun.default_allocator);
|
||||
var blob = jsc.WebCore.Blob.initWithStore(store, globalThis);
|
||||
blob.content_type = switch (this.compression) {
|
||||
.gzip => "application/gzip",
|
||||
.none => "application/x-tar",
|
||||
};
|
||||
this.output_buffer = &.{};
|
||||
break :blk jsc.WebCore.Blob.new(blob).toJS(globalThis);
|
||||
};
|
||||
|
||||
promise.resolve(globalThis, result_value);
|
||||
}
|
||||
|
||||
pub fn deinit(this: *TarballJob) void {
|
||||
this.poll.unref(this.vm);
|
||||
this.files.deinit();
|
||||
if (this.destination) |dest| bun.default_allocator.free(dest);
|
||||
this.promise.deinit();
|
||||
if (this.output_buffer.len > 0) bun.default_allocator.free(this.output_buffer);
|
||||
bun.destroy(this);
|
||||
}
|
||||
|
||||
pub fn create(
|
||||
vm: *jsc.VirtualMachine,
|
||||
globalThis: *jsc.JSGlobalObject,
|
||||
files: FileList,
|
||||
destination: ?[]const u8,
|
||||
compression: Compression,
|
||||
) *TarballJob {
|
||||
var job = TarballJob.new(.{
|
||||
.files = files,
|
||||
.destination = destination,
|
||||
.compression = compression,
|
||||
.vm = vm,
|
||||
.any_task = jsc.AnyTask.New(@This(), &runFromJS).init(undefined),
|
||||
});
|
||||
|
||||
job.promise = jsc.JSPromise.Strong.init(globalThis);
|
||||
job.any_task = jsc.AnyTask.New(@This(), &runFromJS).init(job);
|
||||
job.poll.ref(vm);
|
||||
jsc.WorkPool.schedule(&job.task);
|
||||
|
||||
return job;
|
||||
}
|
||||
};
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const bun = @import("bun");
|
||||
const Async = bun.Async;
|
||||
const jsc = bun.jsc;
|
||||
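TarballJob sizes the in-memory output from a 512-byte header plus 512-byte-aligned data blocks per entry and refuses to build anything past the 100MB `MAX_MEMORY_SIZE` cap in memory, so larger archives have to go through `destination`. A rough sketch of choosing between the two modes; the threshold mirrors the constant above, but the file names and payload are illustrative:

```ts
const files = { "dump.bin": new Uint8Array(64 * 1024 * 1024) }; // illustrative payload

// Rough mirror of the Zig estimate: 512-byte header plus data padded to 512-byte blocks.
const estimate = Object.values(files).reduce(
  (sum, data) => sum + 512 + Math.ceil(data.byteLength / 512) * 512,
  0,
);

if (estimate > 100 * 1024 * 1024) {
  // Too big for the pre-allocated in-memory buffer: write straight to disk instead.
  const bytes = await Bun.tarball({ files, destination: "./dump.tar" });
  console.log(`wrote ${bytes} bytes to disk`);
} else {
  const blob = await Bun.tarball({ files });
  console.log(`in-memory archive is ${blob.size} bytes`);
}
```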
@@ -44,6 +44,7 @@
    macro(createParsedShellScript) \
    macro(createShellInterpreter) \
    macro(deflateSync) \
    macro(extract) \
    macro(file) \
    macro(fs) \
    macro(gc) \
@@ -68,6 +69,7 @@
    macro(spawn) \
    macro(spawnSync) \
    macro(stringWidth) \
    macro(tarball) \
    macro(udpSocket) \
    macro(which) \
    macro(write) \

@@ -796,6 +796,8 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj
    stdout BunObject_lazyPropCb_wrap_stdout DontDelete|PropertyCallback
    stringWidth Generated::BunObject::jsStringWidth DontDelete|Function 2
    stripANSI jsFunctionBunStripANSI DontDelete|Function 1
    extract BunObject_callback_extract DontDelete|Function 2
    tarball BunObject_callback_tarball DontDelete|Function 1
    unsafe BunObject_lazyPropCb_wrap_unsafe DontDelete|PropertyCallback
    version constructBunVersion ReadOnly|DontDelete|PropertyCallback
    which BunObject_callback_which DontDelete|Function 1
test/js/bun/tarball/extract.test.ts (new file, 391 lines)
@@ -0,0 +1,391 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { tempDir } from "harness";
|
||||
import { join } from "path";
|
||||
|
||||
describe("Bun.extract()", () => {
|
||||
test("extracts tar archive to memory", async () => {
|
||||
// First create a tarball
|
||||
const tarBlob = await Bun.tarball({
|
||||
files: {
|
||||
"hello.txt": "hello world",
|
||||
"data/test.json": '{"key":"value"}',
|
||||
},
|
||||
});
|
||||
|
||||
// Extract it to memory
|
||||
const files = await Bun.extract(tarBlob);
|
||||
|
||||
expect(files).toBeInstanceOf(Object);
|
||||
expect(Object.keys(files)).toHaveLength(2);
|
||||
expect(files["hello.txt"]).toBeInstanceOf(Blob);
|
||||
expect(files["data/test.json"]).toBeInstanceOf(Blob);
|
||||
|
||||
// Verify content
|
||||
expect(await files["hello.txt"].text()).toBe("hello world");
|
||||
expect(await files["data/test.json"].text()).toBe('{"key":"value"}');
|
||||
});
|
||||
|
||||
test("extracts tar archive to disk", async () => {
|
||||
// Create a tarball
|
||||
const tarBlob = await Bun.tarball({
|
||||
files: {
|
||||
"readme.txt": "This is a readme",
|
||||
"src/index.js": "console.log('hello');",
|
||||
},
|
||||
});
|
||||
|
||||
using dir = tempDir("extract-test", {});
|
||||
|
||||
// Extract to disk
|
||||
const fileCount = await Bun.extract(tarBlob, {
|
||||
destination: String(dir),
|
||||
});
|
||||
|
||||
expect(fileCount).toBe(2);
|
||||
|
||||
// Verify files exist on disk
|
||||
const readmeContent = await Bun.file(join(dir, "readme.txt")).text();
|
||||
expect(readmeContent).toBe("This is a readme");
|
||||
|
||||
const indexContent = await Bun.file(join(dir, "src/index.js")).text();
|
||||
expect(indexContent).toBe("console.log('hello');");
|
||||
});
|
||||
|
||||
test("extracts from file path", async () => {
|
||||
using dir = tempDir("extract-path-test", {});
|
||||
|
||||
// Create and save a tarball
|
||||
const tarBlob = await Bun.tarball({
|
||||
files: {
|
||||
"test.txt": "content",
|
||||
},
|
||||
});
|
||||
|
||||
const tarPath = join(dir, "archive.tar");
|
||||
await Bun.write(tarPath, tarBlob);
|
||||
|
||||
// Extract from path
|
||||
const files = await Bun.extract(tarPath);
|
||||
|
||||
expect(files["test.txt"]).toBeInstanceOf(Blob);
|
||||
expect(await files["test.txt"].text()).toBe("content");
|
||||
});
|
||||
|
||||
test("handles skipPathComponents option", async () => {
|
||||
const tarBlob = await Bun.tarball({
|
||||
files: {
|
||||
"a/b/c/file.txt": "nested content",
|
||||
"a/b/other.txt": "other content",
|
||||
},
|
||||
});
|
||||
|
||||
// Skip first 2 components (a/b/)
|
||||
const files = await Bun.extract(tarBlob, {
|
||||
skipPathComponents: 2,
|
||||
});
|
||||
|
||||
expect(files["c/file.txt"]).toBeInstanceOf(Blob);
|
||||
expect(files["other.txt"]).toBeInstanceOf(Blob);
|
||||
expect(await files["c/file.txt"].text()).toBe("nested content");
|
||||
expect(await files["other.txt"].text()).toBe("other content");
|
||||
});
|
||||
|
||||
test("works with gzipped tar", async () => {
|
||||
const tarBlob = await Bun.tarball({
|
||||
files: {
|
||||
"compressed.txt": "x".repeat(1000),
|
||||
},
|
||||
compress: "gzip",
|
||||
});
|
||||
|
||||
const files = await Bun.extract(tarBlob);
|
||||
|
||||
expect(files["compressed.txt"]).toBeInstanceOf(Blob);
|
||||
expect(await files["compressed.txt"].text()).toBe("x".repeat(1000));
|
||||
});
|
||||
|
||||
test("throws for invalid archive", async () => {
|
||||
await expect(Bun.extract(new Blob(["not a tar file"]))).rejects.toThrow();
|
||||
});
|
||||
|
||||
test("throws when no arguments provided", () => {
|
||||
expect(() => {
|
||||
// @ts-expect-error - testing invalid args
|
||||
Bun.extract();
|
||||
}).toThrow();
|
||||
});
|
||||
|
||||
test("throws for invalid archive type", () => {
|
||||
expect(() => {
|
||||
// @ts-expect-error - testing invalid args
|
||||
Bun.extract(123);
|
||||
}).toThrow();
|
||||
});
|
||||
|
||||
test("throws for non-existent file path", () => {
|
||||
expect(() => Bun.extract("/this/path/does/not/exist.tar")).toThrow();
|
||||
});
|
||||
|
||||
test("roundtrip: create tarball and extract to memory", async () => {
|
||||
const original = {
|
||||
"README.md": "# My Project\n\nThis is a test project.",
|
||||
"src/index.ts": 'export const hello = "world";\n',
|
||||
"src/utils/helper.ts": "export function add(a: number, b: number) { return a + b; }",
|
||||
"package.json": '{\n "name": "test",\n "version": "1.0.0"\n}',
|
||||
};
|
||||
|
||||
// Create tarball
|
||||
const tarBlob = await Bun.tarball({ files: original });
|
||||
|
||||
// Extract it back
|
||||
const extracted = await Bun.extract(tarBlob);
|
||||
|
||||
// Verify all files exist with correct content
|
||||
expect(Object.keys(extracted)).toHaveLength(4);
|
||||
for (const [path, content] of Object.entries(original)) {
|
||||
expect(extracted[path]).toBeInstanceOf(Blob);
|
||||
expect(await extracted[path].text()).toBe(content);
|
||||
}
|
||||
});
|
||||
|
||||
test("roundtrip: create gzipped tarball and extract", async () => {
|
||||
const original = {
|
||||
"file1.txt": "a".repeat(1000),
|
||||
"file2.txt": "b".repeat(1000),
|
||||
};
|
||||
|
||||
// Create gzipped tarball
|
||||
const tarBlob = await Bun.tarball({ files: original, compress: "gzip" });
|
||||
|
||||
// Verify it's actually compressed (should be much smaller)
|
||||
expect(tarBlob.size).toBeLessThan(2000);
|
||||
|
||||
// Extract it back
|
||||
const extracted = await Bun.extract(tarBlob);
|
||||
|
||||
// Verify content
|
||||
expect(await extracted["file1.txt"].text()).toBe("a".repeat(1000));
|
||||
expect(await extracted["file2.txt"].text()).toBe("b".repeat(1000));
|
||||
});
|
||||
|
||||
test("roundtrip: create tarball, save to disk, extract from disk", async () => {
|
||||
using dir = tempDir("roundtrip-disk", {});
|
||||
|
||||
const original = {
|
||||
"test.txt": "Hello from disk!",
|
||||
"nested/file.txt": "Nested content",
|
||||
};
|
||||
|
||||
// Create tarball and save to disk
|
||||
const tarBlob = await Bun.tarball({ files: original });
|
||||
const tarPath = join(dir, "archive.tar");
|
||||
await Bun.write(tarPath, tarBlob);
|
||||
|
||||
// Extract from disk path
|
||||
const extracted = await Bun.extract(tarPath);
|
||||
|
||||
// Verify content
|
||||
expect(await extracted["test.txt"].text()).toBe("Hello from disk!");
|
||||
expect(await extracted["nested/file.txt"].text()).toBe("Nested content");
|
||||
});
|
||||
|
||||
test("roundtrip: tarball with destination, extract with destination", async () => {
|
||||
using createDir = tempDir("roundtrip-create", {});
|
||||
using extractDir = tempDir("roundtrip-extract", {});
|
||||
|
||||
const original = {
|
||||
"a.txt": "File A",
|
||||
"b.txt": "File B",
|
||||
};
|
||||
|
||||
// Create tarball with destination (to disk)
|
||||
const tarPath = join(createDir, "output.tar");
|
||||
await Bun.tarball({ files: original, destination: tarPath });
|
||||
|
||||
// Verify tarball was created
|
||||
expect(await Bun.file(tarPath).exists()).toBe(true);
|
||||
|
||||
// Extract to another directory
|
||||
const fileCount = await Bun.extract(tarPath, {
|
||||
destination: String(extractDir),
|
||||
});
|
||||
|
||||
expect(fileCount).toBe(2);
|
||||
|
||||
// Verify files
|
||||
expect(await Bun.file(join(extractDir, "a.txt")).text()).toBe("File A");
|
||||
expect(await Bun.file(join(extractDir, "b.txt")).text()).toBe("File B");
|
||||
});
|
||||
|
||||
test("roundtrip: extract with skipPathComponents", async () => {
|
||||
const original = {
|
||||
"project/src/main.ts": "main content",
|
||||
"project/src/lib/utils.ts": "utils content",
|
||||
"project/tests/test.ts": "test content",
|
||||
};
|
||||
|
||||
const tarBlob = await Bun.tarball({ files: original });
|
||||
|
||||
// Extract skipping first component (project/)
|
||||
const extracted = await Bun.extract(tarBlob, {
|
||||
skipPathComponents: 1,
|
||||
});
|
||||
|
||||
// Verify paths are stripped
|
||||
expect(extracted["src/main.ts"]).toBeInstanceOf(Blob);
|
||||
expect(extracted["src/lib/utils.ts"]).toBeInstanceOf(Blob);
|
||||
expect(extracted["tests/test.ts"]).toBeInstanceOf(Blob);
|
||||
expect(await extracted["src/main.ts"].text()).toBe("main content");
|
||||
});
|
||||
|
||||
test("accepts Buffer as input", async () => {
|
||||
const tarBlob = await Bun.tarball({
|
||||
files: { "test.txt": "buffer test" },
|
||||
});
|
||||
|
||||
// Convert to Buffer
|
||||
const buffer = Buffer.from(await tarBlob.arrayBuffer());
|
||||
|
||||
// Extract from Buffer
|
||||
const extracted = await Bun.extract(buffer);
|
||||
|
||||
expect(await extracted["test.txt"].text()).toBe("buffer test");
|
||||
});
|
||||
|
||||
test("accepts ArrayBuffer as input", async () => {
|
||||
const tarBlob = await Bun.tarball({
|
||||
files: { "test.txt": "arraybuffer test" },
|
||||
});
|
||||
|
||||
// Convert to ArrayBuffer
|
||||
const arrayBuffer = await tarBlob.arrayBuffer();
|
||||
|
||||
// Extract from ArrayBuffer
|
||||
const extracted = await Bun.extract(new Uint8Array(arrayBuffer));
|
||||
|
||||
expect(await extracted["test.txt"].text()).toBe("arraybuffer test");
|
||||
});
|
||||
|
||||
test("handles archives with directory entries", async () => {
|
||||
using dir = tempDir("dir-entries-test", {});
|
||||
|
||||
// Create a tarball that includes directory entries
|
||||
const tarBlob = await Bun.tarball({
|
||||
files: {
|
||||
"dir1/file1.txt": "content1",
|
||||
"dir1/dir2/file2.txt": "content2",
|
||||
"dir3/file3.txt": "content3",
|
||||
},
|
||||
});
|
||||
|
||||
// Extract to disk
|
||||
const count = await Bun.extract(tarBlob, {
|
||||
destination: String(dir),
|
||||
});
|
||||
|
||||
// Should count all files
|
||||
expect(count).toBeGreaterThanOrEqual(3);
|
||||
|
||||
// Verify nested directories were created
|
||||
expect(await Bun.file(join(dir, "dir1/file1.txt")).text()).toBe("content1");
|
||||
expect(await Bun.file(join(dir, "dir1/dir2/file2.txt")).text()).toBe("content2");
|
||||
expect(await Bun.file(join(dir, "dir3/file3.txt")).text()).toBe("content3");
|
||||
});
|
||||
|
||||
test("handles skipPathComponents resulting in empty paths", async () => {
|
||||
const tarBlob = await Bun.tarball({
|
||||
files: {
|
||||
"a/file.txt": "content",
|
||||
"b": "another", // Only one component
|
||||
},
|
||||
});
|
||||
|
||||
// Skip 2 components - "b" entry should be skipped entirely
|
||||
const extracted = await Bun.extract(tarBlob, {
|
||||
skipPathComponents: 2,
|
||||
});
|
||||
|
||||
// Only entries with sufficient path depth should remain
|
||||
expect(Object.keys(extracted)).not.toContain("b");
|
||||
expect(Object.keys(extracted)).not.toContain("");
|
||||
});
|
||||
|
||||
test("extracts archive created by tar CLI", async () => {
|
||||
using dir = tempDir("cli-tar-test", {
|
||||
"source/file1.txt": "content1",
|
||||
"source/nested/file2.txt": "content2",
|
||||
});
|
||||
|
||||
const tarPath = join(dir, "archive.tar");
|
||||
|
||||
// Create tarball using system tar command
|
||||
await Bun.$`cd ${dir} && tar -cf archive.tar source/`.quiet();
|
||||
|
||||
// Extract using Bun.extract
|
||||
const extracted = await Bun.extract(tarPath);
|
||||
|
||||
// Verify files were extracted
|
||||
expect(extracted["source/file1.txt"]).toBeInstanceOf(Blob);
|
||||
expect(extracted["source/nested/file2.txt"]).toBeInstanceOf(Blob);
|
||||
expect(await extracted["source/file1.txt"].text()).toBe("content1");
|
||||
expect(await extracted["source/nested/file2.txt"].text()).toBe("content2");
|
||||
});
|
||||
|
||||
test("handles archives with trailing slashes in paths", async () => {
|
||||
using dir = tempDir("trailing-slash-test", {});
|
||||
|
||||
// Create using tar CLI which includes directory entries with trailing slashes
|
||||
const srcDir = join(dir, "src");
|
||||
await Bun.$`mkdir -p ${srcDir}/a/b`.quiet();
|
||||
await Bun.write(join(srcDir, "a/b/file.txt"), "content");
|
||||
const tarPath = join(dir, "test.tar");
|
||||
await Bun.$`cd ${dir} && tar -cf test.tar src/`.quiet();
|
||||
|
||||
// Extract to memory
|
||||
const extracted = await Bun.extract(tarPath);
|
||||
|
||||
// Should have the file (directories might not be in the result)
|
||||
expect(extracted["src/a/b/file.txt"]).toBeInstanceOf(Blob);
|
||||
expect(await extracted["src/a/b/file.txt"].text()).toBe("content");
|
||||
});
|
||||
|
||||
test("extracts large file correctly", async () => {
|
||||
const largeContent = "x".repeat(1024 * 1024); // 1MB
|
||||
|
||||
const tarBlob = await Bun.tarball({
|
||||
files: {
|
||||
"large.txt": largeContent,
|
||||
},
|
||||
});
|
||||
|
||||
const extracted = await Bun.extract(tarBlob);
|
||||
|
||||
expect(await extracted["large.txt"].text()).toBe(largeContent);
|
||||
});
|
||||
|
||||
test("handles empty archive", async () => {
|
||||
// Create an archive with no files
|
||||
using dir = tempDir("empty-tar-test", {});
|
||||
const tarPath = join(dir, "empty.tar");
|
||||
await Bun.$`tar -cf ${tarPath} -T /dev/null`.quiet();
|
||||
|
||||
const extracted = await Bun.extract(tarPath);
|
||||
|
||||
expect(Object.keys(extracted)).toHaveLength(0);
|
||||
});
|
||||
|
||||
test("validates skipPathComponents range", async () => {
|
||||
const tarBlob = await Bun.tarball({
|
||||
files: {
|
||||
"test.txt": "content",
|
||||
},
|
||||
});
|
||||
|
||||
// Should reject values > 128
|
||||
expect(() =>
|
||||
Bun.extract(tarBlob, {
|
||||
skipPathComponents: 129,
|
||||
}),
|
||||
).toThrow("skipPathComponents must be between 0 and 128");
|
||||
});
|
||||
});
|
||||
test/js/bun/tarball/tarball.test.ts (new file, 175 lines)
@@ -0,0 +1,175 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { tempDir } from "harness";
|
||||
import { join } from "path";
|
||||
|
||||
describe("Bun.tarball()", () => {
|
||||
test("creates tar archive from string content", async () => {
|
||||
const blob = await Bun.tarball({
|
||||
files: {
|
||||
"hello.txt": "hello world",
|
||||
},
|
||||
});
|
||||
|
||||
expect(blob).toBeInstanceOf(Blob);
|
||||
expect(blob.type).toBe("application/x-tar");
|
||||
expect(blob.size).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test("creates tar from multiple files", async () => {
|
||||
const blob = await Bun.tarball({
|
||||
files: {
|
||||
"file1.txt": "content 1",
|
||||
"subdir/file2.txt": "content 2",
|
||||
"file3.txt": "content 3",
|
||||
},
|
||||
});
|
||||
|
||||
expect(blob.size).toBeGreaterThan(0);
|
||||
|
||||
using dir = tempDir("tarball-test", {});
|
||||
const tarPath = join(dir, "output.tar");
|
||||
await Bun.write(tarPath, blob);
|
||||
|
||||
const { exitCode, stdout } = Bun.spawnSync(["tar", "-tf", tarPath], { stdout: "pipe" });
|
||||
|
||||
expect(exitCode).toBe(0);
|
||||
const files = new TextDecoder().decode(stdout).split("\n");
|
||||
expect(files).toContain("file1.txt");
|
||||
expect(files).toContain("subdir/file2.txt");
|
||||
expect(files).toContain("file3.txt");
|
||||
});
|
||||
|
||||
test("accepts Blob inputs", async () => {
|
||||
const tarBlob = await Bun.tarball({
|
||||
files: {
|
||||
"file1.txt": new Blob(["content 1"]),
|
||||
"file2.txt": new Blob(["content 2"]),
|
||||
},
|
||||
});
|
||||
|
||||
expect(tarBlob).toBeInstanceOf(Blob);
|
||||
});
|
||||
|
||||
test("handles large files", async () => {
|
||||
const largeContent = "x".repeat(5 * 1024 * 1024);
|
||||
const tarBlob = await Bun.tarball({
|
||||
files: { "large.txt": largeContent },
|
||||
});
|
||||
|
||||
expect(tarBlob.size).toBeGreaterThan(5 * 1024 * 1024);
|
||||
});
|
||||
|
||||
test("extracts correctly with tar CLI and verifies content", async () => {
|
||||
const blob = await Bun.tarball({
|
||||
files: {
|
||||
"readme.txt": "This is a readme file",
|
||||
"data/config.json": '{"name":"test","version":"1.0"}',
|
||||
"scripts/run.sh": "#!/bin/bash\necho 'Hello World'",
|
||||
},
|
||||
});
|
||||
|
||||
using dir = tempDir("tarball-extract", {});
|
||||
const tarPath = join(dir, "archive.tar");
|
||||
await Bun.write(tarPath, blob);
|
||||
|
||||
// Extract the tar file
|
||||
const { exitCode } = Bun.spawnSync(["tar", "-xf", tarPath, "-C", String(dir)], {
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
// Verify extracted files exist and have correct content
|
||||
const readmeContent = await Bun.file(join(dir, "readme.txt")).text();
|
||||
expect(readmeContent).toBe("This is a readme file");
|
||||
|
||||
const configContent = await Bun.file(join(dir, "data/config.json")).text();
|
||||
expect(configContent).toBe('{"name":"test","version":"1.0"}');
|
||||
|
||||
const scriptContent = await Bun.file(join(dir, "scripts/run.sh")).text();
|
||||
expect(scriptContent).toBe("#!/bin/bash\necho 'Hello World'");
|
||||
});
|
||||
|
||||
test("creates gzip compressed tar with string format", async () => {
|
||||
const blob = await Bun.tarball({
|
||||
files: {
|
||||
"test.txt": "hello world",
|
||||
},
|
||||
compress: "gzip",
|
||||
});
|
||||
|
||||
expect(blob).toBeInstanceOf(Blob);
|
||||
expect(blob.type).toBe("application/gzip");
|
||||
|
||||
using dir = tempDir("tarball-gzip", {});
|
||||
const tarPath = join(dir, "archive.tar.gz");
|
||||
await Bun.write(tarPath, blob);
|
||||
|
||||
// Extract with gzip flag
|
||||
const { exitCode } = Bun.spawnSync(["tar", "-xzf", tarPath, "-C", String(dir)], {
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
const content = await Bun.file(join(dir, "test.txt")).text();
|
||||
expect(content).toBe("hello world");
|
||||
});
|
||||
|
||||
test("creates gzip compressed tar with level option", async () => {
|
||||
const blob = await Bun.tarball({
|
||||
files: {
|
||||
"data.txt": "x".repeat(1000),
|
||||
},
|
||||
compress: { type: "gzip", level: 9 },
|
||||
});
|
||||
|
||||
expect(blob).toBeInstanceOf(Blob);
|
||||
|
||||
using dir = tempDir("tarball-gzip-level", {});
|
||||
const tarPath = join(dir, "archive.tar.gz");
|
||||
await Bun.write(tarPath, blob);
|
||||
|
||||
const { exitCode } = Bun.spawnSync(["tar", "-xzf", tarPath, "-C", String(dir)], {
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
const content = await Bun.file(join(dir, "data.txt")).text();
|
||||
expect(content).toBe("x".repeat(1000));
|
||||
});
|
||||
|
||||
test("throws error for invalid compression level", () => {
|
||||
expect(() =>
|
||||
Bun.tarball({
|
||||
files: {
|
||||
"test.txt": "hello world",
|
||||
},
|
||||
compress: { type: "gzip", level: 100 },
|
||||
}),
|
||||
).toThrow("compression level must be 0-9");
|
||||
});
|
||||
|
||||
test("throws error for negative compression level", () => {
|
||||
expect(() =>
|
||||
Bun.tarball({
|
||||
files: {
|
||||
"test.txt": "hello world",
|
||||
},
|
||||
compress: { type: "gzip", level: -5 },
|
||||
}),
|
||||
).toThrow("compression level must be 0-9");
|
||||
});
|
||||
|
||||
test("throws error for invalid compress type", () => {
|
||||
expect(() =>
|
||||
Bun.tarball({
|
||||
files: {
|
||||
"test.txt": "hello world",
|
||||
},
|
||||
compress: { type: "bzip2" },
|
||||
}),
|
||||
).toThrow("Only 'gzip' compression supported");
|
||||
});
|
||||
});
|
||||