bun-types: add static modifiers to static S3Client methods (#18818) (#18819)

Co-authored-by: Alistair Smith <hi@alistair.sh>
Author: Teodor Atroshenko
Date: 2025-04-14 20:57:17 +02:00
Committed by: GitHub
Parent: ab431f158a
Commit: 7e03e5e712


@@ -9,17 +9,30 @@ declare module "bun" {
* Write a chunk of data to the file.
*
* If the file descriptor is not writable yet, the data is buffered.
*
* @param chunk The data to write
* @returns Number of bytes written
*/
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
/**
* Flush the internal buffer, committing the data to disk or the pipe.
*
* @returns Number of bytes flushed or a Promise resolving to the number of bytes
*/
flush(): number | Promise<number>;
/**
* Close the file descriptor. This also flushes the internal buffer.
*
* @param error Optional error to associate with the close operation
* @returns Number of bytes written or a Promise resolving to the number of bytes
*/
end(error?: Error): number | Promise<number>;
/**
* Start the file sink with provided options.
*
* @param options Configuration options for the file sink
*/
start(options?: {
/**
* Preallocate an internal buffer of this size
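// A minimal usage sketch for the FileSink methods documented above, assuming the
// sink comes from Bun.file(path).writer(); the file name and highWaterMark value
// are illustrative, not taken from this diff.
import { file } from "bun";

const sink = file("output.log").writer({ highWaterMark: 1024 * 1024 });
sink.write("first chunk\n"); // buffered if the descriptor is not writable yet
sink.write("second chunk\n");
await sink.flush(); // commit the buffered bytes to disk
await sink.end(); // flush whatever remains and close the descriptor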
@@ -63,19 +76,29 @@ declare module "bun" {
* Write a chunk of data to the network.
*
* If the network is not writable yet, the data is buffered.
*
* @param chunk The data to write
* @returns Number of bytes written
*/
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
/**
* Flush the internal buffer, committing the data to the network.
*
* @returns Number of bytes flushed or a Promise resolving to the number of bytes
*/
flush(): number | Promise<number>;
/**
* Finish the upload. This also flushes the internal buffer.
*
* @param error Optional error to associate with the end operation
* @returns Number of bytes written or a Promise resolving to the number of bytes
*/
end(error?: Error): number | Promise<number>;
/**
* Get the stat of the file.
*
* @returns Promise resolving to the file stats
*/
stat(): Promise<import("node:fs").Stats>;
}
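// A minimal sketch of streaming an upload through the network sink documented
// above, assuming it is obtained from S3File.writer(); the writer options shown
// (partSize, queueSize, retry) and the bucket name are assumptions, not taken
// from this diff.
import { S3Client } from "bun";

const bucket = new S3Client({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
  bucket: "my-bucket", // placeholder bucket name
});

const upload = bucket.file("logs/app.log").writer({
  partSize: 5 * 1024 * 1024, // assumed multipart part size option
  queueSize: 4,
  retry: 3,
});
upload.write("line 1\n"); // buffered until the network is writable
upload.write("line 2\n");
await upload.flush(); // push buffered bytes to the network
await upload.end(); // finish the upload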
@@ -649,7 +672,7 @@ declare module "bun" {
contents?: {
/** The algorithm that was used to create a checksum of the object. */
checksumAlgorithm?: "CRC32" | "CRC32C" | "SHA1" | "SHA256" | "CRC64NVME";
/** The checksum type that is used to calculate the object's checksum value. */
checksumType?: "COMPOSITE" | "FULL_OBJECT";
/**
* The entity tag is a hash of the object. The ETag reflects changes only to the contents of an object, not its metadata. The ETag may or may not be an MD5 digest of the object data. Whether or not it is depends on how the object was created and how it is encrypted as described below:
@@ -748,6 +771,7 @@ declare module "bun" {
*
* @param options The default options to use for the S3 client. Can be
* overridden by passing options to the methods.
* @returns A new S3Client instance
*
* ## Keep S3 credentials in a single instance
*
@@ -779,20 +803,49 @@ declare module "bun" {
/**
* Creates an S3File instance for the given path.
*
* @param path The path to the file in the bucket
* @param options Additional S3 options to override defaults
* @returns An S3File instance
*
* @example
* const file = bucket.file("image.jpg");
* await file.write(imageData);
*
* const configFile = bucket.file("config.json", {
* type: "application/json",
* acl: "private"
* });
*/
file(path: string, options?: S3Options): S3File;
/**
* Creates an S3File instance for the given path.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and configuration options
* @returns An S3File instance
*
* @example
* const file = S3Client.file("image.jpg", credentials);
* await file.write(imageData);
*
* const configFile = S3Client.file("config.json", {
* ...credentials,
* type: "application/json",
* acl: "private"
* });
*/
static file(path: string, options?: S3Options): S3File;
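// A short sketch contrasting the instance and static forms of file() declared
// above: the instance reuses the options given to the constructor, while the
// static form takes credentials on every call. Credential values and paths are
// placeholders.
import { S3Client, type S3Options } from "bun";

const credentials: S3Options = {
  accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
  bucket: "my-bucket",
};

const bucket = new S3Client(credentials);
const viaInstance = bucket.file("image.jpg"); // options come from the constructor
const viaStatic = S3Client.file("image.jpg", credentials); // options passed explicitly
await viaStatic.write("..."); // both forms return the same kind of S3File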
/**
* Writes data directly to a path in the bucket.
* Supports strings, buffers, streams, and web API types.
*
* @param path The path to the file in the bucket
* @param data The data to write to the file
* @param options Additional S3 options to override defaults
* @returns The number of bytes written
*
* @example
* // Write string
* await bucket.write("hello.txt", "Hello World");
@@ -830,10 +883,64 @@ declare module "bun" {
options?: S3Options,
): Promise<number>;
/**
* Writes data directly to a path in the bucket.
* Supports strings, buffers, streams, and web API types.
*
* @param path The path to the file in the bucket
* @param data The data to write to the file
* @param options S3 credentials and configuration options
* @returns The number of bytes written
*
* @example
* // Write string
* await S3Client.write("hello.txt", "Hello World", credentials);
*
* // Write JSON with type
* await S3Client.write(
* "data.json",
* JSON.stringify({hello: "world"}),
* {
* ...credentials,
* type: "application/json"
* }
* );
*
* // Write from fetch
* const res = await fetch("https://example.com/data");
* await S3Client.write("data.bin", res, credentials);
*
* // Write with ACL
* await S3Client.write("public.html", html, {
* ...credentials,
* acl: "public-read",
* type: "text/html"
* });
*/
static write(
path: string,
data:
| string
| ArrayBufferView
| ArrayBuffer
| SharedArrayBuffer
| Request
| Response
| BunFile
| S3File
| Blob
| File,
options?: S3Options,
): Promise<number>;
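// A minimal sketch of the static write() overloads documented above, including
// use of the returned byte count; the credentials object and paths are placeholders.
import { S3Client } from "bun";

const credentials = {
  accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
  bucket: "my-bucket",
};

// Plain string body
const written = await S3Client.write("hello.txt", "Hello World", credentials);
console.log(`wrote ${written} bytes`);

// Pipe a fetch Response straight into the bucket
const res = await fetch("https://example.com/data");
await S3Client.write("data.bin", res, credentials);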
/**
* Generate a presigned URL for temporary access to a file.
* Useful for generating upload/download URLs without exposing credentials.
*
* @param path The path to the file in the bucket
* @param options Options for generating the presigned URL
* @returns A presigned URL string
*
* @example
* // Download URL
* const downloadUrl = bucket.presign("file.pdf", {
@@ -856,9 +963,46 @@ declare module "bun" {
*/
presign(path: string, options?: S3FilePresignOptions): string;
/**
* Generate a presigned URL for temporary access to a file.
* Useful for generating upload/download URLs without exposing credentials.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and presigned URL configuration
* @returns A presigned URL string
*
* @example
* // Download URL
* const downloadUrl = S3Client.presign("file.pdf", {
* ...credentials,
* expiresIn: 3600 // 1 hour
* });
*
* // Upload URL
* const uploadUrl = S3Client.presign("uploads/image.jpg", {
* ...credentials,
* method: "PUT",
* expiresIn: 3600,
* type: "image/jpeg",
* acl: "public-read"
* });
*
* // Long-lived public URL
* const publicUrl = S3Client.presign("public/doc.pdf", {
* ...credentials,
* expiresIn: 7 * 24 * 60 * 60, // 7 days
* acl: "public-read"
* });
*/
static presign(path: string, options?: S3FilePresignOptions): string;
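// A small sketch of the static presign() documented above: it is synchronous
// and returns a URL string that can be handed to a client without exposing the
// credentials. Paths and expiry values are illustrative.
import { S3Client } from "bun";

const credentials = {
  accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
  bucket: "my-bucket",
};

// Short-lived download link
const downloadUrl = S3Client.presign("reports/q1.pdf", {
  ...credentials,
  expiresIn: 15 * 60, // 15 minutes
});

// PUT upload link a browser can use directly
const uploadUrl = S3Client.presign("uploads/avatar.png", {
  ...credentials,
  method: "PUT",
  expiresIn: 3600,
  type: "image/png",
});
console.log(downloadUrl, uploadUrl);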
/**
* Delete a file from the bucket.
*
* @param path The path to the file in the bucket
* @param options Additional S3 options to override defaults
* @returns A promise that resolves when deletion is complete
*
* @example
* // Simple delete
* await bucket.unlink("old-file.txt");
@@ -872,12 +1016,80 @@ declare module "bun" {
* }
*/
unlink(path: string, options?: S3Options): Promise<void>;
/**
* Delete a file from the bucket.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and configuration options
* @returns A promise that resolves when deletion is complete
*
* @example
* // Simple delete
* await S3Client.unlink("old-file.txt", credentials);
*
* // With error handling
* try {
* await S3Client.unlink("file.dat", credentials);
* console.log("File deleted");
* } catch (err) {
* console.error("Delete failed:", err);
* }
*/
static unlink(path: string, options?: S3Options): Promise<void>;
/**
* Delete a file from the bucket.
* Alias for {@link S3Client.unlink}.
*
* @param path The path to the file in the bucket
* @param options Additional S3 options to override defaults
* @returns A promise that resolves when deletion is complete
*
* @example
* // Simple delete
* await bucket.delete("old-file.txt");
*
* // With error handling
* try {
* await bucket.delete("file.dat");
* console.log("File deleted");
* } catch (err) {
* console.error("Delete failed:", err);
* }
*/
delete(path: string, options?: S3Options): Promise<void>;
/**
* Delete a file from the bucket.
* Alias for {@link S3Client.unlink}.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and configuration options
* @returns A promise that resolves when deletion is complete
*
* @example
* // Simple delete
* await S3Client.delete("old-file.txt", credentials);
*
* // With error handling
* try {
* await S3Client.delete("file.dat", credentials);
* console.log("File deleted");
* } catch (err) {
* console.error("Delete failed:", err);
* }
*/
static delete(path: string, options?: S3Options): Promise<void>;
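// A minimal sketch showing that delete() is an alias for unlink(), per the
// docblocks above; the two calls below perform the same operation. Paths and
// credentials are placeholders.
import { S3Client } from "bun";

const credentials = {
  accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
  bucket: "my-bucket",
};

await S3Client.unlink("tmp/session-1.json", credentials);
await S3Client.delete("tmp/session-2.json", credentials); // same operation, alias name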
/**
* Get the size of a file in bytes.
* Uses HEAD request to efficiently get size.
*
* @param path The path to the file in the bucket
* @param options Additional S3 options to override defaults
* @returns A promise that resolves to the file size in bytes
*
* @example
* // Get size
* const bytes = await bucket.size("video.mp4");
@@ -890,10 +1102,34 @@ declare module "bun" {
*/
size(path: string, options?: S3Options): Promise<number>;
/**
* Get the size of a file in bytes.
* Uses HEAD request to efficiently get size.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and configuration options
* @returns A promise that resolves to the file size in bytes
*
* @example
* // Get size
* const bytes = await S3Client.size("video.mp4", credentials);
* console.log(`Size: ${bytes} bytes`);
*
* // Check if file is large
* if (await S3Client.size("data.zip", credentials) > 100 * 1024 * 1024) {
* console.log("File is larger than 100MB");
* }
*/
static size(path: string, options?: S3Options): Promise<number>;
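// A short sketch of the static size() documented above; the docblock notes it
// is backed by a HEAD request, so no object data is downloaded. The threshold
// and path are illustrative.
import { S3Client } from "bun";

const credentials = {
  accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
  bucket: "my-bucket",
};

const bytes = await S3Client.size("backups/db.sqlite", credentials);
if (bytes > 100 * 1024 * 1024) {
  console.log(`Large backup: ${(bytes / 1024 / 1024).toFixed(1)} MiB`);
}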
/**
* Check if a file exists in the bucket.
* Uses HEAD request to check existence.
*
* @param path The path to the file in the bucket
* @param options Additional S3 options to override defaults
* @returns A promise that resolves to true if the file exists, false otherwise
*
* @example
* // Check existence
* if (await bucket.exists("config.json")) {
@@ -911,23 +1147,124 @@ declare module "bun" {
* }
*/
exists(path: string, options?: S3Options): Promise<boolean>;
/**
* Check if a file exists in the bucket.
* Uses HEAD request to check existence.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and configuration options
* @returns A promise that resolves to true if the file exists, false otherwise
*
* @example
* // Check existence
* if (await S3Client.exists("config.json", credentials)) {
* const file = S3Client.file("config.json", credentials);
* const config = await file.json();
* }
*
* // With error handling
* try {
* if (!await S3Client.exists("required.txt", credentials)) {
* throw new Error("Required file missing");
* }
* } catch (err) {
* console.error("Check failed:", err);
* }
*/
static exists(path: string, options?: S3Options): Promise<boolean>;
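// A minimal guard built on the static exists() documented above, falling back
// to defaults when an optional file is missing. Path and credentials are
// placeholders.
import { S3Client } from "bun";

const credentials = {
  accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
  bucket: "my-bucket",
};

const config = (await S3Client.exists("config.json", credentials))
  ? await S3Client.file("config.json", credentials).json()
  : {}; // defaults when the file is absent
console.log(config);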
/**
* Get the stat of a file in an S3-compatible storage service.
*
* @param path The path to the file in the bucket
* @param options Additional S3 options to override defaults
* @returns A promise that resolves to the file stats
*
* @example
* const stat = await bucket.stat("my-file.txt");
*/
stat(path: string, options?: S3Options): Promise<S3Stats>;
/**
* Get the stat of a file in an S3-compatible storage service.
*
* @param path The path to the file in the bucket
* @param options S3 credentials and configuration options
* @returns A promise that resolves to the file stats
*
* @example
* const stat = await S3Client.stat("my-file.txt", credentials);
*/
static stat(path: string, options?: S3Options): Promise<S3Stats>;
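// A small sketch of the static stat() documented above; the S3Stats fields read
// here (size, etag, lastModified) are assumptions based on typical usage, not
// spelled out in this diff.
import { S3Client } from "bun";

const credentials = {
  accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
  bucket: "my-bucket",
};

const stat = await S3Client.stat("my-file.txt", credentials);
console.log(stat.size, stat.etag, stat.lastModified);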
/**
* Returns some or all (up to 1,000) of the objects in a bucket with each request.
*
* You can use the request parameters as selection criteria to return a subset of the objects in a bucket.
*
* @param input Options for listing objects in the bucket
* @param options Additional S3 options to override defaults
* @returns A promise that resolves to the list response
*
* @example
* // List (up to) 1000 objects in the bucket
* const allObjects = await bucket.list();
*
* // List (up to) 500 objects under `uploads/` prefix, with owner field for each object
* const uploads = await bucket.list({
* prefix: 'uploads/',
* maxKeys: 500,
* fetchOwner: true,
* });
*
* // Check if more results are available
* if (uploads.isTruncated) {
* // List next batch of objects under `uploads/` prefix
* const moreUploads = await bucket.list({
* prefix: 'uploads/',
* maxKeys: 500,
* startAfter: uploads.contents!.at(-1).key,
* fetchOwner: true,
* });
* }
*/
list(
input?: S3ListObjectsOptions | null,
options?: Pick<S3Options, "accessKeyId" | "secretAccessKey" | "sessionToken" | "region" | "bucket" | "endpoint">,
): Promise<S3ListObjectsResponse>;
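// A minimal pagination loop over list(), following the isTruncated/startAfter
// pattern shown in the docblock above; the prefix and page size are illustrative.
import { S3Client } from "bun";

const bucket = new S3Client({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
  bucket: "my-bucket",
});

const keys: string[] = [];
let startAfter: string | undefined;
while (true) {
  const page = await bucket.list({ prefix: "uploads/", maxKeys: 500, startAfter });
  for (const obj of page.contents ?? []) {
    if (obj.key) keys.push(obj.key);
  }
  if (!page.isTruncated) break;
  startAfter = keys.at(-1); // continue after the last key seen so far
}
console.log(`found ${keys.length} objects under uploads/`);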
/**
* Returns some or all (up to 1,000) of the objects in a bucket with each request.
*
* You can use the request parameters as selection criteria to return a subset of the objects in a bucket.
*
* @param input Options for listing objects in the bucket
* @param options S3 credentials and configuration options
* @returns A promise that resolves to the list response
*
* @example
* // List (up to) 1000 objects in the bucket
* const allObjects = await S3Client.list(null, credentials);
*
* // List (up to) 500 objects under `uploads/` prefix, with owner field for each object
* const uploads = await S3Client.list({
* prefix: 'uploads/',
* maxKeys: 500,
* fetchOwner: true,
* }, credentials);
*
* // Check if more results are available
* if (uploads.isTruncated) {
* // List next batch of objects under `uploads/` prefix
* const moreUploads = await S3Client.list({
* prefix: 'uploads/',
* maxKeys: 500,
* startAfter: uploads.contents!.at(-1).key,
* fetchOwner: true,
* }, credentials);
* }
*/
static list(
input?: S3ListObjectsOptions | null,
options?: Pick<S3Options, "accessKeyId" | "secretAccessKey" | "sessionToken" | "region" | "bucket" | "endpoint">,