diff --git a/packages/bun-types/bun.d.ts b/packages/bun-types/bun.d.ts
index 712ee4ed3e..23f1c6d3ba 100644
--- a/packages/bun-types/bun.d.ts
+++ b/packages/bun-types/bun.d.ts
@@ -1435,6 +1435,28 @@ declare module "bun" {
*/
type?: string;
+ /**
+ * By default, Amazon S3 uses the STANDARD Storage Class to store newly created objects.
+ *
+ * @example
+ * // Setting explicit Storage class
+ * const file = s3("my-file.json", {
+ * storageClass: "STANDARD_IA"
+ * });
+ */
+ storageClass?:
+ | "STANDARD"
+ | "DEEP_ARCHIVE"
+ | "EXPRESS_ONEZONE"
+ | "GLACIER"
+ | "GLACIER_IR"
+ | "INTELLIGENT_TIERING"
+ | "ONEZONE_IA"
+ | "OUTPOSTS"
+ | "REDUCED_REDUNDANCY"
+ | "SNOW"
+ | "STANDARD_IA";
+
/**
* @deprecated The size of the internal buffer in bytes. Defaults to 5 MiB. use `partSize` and `queueSize` instead.
*/
diff --git a/src/bun.js/rare_data.zig b/src/bun.js/rare_data.zig
index bc5fb49beb..b63322b9a8 100644
--- a/src/bun.js/rare_data.zig
+++ b/src/bun.js/rare_data.zig
@@ -440,12 +440,13 @@ pub fn nodeFSStatWatcherScheduler(rare: *RareData, vm: *JSC.VirtualMachine) *Sta
pub fn s3DefaultClient(rare: *RareData, globalThis: *JSC.JSGlobalObject) JSC.JSValue {
return rare.s3_default_client.get() orelse {
const vm = globalThis.bunVM();
- var aws_options = bun.S3.S3Credentials.getCredentialsWithOptions(vm.transpiler.env.getS3Credentials(), .{}, null, null, globalThis) catch bun.outOfMemory();
+ var aws_options = bun.S3.S3Credentials.getCredentialsWithOptions(vm.transpiler.env.getS3Credentials(), .{}, null, null, null, globalThis) catch bun.outOfMemory();
defer aws_options.deinit();
const client = JSC.WebCore.S3Client.new(.{
.credentials = aws_options.credentials.dupe(),
.options = aws_options.options,
.acl = aws_options.acl,
+ .storage_class = aws_options.storage_class,
});
const js_client = client.toJS(globalThis);
js_client.ensureStillAlive();
diff --git a/src/bun.js/webcore/S3Client.zig b/src/bun.js/webcore/S3Client.zig
index 014bfcff38..18d93dc0b7 100644
--- a/src/bun.js/webcore/S3Client.zig
+++ b/src/bun.js/webcore/S3Client.zig
@@ -94,17 +94,19 @@ pub const S3Client = struct {
credentials: *S3Credentials,
options: bun.S3.MultiPartUploadOptions = .{},
acl: ?bun.S3.ACL = null,
+ storage_class: ?bun.S3.StorageClass = null,
pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*@This() {
const arguments = callframe.arguments_old(1).slice();
var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments);
defer args.deinit();
- var aws_options = try S3Credentials.getCredentialsWithOptions(globalThis.bunVM().transpiler.env.getS3Credentials(), .{}, args.nextEat(), null, globalThis);
+ var aws_options = try S3Credentials.getCredentialsWithOptions(globalThis.bunVM().transpiler.env.getS3Credentials(), .{}, args.nextEat(), null, null, globalThis);
defer aws_options.deinit();
return S3Client.new(.{
.credentials = aws_options.credentials.dupe(),
.options = aws_options.options,
.acl = aws_options.acl,
+ .storage_class = aws_options.storage_class,
});
}
@@ -138,7 +140,7 @@ pub const S3Client = struct {
};
errdefer path.deinit();
const options = args.nextEat();
- var blob = Blob.new(try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl));
+ var blob = Blob.new(try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl, ptr.storage_class));
blob.allocator = bun.default_allocator;
return blob.toJS(globalThis);
}
@@ -156,7 +158,7 @@ pub const S3Client = struct {
errdefer path.deinit();
const options = args.nextEat();
- var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl);
+ var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl, ptr.storage_class);
defer blob.detach();
return S3File.getPresignUrlFrom(&blob, globalThis, options);
}
@@ -173,7 +175,7 @@ pub const S3Client = struct {
};
errdefer path.deinit();
const options = args.nextEat();
- var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl);
+ var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl, ptr.storage_class);
defer blob.detach();
return S3File.S3BlobStatTask.exists(globalThis, &blob);
}
@@ -190,7 +192,7 @@ pub const S3Client = struct {
};
errdefer path.deinit();
const options = args.nextEat();
- var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl);
+ var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl, ptr.storage_class);
defer blob.detach();
return S3File.S3BlobStatTask.size(globalThis, &blob);
}
@@ -207,7 +209,7 @@ pub const S3Client = struct {
};
errdefer path.deinit();
const options = args.nextEat();
- var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl);
+ var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl, ptr.storage_class);
defer blob.detach();
return S3File.S3BlobStatTask.stat(globalThis, &blob);
}
@@ -225,7 +227,7 @@ pub const S3Client = struct {
};
const options = args.nextEat();
- var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl);
+ var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl, ptr.storage_class);
defer blob.detach();
var blob_internal: PathOrBlob = .{ .blob = blob };
return Blob.writeFileInternal(globalThis, &blob_internal, data, .{
@@ -243,7 +245,7 @@ pub const S3Client = struct {
};
errdefer path.deinit();
const options = args.nextEat();
- var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl);
+ var blob = try S3File.constructS3FileWithS3CredentialsAndOptions(globalThis, path, options, ptr.credentials, ptr.options, ptr.acl, ptr.storage_class);
defer blob.detach();
return blob.store.?.data.s3.unlink(blob.store.?, globalThis, options);
}
diff --git a/src/bun.js/webcore/S3File.zig b/src/bun.js/webcore/S3File.zig
index 7b02077244..6502d13a0e 100644
--- a/src/bun.js/webcore/S3File.zig
+++ b/src/bun.js/webcore/S3File.zig
@@ -227,8 +227,9 @@ pub fn constructS3FileWithS3CredentialsAndOptions(
default_credentials: *S3.S3Credentials,
default_options: bun.S3.MultiPartUploadOptions,
default_acl: ?bun.S3.ACL,
+ default_storage_class: ?bun.S3.StorageClass,
) bun.JSError!Blob {
- var aws_options = try S3.S3Credentials.getCredentialsWithOptions(default_credentials.*, default_options, options, default_acl, globalObject);
+ var aws_options = try S3.S3Credentials.getCredentialsWithOptions(default_credentials.*, default_options, options, default_acl, default_storage_class, globalObject);
defer aws_options.deinit();
const store = brk: {
@@ -241,6 +242,8 @@ pub fn constructS3FileWithS3CredentialsAndOptions(
errdefer store.deinit();
store.data.s3.options = aws_options.options;
store.data.s3.acl = aws_options.acl;
+ store.data.s3.storage_class = aws_options.storage_class;
+
var blob = Blob.initWithStore(store, globalObject);
if (options) |opts| {
if (opts.isObject()) {
@@ -276,12 +279,14 @@ pub fn constructS3FileWithS3Credentials(
options: ?JSC.JSValue,
existing_credentials: S3.S3Credentials,
) bun.JSError!Blob {
- var aws_options = try S3.S3Credentials.getCredentialsWithOptions(existing_credentials, .{}, options, null, globalObject);
+ var aws_options = try S3.S3Credentials.getCredentialsWithOptions(existing_credentials, .{}, options, null, null, globalObject);
defer aws_options.deinit();
const store = Blob.Store.initS3(path, null, aws_options.credentials, bun.default_allocator) catch bun.outOfMemory();
errdefer store.deinit();
store.data.s3.options = aws_options.options;
store.data.s3.acl = aws_options.acl;
+ store.data.s3.storage_class = aws_options.storage_class;
+
var blob = Blob.initWithStore(store, globalObject);
if (options) |opts| {
if (opts.isObject()) {
@@ -465,6 +470,7 @@ pub fn getPresignUrlFrom(this: *Blob, globalThis: *JSC.JSGlobalObject, extra_opt
.path = path,
.method = method,
.acl = credentialsWithOptions.acl,
+ .storage_class = credentialsWithOptions.storage_class,
}, .{ .expires = expires }) catch |sign_err| {
return S3.throwSignError(sign_err, globalThis);
};
diff --git a/src/bun.js/webcore/blob.zig b/src/bun.js/webcore/blob.zig
index 55687b1e76..1d7e0a1c6d 100644
--- a/src/bun.js/webcore/blob.zig
+++ b/src/bun.js/webcore/blob.zig
@@ -928,6 +928,7 @@ pub const Blob = struct {
destination_blob.contentTypeOrMimeType(),
aws_options.acl,
proxy_url,
+ aws_options.storage_class,
@ptrCast(&Wrapper.resolve),
Wrapper.new(.{
.promise = promise,
@@ -1056,6 +1057,7 @@ pub const Blob = struct {
ctx,
aws_options.options,
aws_options.acl,
+ aws_options.storage_class,
destination_blob.contentTypeOrMimeType(),
proxy_url,
null,
@@ -1098,6 +1100,7 @@ pub const Blob = struct {
destination_blob.contentTypeOrMimeType(),
aws_options.acl,
proxy_url,
+ aws_options.storage_class,
@ptrCast(&Wrapper.resolve),
Wrapper.new(.{
.store = store,
@@ -1121,6 +1124,7 @@ pub const Blob = struct {
ctx,
s3.options,
aws_options.acl,
+ aws_options.storage_class,
destination_blob.contentTypeOrMimeType(),
proxy_url,
null,
@@ -1310,6 +1314,7 @@ pub const Blob = struct {
globalThis,
aws_options.options,
aws_options.acl,
+ aws_options.storage_class,
destination_blob.contentTypeOrMimeType(),
proxy_url,
null,
@@ -1369,6 +1374,7 @@ pub const Blob = struct {
globalThis,
aws_options.options,
aws_options.acl,
+ aws_options.storage_class,
destination_blob.contentTypeOrMimeType(),
proxy_url,
null,
@@ -3507,6 +3513,8 @@ pub const Blob = struct {
credentials: ?*S3Credentials,
options: bun.S3.MultiPartUploadOptions = .{},
acl: ?S3.ACL = null,
+ storage_class: ?S3.StorageClass = null,
+
pub fn isSeekable(_: *const @This()) ?bool {
return true;
}
@@ -3517,7 +3525,7 @@ pub const Blob = struct {
}
pub fn getCredentialsWithOptions(this: *const @This(), options: ?JSValue, globalObject: *JSC.JSGlobalObject) bun.JSError!S3.S3CredentialsWithOptions {
- return S3Credentials.getCredentialsWithOptions(this.getCredentials().*, this.options, options, this.acl, globalObject);
+ return S3Credentials.getCredentialsWithOptions(this.getCredentials().*, this.options, options, this.acl, this.storage_class, globalObject);
}
pub fn path(this: *@This()) []const u8 {
@@ -4102,6 +4110,7 @@ pub const Blob = struct {
globalThis,
aws_options.options,
aws_options.acl,
+ aws_options.storage_class,
this.contentTypeOrMimeType(),
proxy_url,
null,
@@ -4339,6 +4348,7 @@ pub const Blob = struct {
credentialsWithOptions.options,
this.contentTypeOrMimeType(),
proxy_url,
+ credentialsWithOptions.storage_class,
);
}
}
@@ -4349,6 +4359,7 @@ pub const Blob = struct {
.{},
this.contentTypeOrMimeType(),
proxy_url,
+ null,
);
}
if (store.data != .file) {
diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig
index 9190b800bc..8ad9bb4d8e 100644
--- a/src/bun.js/webcore/response.zig
+++ b/src/bun.js/webcore/response.zig
@@ -3256,6 +3256,7 @@ pub const Fetch = struct {
.credentials = globalThis.bunVM().transpiler.env.getS3Credentials(),
.options = .{},
.acl = null,
+ .storage_class = null,
};
defer {
credentialsWithOptions.deinit();
@@ -3265,7 +3266,7 @@ pub const Fetch = struct {
if (try options.getTruthyComptime(globalThis, "s3")) |s3_options| {
if (s3_options.isObject()) {
s3_options.ensureStillAlive();
- credentialsWithOptions = try s3.S3Credentials.getCredentialsWithOptions(credentialsWithOptions.credentials, .{}, s3_options, null, globalThis);
+ credentialsWithOptions = try s3.S3Credentials.getCredentialsWithOptions(credentialsWithOptions.credentials, .{}, s3_options, null, null, globalThis);
}
}
}
@@ -3341,6 +3342,7 @@ pub const Fetch = struct {
globalThis,
credentialsWithOptions.options,
credentialsWithOptions.acl,
+ credentialsWithOptions.storage_class,
if (headers) |h| h.getContentType() else null,
proxy_url,
@ptrCast(&Wrapper.resolve),
diff --git a/src/s3/client.zig b/src/s3/client.zig
index 80231c3d87..6d527ff9f2 100644
--- a/src/s3/client.zig
+++ b/src/s3/client.zig
@@ -7,6 +7,7 @@ pub const ACL = @import("./acl.zig").ACL;
pub const S3HttpDownloadStreamingTask = @import("./download_stream.zig").S3HttpDownloadStreamingTask;
pub const MultiPartUploadOptions = @import("./multipart_options.zig").MultiPartUploadOptions;
pub const MultiPartUpload = @import("./multipart.zig").MultiPartUpload;
+pub const StorageClass = @import("./storage_class.zig").StorageClass;
pub const Error = @import("./error.zig");
pub const throwSignError = Error.throwSignError;
@@ -105,6 +106,7 @@ pub fn upload(
content_type: ?[]const u8,
acl: ?ACL,
proxy_url: ?[]const u8,
+ storage_class: ?StorageClass,
callback: *const fn (S3UploadResult, *anyopaque) void,
callback_context: *anyopaque,
) void {
@@ -115,6 +117,7 @@ pub fn upload(
.body = content,
.content_type = content_type,
.acl = acl,
+ .storage_class = storage_class,
}, .{ .upload = callback }, callback_context);
}
/// returns a writable stream that writes to the s3 path
@@ -125,6 +128,7 @@ pub fn writableStream(
options: MultiPartUploadOptions,
content_type: ?[]const u8,
proxy: ?[]const u8,
+ storage_class: ?StorageClass,
) bun.JSError!JSC.JSValue {
const Wrapper = struct {
pub fn callback(result: S3UploadResult, sink: *JSC.WebCore.NetworkSink) void {
@@ -158,6 +162,7 @@ pub fn writableStream(
.path = bun.default_allocator.dupe(u8, path) catch bun.outOfMemory(),
.proxy = if (proxy_url.len > 0) bun.default_allocator.dupe(u8, proxy_url) catch bun.outOfMemory() else "",
.content_type = if (content_type) |ct| bun.default_allocator.dupe(u8, ct) catch bun.outOfMemory() else null,
+ .storage_class = storage_class,
.callback = @ptrCast(&Wrapper.callback),
.callback_context = undefined,
@@ -290,6 +295,7 @@ pub fn uploadStream(
globalThis: *JSC.JSGlobalObject,
options: MultiPartUploadOptions,
acl: ?ACL,
+ storage_class: ?StorageClass,
content_type: ?[]const u8,
proxy: ?[]const u8,
callback: ?*const fn (S3UploadResult, *anyopaque) void,
@@ -333,6 +339,7 @@ pub fn uploadStream(
.state = .wait_stream_check,
.options = options,
.acl = acl,
+ .storage_class = storage_class,
.vm = JSC.VirtualMachine.get(),
});
diff --git a/src/s3/credentials.zig b/src/s3/credentials.zig
index b72c0b8c88..fdf2a45c3d 100644
--- a/src/s3/credentials.zig
+++ b/src/s3/credentials.zig
@@ -4,6 +4,8 @@ const std = @import("std");
const MultiPartUploadOptions = @import("./multipart_options.zig").MultiPartUploadOptions;
const ACL = @import("./acl.zig").ACL;
+const StorageClass = @import("./storage_class.zig").StorageClass;
+
const JSC = bun.JSC;
const RareData = JSC.RareData;
const strings = bun.strings;
@@ -16,7 +18,7 @@ pub const S3Credentials = struct {
endpoint: []const u8,
bucket: []const u8,
sessionToken: []const u8,
-
+ storage_class: ?StorageClass = null,
/// Important for MinIO support.
insecure_http: bool = false,
@@ -42,12 +44,13 @@ pub const S3Credentials = struct {
return hasher.final();
}
- pub fn getCredentialsWithOptions(this: S3Credentials, default_options: MultiPartUploadOptions, options: ?JSC.JSValue, default_acl: ?ACL, globalObject: *JSC.JSGlobalObject) bun.JSError!S3CredentialsWithOptions {
+ pub fn getCredentialsWithOptions(this: S3Credentials, default_options: MultiPartUploadOptions, options: ?JSC.JSValue, default_acl: ?ACL, default_storage_class: ?StorageClass, globalObject: *JSC.JSGlobalObject) bun.JSError!S3CredentialsWithOptions {
// get ENV config
var new_credentials = S3CredentialsWithOptions{
.credentials = this,
.options = default_options,
.acl = default_acl,
+ .storage_class = default_storage_class,
};
errdefer {
new_credentials.deinit();
@@ -197,6 +200,10 @@ pub const S3Credentials = struct {
if (try opts.getOptionalEnum(globalObject, "acl", ACL)) |acl| {
new_credentials.acl = acl;
}
+
+ if (try opts.getOptionalEnum(globalObject, "storageClass", StorageClass)) |storage_class| {
+ new_credentials.storage_class = storage_class;
+ }
}
}
return new_credentials;
@@ -313,7 +320,9 @@ pub const S3Credentials = struct {
content_disposition: []const u8 = "",
session_token: []const u8 = "",
acl: ?ACL = null,
- _headers: [7]picohttp.Header = .{
+ storage_class: ?StorageClass = null,
+ _headers: [8]picohttp.Header = .{
+ .{ .name = "", .value = "" },
.{ .name = "", .value = "" },
.{ .name = "", .value = "" },
.{ .name = "", .value = "" },
@@ -375,6 +384,7 @@ pub const S3Credentials = struct {
search_params: ?[]const u8 = null,
content_disposition: ?[]const u8 = null,
acl: ?ACL = null,
+ storage_class: ?StorageClass = null,
};
pub fn guessRegion(endpoint: []const u8) []const u8 {
@@ -448,6 +458,8 @@ pub const S3Credentials = struct {
const acl: ?[]const u8 = if (signOptions.acl) |acl_value| acl_value.toString() else null;
+ const storage_class: ?[]const u8 = if (signOptions.storage_class) |storage_class| storage_class.toString() else null;
+
if (this.accessKeyId.len == 0 or this.secretAccessKey.len == 0) return error.MissingCredentials;
const signQuery = signQueryOption != null;
const expires = if (signQueryOption) |options| options.expires else 0;
@@ -519,32 +531,64 @@ pub const S3Credentials = struct {
const amz_day = amz_date[0..8];
const signed_headers = if (signQuery) "host" else brk: {
- if (acl != null) {
- if (content_disposition != null) {
- if (session_token != null) {
- break :brk "content-disposition;host;x-amz-acl;x-amz-content-sha256;x-amz-date;x-amz-security-token";
+ if (storage_class != null) {
+ if (acl != null) {
+ if (content_disposition != null) {
+ if (session_token != null) {
+                            break :brk "content-disposition;host;x-amz-acl;x-amz-content-sha256;x-amz-date;x-amz-security-token;x-amz-storage-class";
+ } else {
+ break :brk "content-disposition;host;x-amz-acl;x-amz-content-sha256;x-amz-date;x-amz-storage-class";
+ }
} else {
- break :brk "content-disposition;host;x-amz-acl;x-amz-content-sha256;x-amz-date";
+ if (session_token != null) {
+                            break :brk "host;x-amz-acl;x-amz-content-sha256;x-amz-date;x-amz-security-token;x-amz-storage-class";
+ } else {
+ break :brk "host;x-amz-acl;x-amz-content-sha256;x-amz-date;x-amz-storage-class";
+ }
}
} else {
- if (session_token != null) {
- break :brk "host;x-amz-acl;x-amz-content-sha256;x-amz-date;x-amz-security-token";
+ if (content_disposition != null) {
+ if (session_token != null) {
+                            break :brk "content-disposition;host;x-amz-content-sha256;x-amz-date;x-amz-security-token;x-amz-storage-class";
+ } else {
+ break :brk "content-disposition;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class";
+ }
} else {
- break :brk "host;x-amz-acl;x-amz-content-sha256;x-amz-date";
+ if (session_token != null) {
+                            break :brk "host;x-amz-content-sha256;x-amz-date;x-amz-security-token;x-amz-storage-class";
+ } else {
+ break :brk "host;x-amz-content-sha256;x-amz-date;x-amz-storage-class";
+ }
}
}
} else {
- if (content_disposition != null) {
- if (session_token != null) {
- break :brk "content-disposition;host;x-amz-content-sha256;x-amz-date;x-amz-security-token";
+ if (acl != null) {
+ if (content_disposition != null) {
+ if (session_token != null) {
+ break :brk "content-disposition;host;x-amz-acl;x-amz-content-sha256;x-amz-date;x-amz-security-token";
+ } else {
+ break :brk "content-disposition;host;x-amz-acl;x-amz-content-sha256;x-amz-date";
+ }
} else {
- break :brk "content-disposition;host;x-amz-content-sha256;x-amz-date";
+ if (session_token != null) {
+ break :brk "host;x-amz-acl;x-amz-content-sha256;x-amz-date;x-amz-security-token";
+ } else {
+ break :brk "host;x-amz-acl;x-amz-content-sha256;x-amz-date";
+ }
}
} else {
- if (session_token != null) {
- break :brk "host;x-amz-content-sha256;x-amz-date;x-amz-security-token";
+ if (content_disposition != null) {
+ if (session_token != null) {
+ break :brk "content-disposition;host;x-amz-content-sha256;x-amz-date;x-amz-security-token";
+ } else {
+ break :brk "content-disposition;host;x-amz-content-sha256;x-amz-date";
+ }
} else {
- break :brk "host;x-amz-content-sha256;x-amz-date";
+ if (session_token != null) {
+ break :brk "host;x-amz-content-sha256;x-amz-date;x-amz-security-token";
+ } else {
+ break :brk "host;x-amz-content-sha256;x-amz-date";
+ }
}
}
}
@@ -596,17 +640,33 @@ pub const S3Credentials = struct {
encoded_session_token = encodeURIComponent(token, &token_encoded_buffer, true) catch return error.InvalidSessionToken;
}
const canonical = brk_canonical: {
- if (acl) |acl_value| {
- if (encoded_session_token) |token| {
- break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, host, signed_headers, aws_content_hash });
+ if (storage_class) |storage_class_value| {
+ if (acl) |acl_value| {
+ if (encoded_session_token) |token| {
+                            break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host&x-amz-storage-class={s}\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, storage_class_value, host, signed_headers, aws_content_hash });
+ } else {
+                            break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&x-amz-storage-class={s}\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, storage_class_value, host, signed_headers, aws_content_hash });
+ }
} else {
- break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, host, signed_headers, aws_content_hash });
+ if (encoded_session_token) |token| {
+                            break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host&x-amz-storage-class={s}\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, storage_class_value, host, signed_headers, aws_content_hash });
+ } else {
+                            break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&x-amz-storage-class={s}\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, storage_class_value, host, signed_headers, aws_content_hash });
+ }
}
} else {
- if (encoded_session_token) |token| {
- break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, host, signed_headers, aws_content_hash });
+ if (acl) |acl_value| {
+ if (encoded_session_token) |token| {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, host, signed_headers, aws_content_hash });
+ } else {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, host, signed_headers, aws_content_hash });
+ }
} else {
- break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, host, signed_headers, aws_content_hash });
+ if (encoded_session_token) |token| {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, host, signed_headers, aws_content_hash });
+ } else {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\nX-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host\nhost:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, host, signed_headers, aws_content_hash });
+ }
}
}
};
@@ -616,65 +676,130 @@ pub const S3Credentials = struct {
const signValue = try std.fmt.bufPrint(&tmp_buffer, "AWS4-HMAC-SHA256\n{s}\n{s}/{s}/{s}/aws4_request\n{s}", .{ amz_date, amz_day, region, service_name, bun.fmt.bytesToHex(sha_digest[0..bun.sha.SHA256.digest], .lower) });
const signature = bun.hmac.generate(sigDateRegionServiceReq, signValue, .sha256, &hmac_sig_service) orelse return error.FailedToGenerateSignature;
- if (acl) |acl_value| {
- if (encoded_session_token) |token| {
- break :brk try std.fmt.allocPrint(
- bun.default_allocator,
- "{s}://{s}{s}?X-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}",
- .{ protocol, host, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) },
- );
+
+ if (storage_class) |storage_class_value| {
+ if (acl) |acl_value| {
+ if (encoded_session_token) |token| {
+ break :brk try std.fmt.allocPrint(
+ bun.default_allocator,
+                            "{s}://{s}{s}?X-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host&x-amz-storage-class={s}&X-Amz-Signature={s}",
+                            .{ protocol, host, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, storage_class_value, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) },
+ );
+ } else {
+ break :brk try std.fmt.allocPrint(
+ bun.default_allocator,
+                            "{s}://{s}{s}?X-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&x-amz-storage-class={s}&X-Amz-Signature={s}",
+                            .{ protocol, host, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, storage_class_value, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) },
+ );
+ }
} else {
- break :brk try std.fmt.allocPrint(
- bun.default_allocator,
- "{s}://{s}{s}?X-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}",
- .{ protocol, host, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) },
- );
+ if (encoded_session_token) |token| {
+ break :brk try std.fmt.allocPrint(
+ bun.default_allocator,
+                            "{s}://{s}{s}?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host&x-amz-storage-class={s}&X-Amz-Signature={s}",
+                            .{ protocol, host, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, storage_class_value, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) },
+ );
+ } else {
+ break :brk try std.fmt.allocPrint(
+ bun.default_allocator,
+                            "{s}://{s}{s}?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&x-amz-storage-class={s}&X-Amz-Signature={s}",
+                            .{ protocol, host, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, storage_class_value, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) },
+ );
+ }
}
} else {
- if (encoded_session_token) |token| {
- break :brk try std.fmt.allocPrint(
- bun.default_allocator,
- "{s}://{s}{s}?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}",
- .{ protocol, host, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) },
- );
+ if (acl) |acl_value| {
+ if (encoded_session_token) |token| {
+ break :brk try std.fmt.allocPrint(
+ bun.default_allocator,
+ "{s}://{s}{s}?X-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}",
+ .{ protocol, host, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) },
+ );
+ } else {
+ break :brk try std.fmt.allocPrint(
+ bun.default_allocator,
+ "{s}://{s}{s}?X-Amz-Acl={s}&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}",
+ .{ protocol, host, normalizedPath, acl_value, this.accessKeyId, amz_day, region, service_name, amz_date, expires, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) },
+ );
+ }
} else {
- break :brk try std.fmt.allocPrint(
- bun.default_allocator,
- "{s}://{s}{s}?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}",
- .{ protocol, host, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) },
- );
+ if (encoded_session_token) |token| {
+ break :brk try std.fmt.allocPrint(
+ bun.default_allocator,
+ "{s}://{s}{s}?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-Security-Token={s}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}",
+ .{ protocol, host, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, token, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) },
+ );
+ } else {
+ break :brk try std.fmt.allocPrint(
+ bun.default_allocator,
+ "{s}://{s}{s}?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential={s}%2F{s}%2F{s}%2F{s}%2Faws4_request&X-Amz-Date={s}&X-Amz-Expires={}&X-Amz-SignedHeaders=host&X-Amz-Signature={s}",
+ .{ protocol, host, normalizedPath, this.accessKeyId, amz_day, region, service_name, amz_date, expires, bun.fmt.bytesToHex(signature[0..DIGESTED_HMAC_256_LEN], .lower) },
+ );
+ }
}
}
} else {
var encoded_content_disposition_buffer: [255]u8 = undefined;
const encoded_content_disposition: []const u8 = if (content_disposition) |cd| encodeURIComponent(cd, &encoded_content_disposition_buffer, true) catch return error.ContentTypeIsTooLong else "";
const canonical = brk_canonical: {
- if (acl) |acl_value| {
- if (content_disposition != null) {
- if (session_token) |token| {
- break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, acl_value, aws_content_hash, amz_date, token, signed_headers, aws_content_hash });
+ if (storage_class) |storage_class_value| {
+ if (acl) |acl_value| {
+ if (content_disposition != null) {
+ if (session_token) |token| {
+                            break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\nx-amz-storage-class:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, acl_value, aws_content_hash, amz_date, token, storage_class_value, signed_headers, aws_content_hash });
+ } else {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-storage-class:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, acl_value, aws_content_hash, amz_date, storage_class_value, signed_headers, aws_content_hash });
+ }
} else {
- break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, acl_value, aws_content_hash, amz_date, signed_headers, aws_content_hash });
+ if (session_token) |token| {
+                            break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\nx-amz-storage-class:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, acl_value, aws_content_hash, amz_date, token, storage_class_value, signed_headers, aws_content_hash });
+ } else {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-storage-class:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, acl_value, aws_content_hash, amz_date, storage_class_value, signed_headers, aws_content_hash });
+ }
}
} else {
- if (session_token) |token| {
- break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, acl_value, aws_content_hash, amz_date, token, signed_headers, aws_content_hash });
+ if (content_disposition != null) {
+ if (session_token) |token| {
+                            break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\nx-amz-storage-class:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, aws_content_hash, amz_date, token, storage_class_value, signed_headers, aws_content_hash });
+ } else {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-storage-class:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, aws_content_hash, amz_date, storage_class_value, signed_headers, aws_content_hash });
+ }
} else {
- break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, acl_value, aws_content_hash, amz_date, signed_headers, aws_content_hash });
+ if (session_token) |token| {
+                            break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\nx-amz-storage-class:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, aws_content_hash, amz_date, token, storage_class_value, signed_headers, aws_content_hash });
+ } else {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-storage-class:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, aws_content_hash, amz_date, storage_class_value, signed_headers, aws_content_hash });
+ }
}
}
} else {
- if (content_disposition != null) {
- if (session_token) |token| {
- break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, aws_content_hash, amz_date, token, signed_headers, aws_content_hash });
+ if (acl) |acl_value| {
+ if (content_disposition != null) {
+ if (session_token) |token| {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, acl_value, aws_content_hash, amz_date, token, signed_headers, aws_content_hash });
+ } else {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, acl_value, aws_content_hash, amz_date, signed_headers, aws_content_hash });
+ }
} else {
- break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, aws_content_hash, amz_date, signed_headers, aws_content_hash });
+ if (session_token) |token| {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, acl_value, aws_content_hash, amz_date, token, signed_headers, aws_content_hash });
+ } else {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-acl:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, acl_value, aws_content_hash, amz_date, signed_headers, aws_content_hash });
+ }
}
} else {
- if (session_token) |token| {
- break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, aws_content_hash, amz_date, token, signed_headers, aws_content_hash });
+ if (content_disposition != null) {
+ if (session_token) |token| {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, aws_content_hash, amz_date, token, signed_headers, aws_content_hash });
+ } else {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\ncontent-disposition:{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", encoded_content_disposition, host, aws_content_hash, amz_date, signed_headers, aws_content_hash });
+ }
} else {
- break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, aws_content_hash, amz_date, signed_headers, aws_content_hash });
+ if (session_token) |token| {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\nx-amz-security-token:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, aws_content_hash, amz_date, token, signed_headers, aws_content_hash });
+ } else {
+ break :brk_canonical try std.fmt.bufPrint(&tmp_buffer, "{s}\n{s}\n{s}\nhost:{s}\nx-amz-content-sha256:{s}\nx-amz-date:{s}\n\n{s}\n{s}", .{ method_name, normalizedPath, if (search_params) |p| p[1..] else "", host, aws_content_hash, amz_date, signed_headers, aws_content_hash });
+ }
}
}
}
@@ -705,6 +830,7 @@ pub const S3Credentials = struct {
.authorization = "",
.acl = signOptions.acl,
.url = authorization,
+ .storage_class = signOptions.storage_class,
};
}
@@ -713,6 +839,7 @@ pub const S3Credentials = struct {
.host = host,
.authorization = authorization,
.acl = signOptions.acl,
+ .storage_class = signOptions.storage_class,
.url = try std.fmt.allocPrint(bun.default_allocator, "{s}://{s}{s}{s}", .{ protocol, host, normalizedPath, if (search_params) |s| s else "" }),
._headers = [_]picohttp.Header{
.{ .name = "x-amz-content-sha256", .value = aws_content_hash },
@@ -722,6 +849,7 @@ pub const S3Credentials = struct {
.{ .name = "", .value = "" },
.{ .name = "", .value = "" },
.{ .name = "", .value = "" },
+ .{ .name = "", .value = "" },
},
._headers_len = 4,
};
@@ -745,6 +873,11 @@ pub const S3Credentials = struct {
result._headers_len += 1;
}
+ if (storage_class) |storage_class_value| {
+ result._headers[result._headers_len] = .{ .name = "x-amz-storage-class", .value = storage_class_value };
+ result._headers_len += 1;
+ }
+
return result;
}
};
@@ -753,6 +886,7 @@ pub const S3CredentialsWithOptions = struct {
credentials: S3Credentials,
options: MultiPartUploadOptions = .{},
acl: ?ACL = null,
+ storage_class: ?StorageClass = null,
/// indicates if the credentials have changed
changed_credentials: bool = false,
diff --git a/src/s3/multipart.zig b/src/s3/multipart.zig
index b34ea065d9..3dbadac73f 100644
--- a/src/s3/multipart.zig
+++ b/src/s3/multipart.zig
@@ -3,6 +3,7 @@ const bun = @import("root").bun;
const strings = bun.strings;
const S3Credentials = @import("./credentials.zig").S3Credentials;
const ACL = @import("./acl.zig").ACL;
+const StorageClass = @import("./storage_class.zig").StorageClass;
 const JSC = bun.JSC;
 const MultiPartUploadOptions = @import("./multipart_options.zig").MultiPartUploadOptions;
 const S3SimpleRequest = @import("./simple_request.zig");
@@ -25,6 +26,7 @@ pub const MultiPartUpload = struct {
     options: MultiPartUploadOptions = .{},
     acl: ?ACL = null,
+    storage_class: ?StorageClass = null,
credentials: *S3Credentials,
poll_ref: bun.Async.KeepAlive = bun.Async.KeepAlive.init(),
vm: *JSC.VirtualMachine,
@@ -216,6 +218,7 @@ pub const MultiPartUpload = struct {
.body = this.buffered.items,
.content_type = this.content_type,
.acl = this.acl,
+ .storage_class = this.storage_class,
}, .{ .upload = @ptrCast(&singleSendUploadResponse) }, this);
return;
@@ -457,6 +460,7 @@ pub const MultiPartUpload = struct {
.search_params = "?uploads=",
.content_type = this.content_type,
.acl = this.acl,
+ .storage_class = this.storage_class,
}, .{ .download = @ptrCast(&startMultiPartRequestResult) }, this);
} else if (this.state == .multipart_completed) {
part.start();
@@ -532,6 +536,7 @@ pub const MultiPartUpload = struct {
.body = this.buffered.items,
.content_type = this.content_type,
.acl = this.acl,
+ .storage_class = this.storage_class,
}, .{ .upload = @ptrCast(&singleSendUploadResponse) }, this);
} else {
// we need to split
diff --git a/src/s3/simple_request.zig b/src/s3/simple_request.zig
index d9a6891b7b..0b03fc79f9 100644
--- a/src/s3/simple_request.zig
+++ b/src/s3/simple_request.zig
@@ -8,6 +8,8 @@ const getSignErrorCodeAndMessage = @import("./error.zig").getSignErrorCodeAndMes
const S3Credentials = @import("./credentials.zig").S3Credentials;
const picohttp = bun.picohttp;
const ACL = @import("./acl.zig").ACL;
+const StorageClass = @import("./storage_class.zig").StorageClass;
+
pub const S3StatResult = union(enum) {
success: struct {
size: usize = 0,
@@ -333,6 +335,7 @@ pub const S3SimpleRequestOptions = struct {
proxy_url: ?[]const u8 = null,
range: ?[]const u8 = null,
acl: ?ACL = null,
+ storage_class: ?StorageClass = null,
};
pub fn executeSimpleS3Request(
@@ -347,6 +350,7 @@ pub fn executeSimpleS3Request(
.search_params = options.search_params,
.content_disposition = options.content_disposition,
.acl = options.acl,
+ .storage_class = options.storage_class,
}, null) catch |sign_err| {
if (options.range) |range_| bun.default_allocator.free(range_);
const error_code_and_message = getSignErrorCodeAndMessage(sign_err);
diff --git a/src/s3/storage_class.zig b/src/s3/storage_class.zig
new file mode 100644
index 0000000000..1ba9a1a7be
--- /dev/null
+++ b/src/s3/storage_class.zig
@@ -0,0 +1,45 @@
+const bun = @import("root").bun;
+
+pub const StorageClass = enum {
+ STANDARD,
+ STANDARD_IA,
+ INTELLIGENT_TIERING,
+ EXPRESS_ONEZONE,
+ ONEZONE_IA,
+ GLACIER,
+ GLACIER_IR,
+ REDUCED_REDUNDANCY,
+ OUTPOSTS,
+ DEEP_ARCHIVE,
+ SNOW,
+
+ pub fn toString(this: @This()) []const u8 {
+ return switch (this) {
+ .STANDARD => "STANDARD",
+ .STANDARD_IA => "STANDARD_IA",
+ .INTELLIGENT_TIERING => "INTELLIGENT_TIERING",
+ .EXPRESS_ONEZONE => "EXPRESS_ONEZONE",
+ .ONEZONE_IA => "ONEZONE_IA",
+ .GLACIER => "GLACIER",
+ .GLACIER_IR => "GLACIER_IR",
+ .REDUCED_REDUNDANCY => "REDUCED_REDUNDANCY",
+ .OUTPOSTS => "OUTPOSTS",
+ .DEEP_ARCHIVE => "DEEP_ARCHIVE",
+ .SNOW => "SNOW",
+ };
+ }
+
+ pub const Map = bun.ComptimeStringMap(StorageClass, .{
+ .{ "STANDARD", .STANDARD },
+ .{ "STANDARD_IA", .STANDARD_IA },
+ .{ "INTELLIGENT_TIERING", .INTELLIGENT_TIERING },
+ .{ "EXPRESS_ONEZONE", .EXPRESS_ONEZONE },
+ .{ "ONEZONE_IA", .ONEZONE_IA },
+ .{ "GLACIER", .GLACIER },
+ .{ "GLACIER_IR", .GLACIER_IR },
+ .{ "REDUCED_REDUNDANCY", .REDUCED_REDUNDANCY },
+ .{ "OUTPOSTS", .OUTPOSTS },
+ .{ "DEEP_ARCHIVE", .DEEP_ARCHIVE },
+ .{ "SNOW", .SNOW },
+ });
+};
diff --git a/test/js/bun/s3/s3-storage-class.test.ts b/test/js/bun/s3/s3-storage-class.test.ts
new file mode 100644
index 0000000000..f8a0897e27
--- /dev/null
+++ b/test/js/bun/s3/s3-storage-class.test.ts
@@ -0,0 +1,284 @@
+import { describe, it, expect } from "bun:test";
+import { s3, S3Client, type S3Options } from "bun";
+import { randomUUID } from "node:crypto";
+
+describe("s3 - Storage class", () => {
+ const s3Options: S3Options = {
+ accessKeyId: "test",
+ secretAccessKey: "test",
+ region: "eu-west-3",
+ bucket: "my_bucket",
+ };
+
+  it("should throw TypeError if storage class isn't one of the enum values", async () => {
+ try {
+ new S3Client({
+ ...s3Options,
+ endpoint: "anything",
+ // @ts-expect-error not an enum
+ storageClass: "INVALID_VALUE",
+ }).file("instance_file");
+
+ expect.unreachable();
+ } catch (e) {
+ expect(e).toBeInstanceOf(TypeError);
+ }
+ });
+
+ it("should work with static .file() method", async () => {
+ let reqHeaders: Headers | undefined = undefined;
+ using server = Bun.serve({
+ port: 0,
+ async fetch(req) {
+ reqHeaders = req.headers;
+ return new Response("", {
+ headers: {
+ "Content-Type": "text/plain",
+ },
+ status: 200,
+ });
+ },
+ });
+
+ const storageClass = "STANDARD_IA";
+
+ await S3Client.file("from_static_file", {
+ ...s3Options,
+ endpoint: server.url.href,
+ storageClass,
+ }).write("This is a good file");
+
+ expect(reqHeaders!.get("authorization")).toInclude("x-amz-storage-class");
+ expect(reqHeaders!.get("x-amz-storage-class")).toBe(storageClass);
+ });
+
+ it("should work with static .write() method", async () => {
+ let reqHeaders: Headers | undefined = undefined;
+ using server = Bun.serve({
+ port: 0,
+ async fetch(req) {
+ reqHeaders = req.headers;
+ return new Response("", {
+ headers: {
+ "Content-Type": "text/plain",
+ },
+ status: 200,
+ });
+ },
+ });
+
+ const storageClass = "REDUCED_REDUNDANCY";
+
+ await S3Client.write("from_static_write", "This is a good file", {
+ ...s3Options,
+ endpoint: server.url.href,
+ storageClass,
+ });
+
+ expect(reqHeaders!.get("authorization")).toInclude("x-amz-storage-class");
+ expect(reqHeaders!.get("x-amz-storage-class")).toBe(storageClass);
+ });
+
+ it("should work with static presign", () => {
+ const storageClass = "DEEP_ARCHIVE";
+ const result = S3Client.file("awsome_file").presign({
+ ...s3Options,
+ storageClass,
+ });
+
+ expect(result).toInclude(`x-amz-storage-class=${storageClass}`);
+ });
+
+ it("should work with instance options + .file() method", async () => {
+ let reqHeaders: Headers | undefined = undefined;
+ using server = Bun.serve({
+ port: 0,
+ async fetch(req) {
+ reqHeaders = req.headers;
+ return new Response("", {
+ headers: {
+ "Content-Type": "text/plain",
+ },
+ status: 200,
+ });
+ },
+ });
+
+ const storageClass = "ONEZONE_IA";
+
+ const s3 = new S3Client({
+ ...s3Options,
+ endpoint: server.url.href,
+ storageClass,
+ });
+
+ const file = s3.file("instance_file");
+
+ await file.write("Some content");
+
+ expect(reqHeaders!.get("authorization")).toInclude("x-amz-storage-class");
+ expect(reqHeaders!.get("x-amz-storage-class")).toBe(storageClass);
+ });
+
+ it("should work with instance .file() method + options", async () => {
+ let reqHeaders: Headers | undefined = undefined;
+ using server = Bun.serve({
+ port: 0,
+ async fetch(req) {
+ reqHeaders = req.headers;
+ return new Response("", {
+ headers: {
+ "Content-Type": "text/plain",
+ },
+ status: 200,
+ });
+ },
+ });
+
+ const storageClass = "SNOW";
+
+ const file = new S3Client({
+ ...s3Options,
+ endpoint: server.url.href,
+ }).file("instance_file", { storageClass });
+
+ await file.write("Some content");
+
+ expect(reqHeaders!.get("authorization")).toInclude("x-amz-storage-class");
+ expect(reqHeaders!.get("x-amz-storage-class")).toBe(storageClass);
+ });
+
+ it("should work with writer + options on small file", async () => {
+ let reqHeaders: Headers | undefined = undefined;
+ using server = Bun.serve({
+ port: 0,
+ async fetch(req) {
+ reqHeaders = req.headers;
+ return new Response("", {
+ headers: {
+ "Content-Type": "text/plain",
+ },
+ status: 200,
+ });
+ },
+ });
+
+ const storageClass = "SNOW";
+
+ const s3 = new S3Client({
+ ...s3Options,
+ endpoint: server.url.href,
+ });
+
+ const writer = s3.file("file_from_writer").writer({ storageClass });
+
+ const smallFile = Buffer.alloc(10 * 1024);
+
+ for (let i = 0; i < 10; i++) {
+ await writer.write(smallFile);
+ }
+ await writer.end();
+
+ expect(reqHeaders!.get("authorization")).toInclude("x-amz-storage-class");
+ expect(reqHeaders!.get("x-amz-storage-class")).toBe(storageClass);
+ });
+
+ it(
+ "should work with writer + options on big file",
+ async () => {
+ let reqHeaders: Headers | undefined = undefined;
+
+ using server = Bun.serve({
+ port: 0,
+ async fetch(req) {
+ const isCreateMultipartUploadRequest = req.method == "POST" && req.url.includes("?uploads=");
+
+ if (isCreateMultipartUploadRequest) {
+ reqHeaders = req.headers;
+ return new Response(
+ `
+ my_bucket
+ file_from_writer
+ ${randomUUID()}
+ `,
+ {
+ headers: {
+ "Content-Type": "text/xml",
+ },
+ status: 200,
+ },
+ );
+ }
+
+        const isCompleteMultipartUploadRequest = req.method == "POST" && req.url.includes("uploadId=");
+
+        if (isCompleteMultipartUploadRequest) {
+ return new Response(
+ `
+ http://my_bucket.s3..amazonaws.com/file_from_writer
+ my_bucket
+ file_from_writer
+ "f9a5ddddf9e0fcbd05c15bb44b389171-20"
+`,
+ {
+ headers: {
+ "Content-Type": "text/xml",
+ },
+ status: 200,
+ },
+ );
+ }
+
+ return new Response(undefined, { status: 200, headers: { "Etag": `"f9a5ddddf9e0fcbd05c15bb44b389171-20"` } });
+ },
+ });
+
+ const storageClass = "SNOW";
+
+ const s3 = new S3Client({
+ ...s3Options,
+ endpoint: server.url.href,
+ });
+
+ const writer = s3.file("file_from_writer").writer({
+ storageClass,
+ queueSize: 10,
+ partSize: 5 * 1024,
+ });
+
+ const bigFile = Buffer.alloc(10 * 1024 * 1024);
+
+ for (let i = 0; i < 10; i++) {
+ await writer.write(bigFile);
+ }
+ await writer.end();
+
+ expect(reqHeaders!.get("authorization")).toInclude("x-amz-storage-class");
+ expect(reqHeaders!.get("x-amz-storage-class")).toBe(storageClass);
+ },
+ { timeout: 20_000 },
+ );
+
+ it("should work with default s3 instance", async () => {
+ let reqHeaders: Headers | undefined = undefined;
+ using server = Bun.serve({
+ port: 0,
+ async fetch(req) {
+ reqHeaders = req.headers;
+ return new Response("", {
+ headers: {
+ "Content-Type": "text/plain",
+ },
+ status: 200,
+ });
+ },
+ });
+
+ const storageClass = "INTELLIGENT_TIERING";
+
+ await s3.file("my_file", { ...s3Options, storageClass, endpoint: server.url.href }).write("any thing");
+
+ expect(reqHeaders!.get("authorization")).toInclude("x-amz-storage-class");
+ expect(reqHeaders!.get("x-amz-storage-class")).toBe(storageClass);
+ });
+});
diff --git a/test/js/bun/s3/s3.test.ts b/test/js/bun/s3/s3.test.ts
index 5e955032fe..a1c280b266 100644
--- a/test/js/bun/s3/s3.test.ts
+++ b/test/js/bun/s3/s3.test.ts
@@ -996,6 +996,22 @@ for (let credentials of allCredentials) {
expect(url.includes("X-Amz-SignedHeaders")).toBe(true);
});
+ it("should work with storage class", async () => {
+ const s3file = s3("s3://bucket/credentials-test", s3Options);
+ const url = s3file.presign({
+ expiresIn: 10,
+ storageClass: "GLACIER_IR",
+ });
+ expect(url).toBeDefined();
+ expect(url.includes("X-Amz-Expires=10")).toBe(true);
+ expect(url.includes("x-amz-storage-class=GLACIER_IR")).toBe(true);
+ expect(url.includes("X-Amz-Date")).toBe(true);
+ expect(url.includes("X-Amz-Signature")).toBe(true);
+ expect(url.includes("X-Amz-Credential")).toBe(true);
+ expect(url.includes("X-Amz-Algorithm")).toBe(true);
+ expect(url.includes("X-Amz-SignedHeaders")).toBe(true);
+ });
+
it("s3().presign() should work", async () => {
const url = s3("s3://bucket/credentials-test", s3Options).presign({
expiresIn: 10,