feat(s3): add Content-Encoding header support for S3 uploads (#26149)

## Summary

Add support for setting the `Content-Encoding` header in S3 `.write()`
and `.writer()` calls, following the same pattern as
`Content-Disposition`.

This allows users to specify the encoding of uploaded content:

```typescript
// With .write()
await s3file.write("compressed data", { contentEncoding: "gzip" });

// With .writer()
const writer = s3file.writer({ contentEncoding: "gzip" });
writer.write("compressed data");
await writer.end();

// With bucket.write()
await bucket.write("key", data, { contentEncoding: "br" });
```
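
Note that `contentEncoding` only sets the header; Bun does not compress the body for you. A minimal sketch of pairing the header with genuinely gzipped data, using Bun's built-in `Bun.gzipSync` and reusing the `s3file` handle from above:

```typescript
// Compress the payload ourselves before labeling it as gzip.
const payload = new TextEncoder().encode("Hello Bun!".repeat(1000));
const gzipped = Bun.gzipSync(payload);

await s3file.write(gzipped, { contentEncoding: "gzip" });

// Clients that honor Content-Encoding decompress transparently:
const res = await fetch(s3file.presign());
console.log(await res.text()); // the original, decompressed text
```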

## Implementation

- Extended `SignedHeaders.Key` from 6 bits to 7 bits (64→128
combinations) to accommodate the new header
- Added `content_encoding` to `S3CredentialsWithOptions`, `SignOptions`,
and `SignResult` structs
- Updated `CanonicalRequest` format strings to include
`content-encoding` in AWS SigV4 signing (see the sketch after this list)
- Added `getContentEncoding()` method to `Headers` for fetch-based S3
uploads
- Expanded the `SignResult._headers` array from 9 to 11 entries (a new
`MAX_HEADERS` constant) and sized header scratch buffers as
`MAX_HEADERS + 1`
- Pass `content_encoding` through all S3 upload paths (upload,
uploadStream, writableStream)
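
SigV4 requires signed headers to be listed alphabetically, which is why `content-encoding` slots in right after `content-disposition` throughout. A rough TypeScript sketch of the canonical-request shape (illustrative only; Bun builds these strings at comptime in Zig, and the names here are not Bun's):

```typescript
// Illustrative SigV4 canonical-request assembly, not Bun's actual code.
function canonicalRequest(
  method: string,
  path: string,
  query: string,
  headers: Record<string, string>, // e.g. { "content-encoding": "gzip", host: "..." }
  payloadHash: string,
): string {
  // Lowercase, trim, and alphabetically sort header names per the SigV4 spec.
  const entries = Object.entries(headers)
    .map(([name, value]) => [name.toLowerCase(), value.trim()] as const)
    .sort(([a], [b]) => (a < b ? -1 : 1));
  const canonicalHeaders = entries.map(([n, v]) => `${n}:${v}\n`).join("");
  // e.g. "content-disposition;content-encoding;content-md5;host;x-amz-..."
  const signedHeaders = entries.map(([n]) => n).join(";");
  return [method, path, query, canonicalHeaders, signedHeaders, payloadHash].join("\n");
}
```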

## Test plan

- Added tests for "should be able to set content-encoding" 
- Added tests for "should be able to set content-encoding in writer"
- Tests verify the Content-Encoding header is properly set on uploaded
objects via presigned URL fetch
- All 4 new tests pass with `bun bd test` and fail with
`USE_SYSTEM_BUN=1` (confirming the feature is new)

## Changelog


Added `contentEncoding` option to S3 `.write()` and `.writer()` methods,
allowing users to set the `Content-Encoding` header when uploading
objects.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

---------

Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
Author: Ciro Spaciari
Date: 2026-01-15 18:09:33 -08:00
Committed by: GitHub
Commit: 5d3f37d7ae (parent: 2a483631fb)
8 changed files with 141 additions and 12 deletions


```diff
@@ -968,6 +968,7 @@ fn writeFileWithEmptySourceToDestination(ctx: *jsc.JSGlobalObject, destination_b
 "",
 destination_blob.contentTypeOrMimeType(),
 aws_options.content_disposition,
+aws_options.content_encoding,
 aws_options.acl,
 proxy_url,
 aws_options.storage_class,
@@ -1120,6 +1121,7 @@ pub fn writeFileWithSourceDestination(ctx: *jsc.JSGlobalObject, source_blob: *Bl
 aws_options.storage_class,
 destination_blob.contentTypeOrMimeType(),
 aws_options.content_disposition,
+aws_options.content_encoding,
 proxy_url,
 aws_options.request_payer,
 null,
@@ -1160,6 +1162,7 @@ pub fn writeFileWithSourceDestination(ctx: *jsc.JSGlobalObject, source_blob: *Bl
 bytes.slice(),
 destination_blob.contentTypeOrMimeType(),
 aws_options.content_disposition,
+aws_options.content_encoding,
 aws_options.acl,
 proxy_url,
 aws_options.storage_class,
@@ -1191,6 +1194,7 @@ pub fn writeFileWithSourceDestination(ctx: *jsc.JSGlobalObject, source_blob: *Bl
 aws_options.storage_class,
 destination_blob.contentTypeOrMimeType(),
 aws_options.content_disposition,
+aws_options.content_encoding,
 proxy_url,
 aws_options.request_payer,
 null,
@@ -1398,6 +1402,7 @@ pub fn writeFileInternal(globalThis: *jsc.JSGlobalObject, path_or_blob_: *PathOr
 aws_options.storage_class,
 destination_blob.contentTypeOrMimeType(),
 aws_options.content_disposition,
+aws_options.content_encoding,
 proxy_url,
 aws_options.request_payer,
 null,
@@ -1460,6 +1465,7 @@ pub fn writeFileInternal(globalThis: *jsc.JSGlobalObject, path_or_blob_: *PathOr
 aws_options.storage_class,
 destination_blob.contentTypeOrMimeType(),
 aws_options.content_disposition,
+aws_options.content_encoding,
 proxy_url,
 aws_options.request_payer,
 null,
@@ -2438,6 +2444,7 @@ pub fn pipeReadableStreamToBlob(this: *Blob, globalThis: *jsc.JSGlobalObject, re
 aws_options.storage_class,
 this.contentTypeOrMimeType(),
 aws_options.content_disposition,
+aws_options.content_encoding,
 proxy_url,
 aws_options.request_payer,
 null,
@@ -2674,7 +2681,16 @@ pub fn getWriter(
 }
 content_disposition_str = try content_disposition.toSlice(globalThis, bun.default_allocator);
 }
-const credentialsWithOptions = try s3.getCredentialsWithOptions(options, globalThis);
+var content_encoding_str: ?ZigString.Slice = null;
+defer if (content_encoding_str) |ce| ce.deinit();
+if (try options.getTruthy(globalThis, "contentEncoding")) |content_encoding| {
+    if (!content_encoding.isString()) {
+        return globalThis.throwInvalidArgumentType("write", "options.contentEncoding", "string");
+    }
+    content_encoding_str = try content_encoding.toSlice(globalThis, bun.default_allocator);
+}
+var credentialsWithOptions = try s3.getCredentialsWithOptions(options, globalThis);
+defer credentialsWithOptions.deinit();
 return try S3.writableStream(
 credentialsWithOptions.credentials.dupe(),
 path,
@@ -2682,6 +2698,7 @@ pub fn getWriter(
 credentialsWithOptions.options,
 this.contentTypeOrMimeType(),
 if (content_disposition_str) |cd| cd.slice() else null,
+if (content_encoding_str) |ce| ce.slice() else null,
 proxy_url,
 credentialsWithOptions.storage_class,
 credentialsWithOptions.request_payer,
@@ -2695,6 +2712,7 @@ pub fn getWriter(
 .{},
 this.contentTypeOrMimeType(),
 null,
+null,
 proxy_url,
 null,
 s3.request_payer,
```


```diff
@@ -1319,6 +1319,7 @@ fn fetchImpl(
 credentialsWithOptions.storage_class,
 if (headers) |h| (h.getContentType()) else null,
 if (headers) |h| h.getContentDisposition() else null,
+if (headers) |h| h.getContentEncoding() else null,
 proxy_url,
 credentialsWithOptions.request_payer,
 @ptrCast(&Wrapper.resolve),
@@ -1360,7 +1361,7 @@ fn fetchImpl(
 }
 const content_type = if (headers) |h| (h.getContentType()) else null;
-var header_buffer: [10]picohttp.Header = undefined;
+var header_buffer: [s3.S3Credentials.SignResult.MAX_HEADERS + 1]picohttp.Header = undefined;
 if (range) |range_| {
 const _headers = result.mixWithHeader(&header_buffer, .{ .name = "range", .value = range_ });
```


```diff
@@ -79,6 +79,9 @@ pub fn deinit(this: *Headers) void {
 pub fn getContentDisposition(this: *const Headers) ?[]const u8 {
     return this.get("content-disposition");
 }
+pub fn getContentEncoding(this: *const Headers) ?[]const u8 {
+    return this.get("content-encoding");
+}
 pub fn getContentType(this: *const Headers) ?[]const u8 {
     return this.get("content-type");
 }
```


```diff
@@ -238,6 +238,7 @@ pub fn upload(
 content: []const u8,
 content_type: ?[]const u8,
 content_disposition: ?[]const u8,
+content_encoding: ?[]const u8,
 acl: ?ACL,
 proxy_url: ?[]const u8,
 storage_class: ?StorageClass,
@@ -252,6 +253,7 @@ pub fn upload(
 .body = content,
 .content_type = content_type,
 .content_disposition = content_disposition,
+.content_encoding = content_encoding,
 .acl = acl,
 .storage_class = storage_class,
 .request_payer = request_payer,
@@ -265,6 +267,7 @@ pub fn writableStream(
 options: MultiPartUploadOptions,
 content_type: ?[]const u8,
 content_disposition: ?[]const u8,
+content_encoding: ?[]const u8,
 proxy: ?[]const u8,
 storage_class: ?StorageClass,
 request_payer: bool,
@@ -310,6 +313,7 @@ pub fn writableStream(
 .proxy = if (proxy_url.len > 0) bun.handleOom(bun.default_allocator.dupe(u8, proxy_url)) else "",
 .content_type = if (content_type) |ct| bun.handleOom(bun.default_allocator.dupe(u8, ct)) else null,
 .content_disposition = if (content_disposition) |cd| bun.handleOom(bun.default_allocator.dupe(u8, cd)) else null,
+.content_encoding = if (content_encoding) |ce| bun.handleOom(bun.default_allocator.dupe(u8, ce)) else null,
 .storage_class = storage_class,
 .request_payer = request_payer,
@@ -451,6 +455,7 @@ pub fn uploadStream(
 storage_class: ?StorageClass,
 content_type: ?[]const u8,
 content_disposition: ?[]const u8,
+content_encoding: ?[]const u8,
 proxy: ?[]const u8,
 request_payer: bool,
 callback: ?*const fn (S3UploadResult, *anyopaque) void,
@@ -489,6 +494,7 @@ pub fn uploadStream(
 .proxy = if (proxy_url.len > 0) bun.handleOom(bun.default_allocator.dupe(u8, proxy_url)) else "",
 .content_type = if (content_type) |ct| bun.handleOom(bun.default_allocator.dupe(u8, ct)) else null,
 .content_disposition = if (content_disposition) |cd| bun.handleOom(bun.default_allocator.dupe(u8, cd)) else null,
+.content_encoding = if (content_encoding) |ce| bun.handleOom(bun.default_allocator.dupe(u8, ce)) else null,
 .callback = @ptrCast(&S3UploadStreamWrapper.resolve),
 .callback_context = undefined,
 .globalThis = globalThis,
@@ -559,7 +565,7 @@ pub fn downloadStream(
 return;
 };
-var header_buffer: [10]picohttp.Header = undefined;
+var header_buffer: [S3Credentials.SignResult.MAX_HEADERS + 1]picohttp.Header = undefined;
 const headers = brk: {
 if (range) |range_| {
 const _headers = result.mixWithHeader(&header_buffer, .{ .name = "range", .value = range_ });
```


```diff
@@ -244,6 +244,21 @@ pub const S3Credentials = struct {
 }
 }
+if (try opts.getTruthyComptime(globalObject, "contentEncoding")) |js_value| {
+    if (!js_value.isEmptyOrUndefinedOrNull()) {
+        if (js_value.isString()) {
+            const str = try bun.String.fromJS(js_value, globalObject);
+            defer str.deref();
+            if (str.tag != .Empty and str.tag != .Dead) {
+                new_credentials._contentEncodingSlice = str.toUTF8(bun.default_allocator);
+                new_credentials.content_encoding = new_credentials._contentEncodingSlice.?.slice();
+            }
+        } else {
+            return globalObject.throwInvalidArgumentTypeValue("contentEncoding", "string", js_value);
+        }
+    }
+}
 if (try opts.getBooleanStrict(globalObject, "requestPayer")) |request_payer| {
 new_credentials.request_payer = request_payer;
 }
@@ -363,12 +378,15 @@ pub const S3Credentials = struct {
 url: []const u8,
 content_disposition: []const u8 = "",
+content_encoding: []const u8 = "",
 content_md5: []const u8 = "",
 session_token: []const u8 = "",
 acl: ?ACL = null,
 storage_class: ?StorageClass = null,
 request_payer: bool = false,
-_headers: [9]picohttp.Header = .{
+_headers: [MAX_HEADERS]picohttp.Header = .{
+    .{ .name = "", .value = "" },
+    .{ .name = "", .value = "" },
     .{ .name = "", .value = "" },
     .{ .name = "", .value = "" },
     .{ .name = "", .value = "" },
@@ -381,6 +399,8 @@ pub const S3Credentials = struct {
 },
 _headers_len: u8 = 0,
+
+pub const MAX_HEADERS = 11;
 pub fn headers(this: *const @This()) []const picohttp.Header {
     return this._headers[0..this._headers_len];
 }
@@ -408,6 +428,10 @@ pub const S3Credentials = struct {
 bun.freeSensitive(bun.default_allocator, this.content_disposition);
 }
+if (this.content_encoding.len > 0) {
+    bun.freeSensitive(bun.default_allocator, this.content_encoding);
+}
 if (this.host.len > 0) {
 bun.freeSensitive(bun.default_allocator, this.host);
 }
@@ -437,6 +461,7 @@ pub const S3Credentials = struct {
 search_params: ?[]const u8 = null,
 content_disposition: ?[]const u8 = null,
 content_type: ?[]const u8 = null,
+content_encoding: ?[]const u8 = null,
 acl: ?ACL = null,
 storage_class: ?StorageClass = null,
 request_payer: bool = false,
@@ -551,6 +576,10 @@ pub const S3Credentials = struct {
 if (content_type != null and content_type.?.len == 0) {
 content_type = null;
 }
+var content_encoding = signOptions.content_encoding;
+if (content_encoding != null and content_encoding.?.len == 0) {
+    content_encoding = null;
+}
 const session_token: ?[]const u8 = if (this.sessionToken.len == 0) null else this.sessionToken;
 const acl: ?[]const u8 = if (signOptions.acl) |acl_value| acl_value.toString() else null;
@@ -660,6 +689,7 @@ pub const S3Credentials = struct {
 const request_payer = signOptions.request_payer;
 const header_key = SignedHeaders.Key{
 .content_disposition = content_disposition != null,
+.content_encoding = content_encoding != null,
 .content_md5 = content_md5 != null,
 .acl = acl != null,
 .request_payer = request_payer,
@@ -849,6 +879,7 @@ pub const S3Credentials = struct {
 normalizedPath,
 if (search_params) |p| p[1..] else "",
 content_disposition,
+content_encoding,
 content_md5,
 host,
 acl,
@@ -906,6 +937,8 @@ pub const S3Credentials = struct {
 .{ .name = "", .value = "" },
 .{ .name = "", .value = "" },
 .{ .name = "", .value = "" },
+.{ .name = "", .value = "" },
+.{ .name = "", .value = "" },
 },
 ._headers_len = 4,
 };
@@ -933,6 +966,13 @@ pub const S3Credentials = struct {
 result._headers_len += 1;
 }
+if (content_encoding) |ce| {
+    const content_encoding_value = bun.handleOom(bun.default_allocator.dupe(u8, ce));
+    result.content_encoding = content_encoding_value;
+    result._headers[result._headers_len] = .{ .name = "content-encoding", .value = content_encoding_value };
+    result._headers_len += 1;
+}
 if (content_md5) |c_md5| {
 const content_md5_value = bun.handleOom(bun.default_allocator.dupe(u8, c_md5));
 result.content_md5 = content_md5_value;
@@ -956,6 +996,7 @@ pub const S3CredentialsWithOptions = struct {
 storage_class: ?StorageClass = null,
 content_disposition: ?[]const u8 = null,
 content_type: ?[]const u8 = null,
+content_encoding: ?[]const u8 = null,
 /// indicates if requester pays for the request (for requester pays buckets)
 request_payer: bool = false,
 /// indicates if the credentials have changed
@@ -970,6 +1011,7 @@ pub const S3CredentialsWithOptions = struct {
 _sessionTokenSlice: ?jsc.ZigString.Slice = null,
 _contentDispositionSlice: ?jsc.ZigString.Slice = null,
 _contentTypeSlice: ?jsc.ZigString.Slice = null,
+_contentEncodingSlice: ?jsc.ZigString.Slice = null,
 pub fn deinit(this: *@This()) void {
 if (this._accessKeyIdSlice) |slice| slice.deinit();
@@ -980,14 +1022,16 @@ pub const S3CredentialsWithOptions = struct {
 if (this._sessionTokenSlice) |slice| slice.deinit();
 if (this._contentDispositionSlice) |slice| slice.deinit();
 if (this._contentTypeSlice) |slice| slice.deinit();
+if (this._contentEncodingSlice) |slice| slice.deinit();
 }
 };
 /// Comptime-generated lookup table for signed headers strings.
 /// Headers must be in alphabetical order per AWS Signature V4 spec.
 const SignedHeaders = struct {
-const Key = packed struct(u6) {
+const Key = packed struct(u7) {
 content_disposition: bool,
+content_encoding: bool,
 content_md5: bool,
 acl: bool,
 request_payer: bool,
@@ -997,6 +1041,7 @@ const SignedHeaders = struct {
 fn generate(comptime key: Key) []const u8 {
 return (if (key.content_disposition) "content-disposition;" else "") ++
+    (if (key.content_encoding) "content-encoding;" else "") ++
     (if (key.content_md5) "content-md5;" else "") ++
     "host;" ++
     (if (key.acl) "x-amz-acl;" else "") ++
@@ -1007,15 +1052,15 @@ const SignedHeaders = struct {
 }
 const table = init: {
-var t: [64][]const u8 = undefined;
-for (0..64) |i| {
-    t[i] = generate(@bitCast(@as(u6, @intCast(i))));
+var t: [128][]const u8 = undefined;
+for (0..128) |i| {
+    t[i] = generate(@bitCast(@as(u7, @intCast(i))));
 }
 break :init t;
 };
 pub fn get(key: Key) []const u8 {
-    return table[@as(u6, @bitCast(key))];
+    return table[@as(u7, @bitCast(key))];
 }
 };
@@ -1025,6 +1070,7 @@ const CanonicalRequest = struct {
 fn fmtString(comptime key: SignedHeaders.Key) []const u8 {
 return "{s}\n{s}\n{s}\n" ++ // method, path, query
     (if (key.content_disposition) "content-disposition:{s}\n" else "") ++
+    (if (key.content_encoding) "content-encoding:{s}\n" else "") ++
     (if (key.content_md5) "content-md5:{s}\n" else "") ++
     "host:{s}\n" ++
     (if (key.acl) "x-amz-acl:{s}\n" else "") ++
@@ -1042,6 +1088,7 @@ const CanonicalRequest = struct {
 path: []const u8,
 query: []const u8,
 content_disposition: ?[]const u8,
+content_encoding: ?[]const u8,
 content_md5: ?[]const u8,
 host: []const u8,
 acl: ?[]const u8,
@@ -1053,6 +1100,7 @@ const CanonicalRequest = struct {
 ) error{NoSpaceLeft}![]u8 {
 return std.fmt.bufPrint(buf, fmtString(key), .{ method, path, query } ++
     (if (key.content_disposition) .{content_disposition.?} else .{}) ++
+    (if (key.content_encoding) .{content_encoding.?} else .{}) ++
     (if (key.content_md5) .{content_md5.?} else .{}) ++
     .{host} ++
     (if (key.acl) .{acl.?} else .{}) ++
@@ -1069,6 +1117,7 @@ const CanonicalRequest = struct {
 path: []const u8,
 query: []const u8,
 content_disposition: ?[]const u8,
+content_encoding: ?[]const u8,
 content_md5: ?[]const u8,
 host: []const u8,
 acl: ?[]const u8,
@@ -1079,14 +1128,15 @@ const CanonicalRequest = struct {
 signed_headers: []const u8,
 ) error{NoSpaceLeft}![]u8 {
 // Dispatch to the right comptime-specialized function based on runtime key
-return switch (@as(u6, @bitCast(key))) {
-inline 0...63 => |idx| formatForKey(
+return switch (@as(u7, @bitCast(key))) {
+inline 0...127 => |idx| formatForKey(
 @bitCast(idx),
 buf,
 method,
 path,
 query,
 content_disposition,
+content_encoding,
 content_md5,
 host,
 acl,
```


```diff
@@ -117,6 +117,7 @@ pub const MultiPartUpload = struct {
 proxy: []const u8,
 content_type: ?[]const u8 = null,
 content_disposition: ?[]const u8 = null,
+content_encoding: ?[]const u8 = null,
 upload_id: []const u8 = "",
 uploadid_buffer: bun.MutableString = .{ .allocator = bun.default_allocator, .list = .{} },
@@ -284,6 +285,11 @@ pub const MultiPartUpload = struct {
 bun.default_allocator.free(cd);
 }
 }
+if (this.content_encoding) |ce| {
+    if (ce.len > 0) {
+        bun.default_allocator.free(ce);
+    }
+}
 this.credentials.deref();
 this.uploadid_buffer.deinit();
 for (this.multipart_etags.items) |tag| {
@@ -310,6 +316,7 @@ pub const MultiPartUpload = struct {
 .body = this.buffered.slice(),
 .content_type = this.content_type,
 .content_disposition = this.content_disposition,
+.content_encoding = this.content_encoding,
 .acl = this.acl,
 .storage_class = this.storage_class,
 .request_payer = this.request_payer,
@@ -602,6 +609,7 @@ pub const MultiPartUpload = struct {
 .search_params = "?uploads=",
 .content_type = this.content_type,
 .content_disposition = this.content_disposition,
+.content_encoding = this.content_encoding,
 .acl = this.acl,
 .storage_class = this.storage_class,
 .request_payer = this.request_payer,
@@ -680,6 +688,7 @@ pub const MultiPartUpload = struct {
 .body = this.buffered.slice(),
 .content_type = this.content_type,
 .content_disposition = this.content_disposition,
+.content_encoding = this.content_encoding,
 .acl = this.acl,
 .storage_class = this.storage_class,
 .request_payer = this.request_payer,
```


```diff
@@ -351,6 +351,7 @@ pub const S3SimpleRequestOptions = struct {
 search_params: ?[]const u8 = null,
 content_type: ?[]const u8 = null,
 content_disposition: ?[]const u8 = null,
+content_encoding: ?[]const u8 = null,
 // http request options
 body: []const u8,
@@ -372,6 +373,7 @@ pub fn executeSimpleS3Request(
 .method = options.method,
 .search_params = options.search_params,
 .content_disposition = options.content_disposition,
+.content_encoding = options.content_encoding,
 .acl = options.acl,
 .storage_class = options.storage_class,
 .request_payer = options.request_payer,
@@ -383,7 +385,7 @@ pub fn executeSimpleS3Request(
 };
 const headers = brk: {
-var header_buffer: [10]picohttp.Header = undefined;
+var header_buffer: [S3Credentials.SignResult.MAX_HEADERS + 1]picohttp.Header = undefined;
 if (options.range) |range_| {
 const _headers = result.mixWithHeader(&header_buffer, .{ .name = "range", .value = range_ });
 break :brk bun.handleOom(bun.http.Headers.fromPicoHttpHeaders(_headers, bun.default_allocator));
```


```diff
@@ -445,6 +445,46 @@ for (let credentials of allCredentials) {
     }
   });
+  it("should be able to set content-encoding", async () => {
+    await using tmpfile = await tmp();
+    {
+      const s3file = bucket.file(tmpfile.name, options!);
+      await s3file.write("Hello Bun!", { contentEncoding: "gzip" });
+      // Use decompress: false since content isn't actually gzip-compressed
+      const response = await fetch(s3file.presign(), { decompress: false });
+      expect(response.headers.get("content-encoding")).toBe("gzip");
+    }
+    {
+      const s3file = bucket.file(tmpfile.name, options!);
+      await s3file.write("Hello Bun!", { contentEncoding: "br" });
+      // Use decompress: false since content isn't actually br-compressed
+      const response = await fetch(s3file.presign(), { decompress: false });
+      expect(response.headers.get("content-encoding")).toBe("br");
+    }
+    {
+      await bucket.write(tmpfile.name, "Hello Bun!", {
+        ...options,
+        contentEncoding: "identity",
+      });
+      const response = await fetch(bucket.file(tmpfile.name, options!).presign(), { decompress: false });
+      expect(response.headers.get("content-encoding")).toBe("identity");
+    }
+  });
+  it("should be able to set content-encoding in writer", async () => {
+    await using tmpfile = await tmp();
+    {
+      const s3file = bucket.file(tmpfile.name, options!);
+      const writer = s3file.writer({
+        contentEncoding: "gzip",
+      });
+      writer.write("Hello Bun!!");
+      await writer.end();
+      // Use decompress: false since content isn't actually gzip-compressed
+      const response = await fetch(s3file.presign(), { decompress: false });
+      expect(response.headers.get("content-encoding")).toBe("gzip");
+    }
+  });
   it("should be able to upload large files using bucket.write + readable Request", async () => {
     await using tmpfile = await tmp();
     {
```