From 596e83c9182bd0de92c12d2aa853a20033f148a3 Mon Sep 17 00:00:00 2001
From: Dylan Conway
Date: Thu, 8 Jan 2026 19:46:06 -0800
Subject: [PATCH] fix: correct logic bugs in libarchive, s3 credentials, and postgres bindings (#25905)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

## Summary
- **libarchive.zig:110**: Fix a self-assignment bug where `this.pos` was assigned to itself instead of to `new_pos`
- **s3/credentials.zig:165,176,199**: Fix impossible range checks: `and` should be `or` for the pageSize, partSize, and retry validation (a value cannot be both less than MIN and greater than MAX)
- **postgres.zig:14**: Fix a copy-paste error where the createConnection function was internally named "createQuery"

## Test plan
- [ ] Verify S3 credential validation now properly rejects out-of-range values for pageSize, partSize, and retry
- [ ] Verify libarchive seek operations work correctly
- [ ] Verify the postgres createConnection function has the correct internal name

🤖 Generated with [Claude Code](https://claude.com/claude-code)

---------

Co-authored-by: Claude
Co-authored-by: Jarred Sumner
Co-authored-by: Claude Bot
---
 src/libarchive/libarchive.zig           | 2 +-
 src/s3/credentials.zig                  | 6 +++---
 src/sql/postgres.zig                    | 2 +-
 test/js/bun/s3/s3-storage-class.test.ts | 2 +-
 4 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/libarchive/libarchive.zig b/src/libarchive/libarchive.zig
index db2c787ef2..8291f9a012 100644
--- a/src/libarchive/libarchive.zig
+++ b/src/libarchive/libarchive.zig
@@ -107,7 +107,7 @@ pub const BufferReadStream = struct {
         const proposed = pos + offset;
         const new_pos = @min(@max(proposed, 0), buflen - 1);
 
-        this.pos = @as(usize, @intCast(this.pos));
+        this.pos = @as(usize, @intCast(new_pos));
 
         return new_pos - pos;
     }
diff --git a/src/s3/credentials.zig b/src/s3/credentials.zig
index 602f7f4399..48f25d8168 100644
--- a/src/s3/credentials.zig
+++ b/src/s3/credentials.zig
@@ -162,7 +162,7 @@ pub const S3Credentials = struct {
         }
 
         if (try opts.getOptional(globalObject, "pageSize", i64)) |pageSize| {
-            if (pageSize < MultiPartUploadOptions.MIN_SINGLE_UPLOAD_SIZE and pageSize > MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE) {
+            if (pageSize < MultiPartUploadOptions.MIN_SINGLE_UPLOAD_SIZE or pageSize > MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE) {
                 return globalObject.throwRangeError(pageSize, .{
                     .min = @intCast(MultiPartUploadOptions.MIN_SINGLE_UPLOAD_SIZE),
                     .max = @intCast(MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE),
@@ -173,7 +173,7 @@
             }
         }
         if (try opts.getOptional(globalObject, "partSize", i64)) |partSize| {
-            if (partSize < MultiPartUploadOptions.MIN_SINGLE_UPLOAD_SIZE and partSize > MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE) {
+            if (partSize < MultiPartUploadOptions.MIN_SINGLE_UPLOAD_SIZE or partSize > MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE) {
                 return globalObject.throwRangeError(partSize, .{
                     .min = @intCast(MultiPartUploadOptions.MIN_SINGLE_UPLOAD_SIZE),
                     .max = @intCast(MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE),
@@ -196,7 +196,7 @@
         }
 
         if (try opts.getOptional(globalObject, "retry", i32)) |retry| {
-            if (retry < 0 and retry > 255) {
+            if (retry < 0 or retry > 255) {
                 return globalObject.throwRangeError(retry, .{
                     .min = 0,
                     .max = 255,
diff --git a/src/sql/postgres.zig b/src/sql/postgres.zig
index 6deeeb8da0..29de836e54 100644
--- a/src/sql/postgres.zig
+++ b/src/sql/postgres.zig
@@ -11,7 +11,7 @@ pub fn createBinding(globalObject: *jsc.JSGlobalObject) JSValue {
     binding.put(
         globalObject,
         ZigString.static("createConnection"),
-        jsc.JSFunction.create(globalObject, "createQuery", PostgresSQLConnection.call, 2, .{}),
+        jsc.JSFunction.create(globalObject, "createConnection", PostgresSQLConnection.call, 2, .{}),
     );
 
     return binding;
diff --git a/test/js/bun/s3/s3-storage-class.test.ts b/test/js/bun/s3/s3-storage-class.test.ts
index 285bc032c9..a83ded2799 100644
--- a/test/js/bun/s3/s3-storage-class.test.ts
+++ b/test/js/bun/s3/s3-storage-class.test.ts
@@ -243,7 +243,7 @@ describe("s3 - Storage class", () => {
      const writer = s3.file("file_from_writer").writer({
        storageClass,
        queueSize: 10,
-        partSize: 5 * 1024,
+        partSize: 5 * 1024 * 1024, // 5MB minimum
      });

      const bigFile = Buffer.alloc(10 * 1024 * 1024);
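
For reference, a minimal standalone Zig sketch (not part of the patch) of the corrected range-check logic from the `and` → `or` changes above. The `isOutOfRange` helper and the values in `main` are illustrative only, not names from the Bun codebase:

```zig
const std = @import("std");

// Illustrative helper (hypothetical name): a value is out of range when it is
// below the minimum OR above the maximum. With `and`, the condition can never
// be true for a non-empty range, so the old checks rejected nothing.
fn isOutOfRange(value: i64, min: i64, max: i64) bool {
    return value < min or value > max;
}

pub fn main() void {
    // Mirrors the retry bounds (0..255) used in s3/credentials.zig.
    std.debug.print("256 out of range: {}\n", .{isOutOfRange(256, 0, 255)}); // true
    std.debug.print("100 out of range: {}\n", .{isOutOfRange(100, 0, 255)}); // false
}
```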