diff --git a/src/sql/postgres/DataCell.zig b/src/sql/postgres/DataCell.zig
index 70e4a1361f..ecf1bce52c 100644
--- a/src/sql/postgres/DataCell.zig
+++ b/src/sql/postgres/DataCell.zig
@@ -714,6 +714,9 @@ pub const DataCell = extern struct {
                }
            },
            .date, .timestamp, .timestamptz => |tag| {
+                if (bytes.len == 0) {
+                    return DataCell{ .tag = .null, .value = .{ .null = 0 } };
+                }
                if (binary and bytes.len == 8) {
                    switch (tag) {
                        .timestamptz => return DataCell{ .tag = .date_with_time_zone, .value = .{ .date_with_time_zone = types.date.fromBinary(bytes) } },
@@ -721,6 +724,9 @@ pub const DataCell = extern struct {
                        else => unreachable,
                    }
                } else {
+                    if (bun.strings.eqlCaseInsensitiveASCII(bytes, "NULL", true)) {
+                        return DataCell{ .tag = .null, .value = .{ .null = 0 } };
+                    }
                    var str = bun.String.init(bytes);
                    defer str.deref();
                    return DataCell{ .tag = .date, .value = .{ .date = try str.parseDate(globalObject) } };
@@ -1039,6 +1045,16 @@ pub const DataCell = extern struct {
        }

        fn putImpl(this: *Putter, index: u32, optional_bytes: ?*Data, comptime is_raw: bool) !bool {
+            // Bounds checks: ignore extra fields instead of crashing when the
+            // fields/list arrays are empty (see #21311).
+            if (index >= this.fields.len) {
+                debug("putImpl: index {d} >= fields.len {d}, ignoring extra field", .{ index, this.fields.len });
+                return false;
+            }
+            if (index >= this.list.len) {
+                debug("putImpl: index {d} >= list.len {d}, ignoring extra field", .{ index, this.list.len });
+                return false;
+            }
            const field = &this.fields[index];
            const oid = field.type_oid;
            debug("index: {d}, oid: {d}", .{ index, oid });
diff --git a/test/no-validate-exceptions.txt b/test/no-validate-exceptions.txt
index ee15469c43..2c968e852b 100644
--- a/test/no-validate-exceptions.txt
+++ b/test/no-validate-exceptions.txt
@@ -272,3 +272,4 @@ test/js/web/crypto/web-crypto.test.ts
 test/js/node/crypto/node-crypto.test.js
 test/js/third_party/pg/pg.test.ts
 test/regression/issue/01466.test.ts
+test/regression/issue/21311.test.ts
diff --git a/test/regression/issue/21311.test.ts b/test/regression/issue/21311.test.ts
new file mode 100644
index 0000000000..214ea70879
--- /dev/null
+++ b/test/regression/issue/21311.test.ts
@@ -0,0 +1,110 @@
+import { SQL } from "bun";
+import { describe, expect, test } from "bun:test";
+import { getSecret } from "harness";
+
+const postgres = (...args: ConstructorParameters<typeof SQL>) => new SQL(...args);
+const databaseUrl = getSecret("TLS_POSTGRES_DATABASE_URL");
+
+// Skip the suite instead of failing when the database secret is unavailable.
+describe.skipIf(!databaseUrl)("postgres batch insert crash fix #21311", () => {
+  test("should handle large batch inserts without crashing", async () => {
+    const sql = postgres(databaseUrl!);
+    try {
+      // Create a test table
+      await sql`DROP TABLE IF EXISTS test_batch_21311`;
+      await sql`CREATE TABLE test_batch_21311 (
+        id serial PRIMARY KEY,
+        data VARCHAR(100)
+      );`;
+
+      // Generate a large batch of rows to insert
+      const batchSize = 100;
+      const values = Array.from({ length: batchSize }, (_, i) => `('batch_data_${i}')`).join(", ");
+
+      // This query would previously crash with "index out of bounds: index 0, len 0"
+      // on Windows when the fields metadata wasn't properly initialized
+      const insertQuery = `INSERT INTO test_batch_21311 (data) VALUES ${values} RETURNING id, data`;
+
+      const results = await sql.unsafe(insertQuery);
+
+      expect(results).toHaveLength(batchSize);
+      expect(results[0]).toHaveProperty("id");
+      expect(results[0]).toHaveProperty("data");
+      expect(results[0].data).toBe("batch_data_0");
+      expect(results[batchSize - 1].data).toBe(`batch_data_${batchSize - 1}`);
+
+      // Cleanup
+      await sql`DROP TABLE test_batch_21311`;
+    } finally {
+      await sql.end();
+    }
+  });
+
+  test("should handle empty result sets without crashing", async () => {
+    const sql = postgres(databaseUrl!);
+    try {
+      // Create a table, then query it with a predicate that matches no rows
+      await sql`DROP TABLE IF EXISTS test_empty_21311`;
+      await sql`CREATE TABLE test_empty_21311 (
+        id serial PRIMARY KEY,
+        data VARCHAR(100)
+      );`;
+
+      // A query that returns zero rows; this exercises the empty-result path
+      const results = await sql`SELECT * FROM test_empty_21311 WHERE id = -1`;
+
+      expect(results).toHaveLength(0);
+
+      // Cleanup
+      await sql`DROP TABLE test_empty_21311`;
+    } finally {
+      await sql.end();
+    }
+  });
+
+  test("should handle mixed date formats in batch operations", async () => {
+    const sql = postgres(databaseUrl!);
+    try {
+      // Create test table; has_infinity_date marks rows whose date is 'infinity'
+      await sql`DROP TABLE IF EXISTS test_concurrent_21311`;
+      await sql`CREATE TABLE test_concurrent_21311 (
+        id serial PRIMARY KEY,
+        has_infinity_date INT,
+        date DATE NULL
+      );`;
+
+      // Run multiple concurrent batch operations.
+      // This probes potential race conditions in field metadata setup.
+      const concurrentOperations = Array.from({ length: 100 }, async () => {
+        const batchSize = 20;
+        const values = Array.from(
+          { length: batchSize },
+          (_, i) => `(${i % 2 === 0 ? 1 : 0}, ${i % 2 === 0 ? "'infinity'::date" : "NULL"})`,
+        ).join(", ");
+
+        const insertQuery = `INSERT INTO test_concurrent_21311 (has_infinity_date, date) VALUES ${values} RETURNING id, has_infinity_date, date`;
+        return sql.unsafe(insertQuery);
+      });
+
+      await Promise.all(concurrentOperations);
+
+      // Read everything back and verify each row's date against its marker
+      const allQueryResults = await sql`SELECT * FROM test_concurrent_21311`;
+      allQueryResults.forEach(row => {
+        expect(row.has_infinity_date).toBeNumber();
+        if (row.has_infinity_date) {
+          // 'infinity'::date parses to an invalid Date (NaN timestamp)
+          expect(row.date).toBeDefined();
+          expect(row.date?.getTime()).toBeNaN();
+        } else {
+          expect(row.date).toBeNull();
+        }
+      });
+
+      // Cleanup
+      await sql`DROP TABLE test_concurrent_21311`;
+    } finally {
+      await sql.end();
+    }
+  });
+});
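
For quick local verification, a minimal standalone reproduction distilled from the first test above. This is a sketch, not part of the diff: it assumes a Postgres instance reachable via the DATABASE_URL environment variable, and the repro_21311 table name is illustrative.

import { SQL } from "bun";

// Before the putImpl bounds checks, a wide INSERT ... RETURNING could abort with
// "index out of bounds: index 0, len 0" when a row arrived with empty field metadata.
const sql = new SQL(process.env.DATABASE_URL!);
await sql`CREATE TABLE IF NOT EXISTS repro_21311 (id serial PRIMARY KEY, data text)`;
const values = Array.from({ length: 100 }, (_, i) => `('row_${i}')`).join(", ");
const rows = await sql.unsafe(`INSERT INTO repro_21311 (data) VALUES ${values} RETURNING id, data`);
console.log(rows.length); // 100 with the fix applied
await sql.end();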