Mirror of https://github.com/oven-sh/bun (synced 2026-02-10 19:08:50 +00:00)
fix(sql): resolve MySQL connection pool hang with pool size > 1
When a prepared statement completes, the code was calling queue.advance() to process pending queries, but the data written to the buffer was not being flushed. This caused a hang because:

1. The auto-flusher was registered in onData's defer block.
2. But event_loop.exit() ran BEFORE registerAutoFlusher().
3. So the deferred task queue was drained before the auto-flusher was registered.
4. The new data written by queue.advance() would never be flushed.

The fix replaces queue.advance() with flushQueue() in several places:

- checkIfPreparedStatementIsDone() - main fix for the hang
- handleAuth() OK case - for consistency
- handleAuth() success auth case - for consistency
- handlePreparedStatement() ERROR case - for consistency

flushQueue() calls flushData() after queue.advance() to ensure any data written by newly executed queries is actually sent to the server.

This is a follow-up to #26030, which fixed the same pattern for pool size = 1. The issue persisted for pool size > 1 because the auto-flusher timing is different when multiple connections are involved.

Fixes #26235

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
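To illustrate the mechanism the message describes, here is a minimal, self-contained Zig sketch (hypothetical Connection type and field names, not Bun's actual MySQLConnection or queue API): advance() only serializes a pending query into the write buffer, while flushQueue() also flushes that buffer, which is why replacing a bare advance() call avoids the stall when no auto-flusher runs afterwards.

const std = @import("std");

// Hypothetical, simplified model of the behaviour described above; not Bun's
// actual MySQLConnection. advance() only serializes the next pending query
// into the write buffer, while flushQueue() also flushes that buffer.
const Connection = struct {
    buf: [256]u8 = undefined,
    buf_len: usize = 0,
    pending: usize = 0,
    flushed_bytes: usize = 0,

    fn advance(self: *Connection) void {
        if (self.pending == 0) return;
        self.pending -= 1;
        const payload = "COM_QUERY ...";
        // Data is written to the buffer but NOT sent to the server yet.
        @memcpy(self.buf[self.buf_len..][0..payload.len], payload);
        self.buf_len += payload.len;
    }

    fn flushData(self: *Connection) void {
        // Stand-in for writing the buffered bytes to the socket.
        self.flushed_bytes += self.buf_len;
        self.buf_len = 0;
    }

    fn flushQueue(self: *Connection) void {
        self.advance();
        self.flushData();
    }
};

pub fn main() void {
    var conn = Connection{ .pending = 1 };

    // advance() alone leaves the serialized query sitting in the buffer;
    // if no auto-flusher runs afterwards, the server never sees it (the hang).
    conn.advance();
    std.debug.print("unflushed bytes after advance(): {d}\n", .{conn.buf_len});

    // flushQueue() advances and then flushes, so the query is actually sent.
    conn.pending = 1;
    conn.flushQueue();
    std.debug.print("bytes sent after flushQueue(): {d}\n", .{conn.flushed_bytes});
}

In the real code the flush writes to the socket and the auto-flusher normally covers it; the sketch only shows why a write without a subsequent flush can leave a query stranded.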
@@ -498,9 +498,9 @@ pub fn handleAuth(this: *MySQLConnection, comptime Context: type, reader: NewRea
     this.#status_flags = ok.status_flags;
     this.#flags.is_ready_for_query = true;
-    const connection = this.getJSConnection();
     this.queue.markAsReadyForQuery();
-    this.queue.advance(connection);
+    // Use flushQueue to ensure any pending queries are executed and flushed
+    this.flushQueue() catch {};
 },

 @intFromEnum(PacketType.ERROR) => {
@@ -535,7 +535,8 @@ pub fn handleAuth(this: *MySQLConnection, comptime Context: type, reader: NewRea
     this.#flags.is_ready_for_query = true;
     this.queue.markAsReadyForQuery();
-    this.queue.advance(this.getJSConnection());
+    // Use flushQueue to ensure any pending queries are executed and flushed
+    this.flushQueue() catch {};
 },
 .continue_auth => {
     debug("continue auth", .{});
@@ -837,7 +838,11 @@ fn checkIfPreparedStatementIsDone(this: *MySQLConnection, statement: *MySQLState
         this.queue.markAsReadyForQuery();
         this.queue.markAsPrepared();
         statement.reset();
-        this.queue.advance(this.getJSConnection());
+        // Use flushQueue instead of just advance to ensure any data written
+        // by queries added during advance is actually sent.
+        // This fixes a hang condition where the auto flusher may not be
+        // registered in time when a prepared statement completes.
+        this.flushQueue() catch {};
     }
 }
@@ -909,7 +914,8 @@ pub fn handlePreparedStatement(this: *MySQLConnection, comptime Context: type, r
     defer err.deinit();
     const connection = this.getJSConnection();
     defer {
-        this.queue.advance(connection);
+        // Use flushQueue to ensure any pending queries are executed and flushed
+        this.flushQueue() catch {};
     }
     this.#flags.is_ready_for_query = true;
     statement.status = .failed;
test/regression/issue/26235.test.ts: 165 additions (new file)
@@ -0,0 +1,165 @@
import { SQL, randomUUIDv7 } from "bun";
import { beforeEach, expect, test } from "bun:test";
import { describeWithContainer } from "harness";

describeWithContainer(
  "mysql",
  {
    image: "mysql_plain",
    env: {},
    args: [],
  },
  container => {
    // Use pool size > 1 to reproduce the issue from #26235
    // The same pattern with max: 1 passes (covered by #26030 test)
    const getOptions = () => ({
      url: `mysql://root@${container.host}:${container.port}/bun_sql_test`,
      max: 2, // Pool size > 1 triggers the hang
      bigint: true,
    });

    beforeEach(async () => {
      await container.ready;
    });

    // Regression test for https://github.com/oven-sh/bun/issues/26235
    // MySQL connection pool hangs during sequential transactions when pool size > 1
    // This is a follow-up to #26030 which was fixed for pool size 1
    test("Sequential transactions with INSERT and returned SELECT should not hang (pool size > 1)", async () => {
      await using sql = new SQL(getOptions());
      const random_name = ("t_" + randomUUIDv7("hex").replaceAll("-", "")).toLowerCase();

      // Create a table similar to the reproduction case
      await sql`CREATE TABLE IF NOT EXISTS ${sql(random_name)} (
        id INT AUTO_INCREMENT PRIMARY KEY,
        contract_name VARCHAR(255),
        amount INT
      )`;

      try {
        const rows = [
          { contract_name: "Contract A", amount: 100000 },
          { contract_name: "Contract B", amount: 200000 },
          { contract_name: "Contract C", amount: 300000 },
        ];

        const contractIds: number[] = [];

        for (const row of rows) {
          // This is the pattern from the bug report:
          // - INSERT is awaited
          // - SELECT LAST_INSERT_ID() is returned as array (not awaited individually)
          const [[result]] = await sql.begin(async tx => {
            await tx`
              INSERT INTO ${sql(random_name)} (contract_name, amount)
              VALUES (${row.contract_name}, ${row.amount})
            `;
            // Return array with non-awaited query - this triggers the hang
            return [tx`SELECT LAST_INSERT_ID() as id`];
          });

          contractIds.push(Number(result.id));
        }

        // Verify all transactions completed
        expect(contractIds.length).toBe(3);
        expect(contractIds[0]).toBe(1);
        expect(contractIds[1]).toBe(2);
        expect(contractIds[2]).toBe(3);

        // Verify data in database
        const count = await sql`SELECT COUNT(*) as count FROM ${sql(random_name)}`;
        expect(Number(count[0].count)).toBe(3);
      } finally {
        await sql`DROP TABLE IF EXISTS ${sql(random_name)}`;
      }
    });

    test("Sequential transactions with returned array of multiple queries (pool size > 1)", async () => {
      await using sql = new SQL(getOptions());
      const random_name = ("t_" + randomUUIDv7("hex").replaceAll("-", "")).toLowerCase();

      await sql`CREATE TABLE IF NOT EXISTS ${sql(random_name)} (
        id INT AUTO_INCREMENT PRIMARY KEY,
        value INT
      )`;

      try {
        for (let i = 0; i < 3; i++) {
          const results = await sql.begin(async tx => {
            await tx`INSERT INTO ${sql(random_name)} (value) VALUES (${i * 10})`;
            // Return multiple queries as array
            return [tx`SELECT LAST_INSERT_ID() as id`, tx`SELECT COUNT(*) as count FROM ${sql(random_name)}`];
          });

          expect(results.length).toBe(2);
        }

        const count = await sql`SELECT COUNT(*) as count FROM ${sql(random_name)}`;
        expect(Number(count[0].count)).toBe(3);
      } finally {
        await sql`DROP TABLE IF EXISTS ${sql(random_name)}`;
      }
    });

    test("Many sequential transactions with awaited INSERT and returned SELECT (pool size > 1)", async () => {
      await using sql = new SQL(getOptions());
      const random_name = ("t_" + randomUUIDv7("hex").replaceAll("-", "")).toLowerCase();

      await sql`CREATE TABLE IF NOT EXISTS ${sql(random_name)} (
        id INT AUTO_INCREMENT PRIMARY KEY,
        name VARCHAR(255)
      )`;

      try {
        // Multiple sequential transactions with awaited INSERT and returned SELECT
        for (let i = 0; i < 5; i++) {
          const [[result]] = await sql.begin(async tx => {
            // First insert
            await tx`INSERT INTO ${sql(random_name)} (name) VALUES (${"item_" + i})`;
            // Return array with SELECT
            return [tx`SELECT LAST_INSERT_ID() as id`];
          });

          expect(Number(result.id)).toBe(i + 1);
        }

        const count = await sql`SELECT COUNT(*) as count FROM ${sql(random_name)}`;
        expect(Number(count[0].count)).toBe(5);
      } finally {
        await sql`DROP TABLE IF EXISTS ${sql(random_name)}`;
      }
    });

    // Test with larger pool size to ensure the fix works for various pool sizes
    test("Sequential transactions with pool size > 2", async () => {
      await using sql = new SQL({
        url: `mysql://root@${container.host}:${container.port}/bun_sql_test`,
        max: 5,
        bigint: true,
      });
      const random_name = ("t_" + randomUUIDv7("hex").replaceAll("-", "")).toLowerCase();

      await sql`CREATE TABLE IF NOT EXISTS ${sql(random_name)} (
        id INT AUTO_INCREMENT PRIMARY KEY,
        name VARCHAR(255)
      )`;

      try {
        for (let i = 0; i < 10; i++) {
          const [[result]] = await sql.begin(async tx => {
            await tx`INSERT INTO ${sql(random_name)} (name) VALUES (${"item_" + i})`;
            return [tx`SELECT LAST_INSERT_ID() as id`];
          });

          expect(Number(result.id)).toBe(i + 1);
        }

        const count = await sql`SELECT COUNT(*) as count FROM ${sql(random_name)}`;
        expect(Number(count[0].count)).toBe(10);
      } finally {
        await sql`DROP TABLE IF EXISTS ${sql(random_name)}`;
      }
    });
  },
);