Compare commits

..

1 Commits

Author SHA1 Message Date
Claude Bot
742bc513cb fix(sql): validate array type parameter to prevent SQL injection
The `sql.array(values, type)` function interpolated the user-provided
type string directly into the SQL query without validation, allowing
SQL injection via crafted type names like `INT); DROP TABLE users--`.

Add character validation in `getArrayType()` to reject type names
containing characters outside [a-zA-Z0-9_ .], which covers all valid
PostgreSQL type names (including schema-qualified names like
`myschema.INTEGER`) while blocking injection payloads. Uses
`$ERR_INVALID_ARG_VALUE` for consistency with the rest of the codebase.

Co-Authored-By: Claude <noreply@anthropic.com>
2026-02-12 04:53:43 +00:00
5 changed files with 142 additions and 158 deletions

View File

@@ -193,14 +193,6 @@ pub fn installWithManager(
had_any_diffs = manager.summary.hasDiffs();
// When --frozen-lockfile is set and the only diffs are removed
// workspace dependencies (e.g. from `turbo prune`), the lockfile
// is a superset of what's needed. Treat this as no diff so the
// lockfile stays intact and the frozen-lockfile check passes.
if (had_any_diffs and manager.options.enable.frozen_lockfile and manager.summary.hasOnlyRemovals()) {
had_any_diffs = false;
}
if (!had_any_diffs) {
// always grab latest scripts for root package
var builder_ = manager.lockfile.stringBuilder();

View File

@@ -551,19 +551,6 @@ pub fn Package(comptime SemverIntType: type) type {
this.removed_trusted_dependencies.count() > 0 or
this.patched_dependencies_changed;
}
/// True when the diff consists solely of removed dependencies — nothing was
/// added, updated, or otherwise changed. This happens when workspaces were
/// pruned (e.g. by `turbo prune`): the lockfile is then a superset of what is
/// needed and `--frozen-lockfile` should accept it.
pub inline fn hasOnlyRemovals(this: Summary) bool {
    // Must have at least one removal, otherwise there is no diff to excuse.
    if (this.remove == 0) return false;
    // Every other kind of change must be absent.
    const nothing_else_changed =
        this.add == 0 and
        this.update == 0 and
        this.added_trusted_dependencies.count() == 0 and
        this.removed_trusted_dependencies.count() == 0 and
        !this.overrides_changed and
        !this.catalogs_changed and
        !this.patched_dependencies_changed;
    return nothing_else_changed;
}
};
pub fn generate(

View File

@@ -204,13 +204,47 @@ function arrayValueSerializer(type: ArrayType, is_numeric: boolean, is_json: boo
return `"${arrayEscape(JSON.stringify(value))}"`;
}
}
/**
 * Validates a (possibly schema-qualified) PostgreSQL type name before it is
 * interpolated into a query as `$N::TYPE[]`, to prevent SQL injection via
 * crafted type strings like `INT); DROP TABLE users--`.
 *
 * Allowed: [A-Za-z0-9_] in every dot-separated segment, plus interior spaces
 * in the final segment only (multi-word type names such as
 * "DOUBLE PRECISION"). A space at the start or end of the final segment can
 * never be part of a valid type name, so it is rejected as well — the
 * previous check accepted e.g. " INT", which produced a malformed cast.
 *
 * @param type - the upper-cased type name supplied by the caller
 * @throws $ERR_INVALID_ARG_VALUE when the name is empty, has an empty
 *         segment (leading/trailing/consecutive dots), or contains any
 *         character outside the allowed set.
 */
function validateArrayTypeName(type: string): void {
  if (type.length === 0) {
    throw $ERR_INVALID_ARG_VALUE("type", type, "must not be empty");
  }
  // Support schema-qualified names like "myschema.INTEGER" by splitting on
  // dots and validating each segment individually.
  const segments = type.split(".");
  const lastIdx = segments.length - 1;
  for (let s = 0; s <= lastIdx; s++) {
    const seg = segments[s];
    if (seg.length === 0) {
      throw $ERR_INVALID_ARG_VALUE("type", type, "must not contain empty segments");
    }
    for (let i = 0; i < seg.length; i++) {
      const c = seg.charCodeAt(i);
      if (
        (c >= 65 && c <= 90) || // A-Z
        (c >= 97 && c <= 122) || // a-z
        (c >= 48 && c <= 57) || // 0-9
        c === 95 // _
      ) {
        continue;
      }
      // Only the last segment may contain spaces (for "DOUBLE PRECISION"),
      // and only at interior positions — never first or last character.
      if (c === 32 && s === lastIdx && i > 0 && i < seg.length - 1) {
        continue;
      }
      throw $ERR_INVALID_ARG_VALUE("type", type, "contains invalid characters");
    }
  }
}
function getArrayType(typeNameOrID: number | ArrayType | undefined = undefined): ArrayType {
const typeOfType = typeof typeNameOrID;
if (typeOfType === "number") {
return getPostgresArrayType(typeNameOrID as number) ?? "JSON";
}
if (typeOfType === "string") {
return (typeNameOrID as string)?.toUpperCase();
const upper = (typeNameOrID as string).toUpperCase();
validateArrayTypeName(upper);
return upper;
}
// default to JSON so we accept most of the types
return "JSON";

View File

@@ -0,0 +1,107 @@
import { sql } from "bun";
import { describe, expect, test } from "bun:test";
// This test validates that sql.array() rejects malicious type parameters
// that could lead to SQL injection via the array type interpolation in
// normalizeQuery (src/js/internal/sql/postgres.ts line 1382).
//
// The vulnerability: sql.array(values, type) interpolates `type` directly
// into the query string as `$N::TYPE[]` without validation.
// Regression suite: sql.array()'s type argument ends up interpolated into the
// query text as `$N::TYPE[]`, so every invalid or malicious type string must
// be rejected before it reaches the query builder, while all legitimate
// PostgreSQL type names (including schema-qualified and multi-word ones)
// must continue to be accepted.
describe("sql.array type parameter validation", () => {
  // [registered test name, type string that must be rejected]
  const rejected: Array<[string, string]> = [
    ["sql.array rejects type with SQL injection payload (semicolon)", "INT); DROP TABLE users--"],
    ["sql.array rejects type with UNION injection", "INT[] UNION SELECT password FROM users--"],
    ["sql.array rejects type with subquery injection", "INT[] (SELECT 1)"],
    ["sql.array rejects type with parentheses", "INT()"],
    ["sql.array rejects type with single quotes", "INT' OR '1'='1"],
    ["sql.array rejects type with double quotes", 'INT" OR "1"="1'],
    ["sql.array rejects empty type", ""],
    ["sql.array rejects type with empty segment (leading dot)", ".INTEGER"],
    ["sql.array rejects type with empty segment (trailing dot)", "myschema."],
    ["sql.array rejects type with empty segment (consecutive dots)", "myschema..INTEGER"],
    ["sql.array rejects space in schema segment", "my schema.INTEGER"],
  ];
  for (const [name, badType] of rejected) {
    test(name, () => {
      expect(() => {
        sql.array([1, 2, 3], badType as any);
      }).toThrow();
    });
  }
  // [array values, type string] pairs that must all be accepted.
  const acceptedUpper: Array<[any[], any]> = [
    [[1, 2], "INTEGER"],
    [[1, 2], "INT"],
    [[1, 2], "BIGINT"],
    [["a", "b"], "TEXT"],
    [["a", "b"], "VARCHAR"],
    [[true, false], "BOOLEAN"],
    [[1.5, 2.5], "DOUBLE PRECISION"],
    [[1, 2], "INT2VECTOR"],
    [["{}", "[]"], "JSON"],
    [["{}", "[]"], "JSONB"],
  ];
  test("sql.array accepts valid types", () => {
    for (const [values, typeName] of acceptedUpper) {
      expect(() => sql.array(values, typeName)).not.toThrow();
    }
  });
  const acceptedLower: Array<[any[], any]> = [
    [[1, 2], "integer"],
    [[1, 2], "int"],
    [["a", "b"], "text"],
    [[1.5, 2.5], "double precision"],
  ];
  test("sql.array accepts lowercase valid types", () => {
    for (const [values, typeName] of acceptedLower) {
      expect(() => sql.array(values, typeName)).not.toThrow();
    }
  });
  test("sql.array accepts schema-qualified type names", () => {
    for (const qualified of ["myschema.INTEGER", "pg_catalog.int4", "public.my_type"]) {
      expect(() => sql.array([1, 2], qualified as any)).not.toThrow();
    }
  });
  test("sql.array accepts schema-qualified type with space in last segment", () => {
    expect(() => sql.array([1, 2], "myschema.DOUBLE PRECISION" as any)).not.toThrow();
  });
});

View File

@@ -1,136 +0,0 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
import { join } from "path";
// Test for https://github.com/oven-sh/bun/issues/26973
// `bun install --frozen-lockfile` should succeed on a pruned monorepo
// (e.g. output of `turbo prune --docker`) where some workspaces are removed
// but the lockfile is a superset of what's needed.
test("frozen-lockfile succeeds on pruned monorepo with subset of workspaces", async () => {
// Step 1: Create a full monorepo and generate a lockfile
// Four workspaces total: two libraries (shared, utils) and two apps (web,
// api), each app depending on one library via the workspace: protocol.
const fullDir = tempDirWithFiles("full-monorepo", {
"package.json": JSON.stringify({
name: "test-monorepo",
workspaces: ["packages/*", "apps/*"],
}),
"packages/shared/package.json": JSON.stringify({
name: "@test/shared",
version: "1.0.0",
}),
"packages/utils/package.json": JSON.stringify({
name: "@test/utils",
version: "1.0.0",
}),
"apps/web/package.json": JSON.stringify({
name: "@test/web",
version: "1.0.0",
dependencies: {
"@test/shared": "workspace:*",
},
}),
"apps/api/package.json": JSON.stringify({
name: "@test/api",
version: "1.0.0",
dependencies: {
"@test/utils": "workspace:*",
},
}),
});
// Generate a lockfile with the full set of workspaces
// --save-text-lockfile produces a text bun.lock so its contents can be
// copied verbatim into the pruned directory below.
const installResult = Bun.spawnSync({
cmd: [bunExe(), "install", "--save-text-lockfile", "--ignore-scripts"],
cwd: fullDir,
env: bunEnv,
});
expect(installResult.exitCode).toBe(0);
const lockfileContent = await Bun.file(join(fullDir, "bun.lock")).text();
// Step 2: Create a pruned monorepo (only @test/web and its dependency @test/shared)
// but use the FULL lockfile from the original monorepo
// This mirrors what `turbo prune` produces: the lockfile still lists
// @test/utils and @test/api even though those workspaces are absent on disk.
const prunedDir = tempDirWithFiles("pruned-monorepo", {
"package.json": JSON.stringify({
name: "test-monorepo",
workspaces: ["packages/shared", "apps/web"],
}),
"packages/shared/package.json": JSON.stringify({
name: "@test/shared",
version: "1.0.0",
}),
"apps/web/package.json": JSON.stringify({
name: "@test/web",
version: "1.0.0",
dependencies: {
"@test/shared": "workspace:*",
},
}),
"bun.lock": lockfileContent,
});
// Step 3: Run frozen install on the pruned output — this should succeed
// because the only lockfile diff is removed workspaces (removal-only diffs
// are tolerated by the frozen-lockfile check).
const frozenResult = Bun.spawnSync({
cmd: [bunExe(), "install", "--frozen-lockfile", "--ignore-scripts"],
cwd: prunedDir,
env: bunEnv,
});
// Check stderr first so a failure surfaces the actual error message
// rather than just a nonzero exit code.
const stderr = frozenResult.stderr.toString();
expect(stderr).not.toContain("lockfile had changes, but lockfile is frozen");
expect(frozenResult.exitCode).toBe(0);
});
test("frozen-lockfile still fails when a new workspace is added", async () => {
// This test ensures we don't accidentally make frozen-lockfile too permissive.
// If a workspace is ADDED (not just removed), the frozen lockfile check
// should still fail.
const fullDir = tempDirWithFiles("frozen-fail-monorepo", {
"package.json": JSON.stringify({
name: "test-monorepo",
workspaces: ["packages/*"],
}),
"packages/shared/package.json": JSON.stringify({
name: "@test/shared",
version: "1.0.0",
}),
});
// Generate a lockfile with only @test/shared
const installResult = Bun.spawnSync({
cmd: [bunExe(), "install", "--save-text-lockfile", "--ignore-scripts"],
cwd: fullDir,
env: bunEnv,
});
expect(installResult.exitCode).toBe(0);
const lockfileContent = await Bun.file(join(fullDir, "bun.lock")).text();
// Now create a directory with an ADDITIONAL workspace not in the lockfile
// (@test/extra matches the packages/* glob but is absent from bun.lock,
// so the diff contains an addition — not a removal-only diff).
const modifiedDir = tempDirWithFiles("frozen-fail-modified", {
"package.json": JSON.stringify({
name: "test-monorepo",
workspaces: ["packages/*"],
}),
"packages/shared/package.json": JSON.stringify({
name: "@test/shared",
version: "1.0.0",
}),
"packages/extra/package.json": JSON.stringify({
name: "@test/extra",
version: "1.0.0",
}),
"bun.lock": lockfileContent,
});
// This should fail because a new workspace was added that's not in the lockfile
const frozenResult = Bun.spawnSync({
cmd: [bunExe(), "install", "--frozen-lockfile", "--ignore-scripts"],
cwd: modifiedDir,
env: bunEnv,
});
// Assert both the diagnostic message and the nonzero exit code, so a
// regression in either the check or its reporting is caught.
const stderr = frozenResult.stderr.toString();
expect(stderr).toContain("lockfile had changes, but lockfile is frozen");
expect(frozenResult.exitCode).not.toBe(0);
});