fix(json): prevent stack overflow in JSONC parser on deeply nested input (#26174)

## Summary
- Add stack overflow protection to JSON/JSONC parser to prevent
segmentation faults
- Parser now throws `RangeError: Maximum call stack size exceeded`
instead of crashing
- Fixes a DoS vulnerability when parsing deeply nested JSON structures
(~150k+ depth)

## Test plan
- [x] Added regression tests for deeply nested arrays and objects (25k
depth)
- [x] Verified that system Bun v1.3.6 crashes with a segfault at 150k depth
- [x] Verified fix throws proper error instead of crashing
- [x] All existing JSONC tests pass

🤖 Generated with [Claude Code](https://claude.ai/code)

---------

Co-authored-by: Claude Bot <claude-bot@bun.sh>
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
robobun
2026-01-16 16:23:01 -08:00
committed by GitHub
parent 44df912d37
commit 1344151576
6 changed files with 44 additions and 2 deletions

View File

@@ -37,7 +37,10 @@ pub fn parse(
var input_slice = try input_value.toSlice(globalThis, bun.default_allocator);
defer input_slice.deinit();
const source = &logger.Source.initPathString("input.jsonc", input_slice.slice());
const parse_result = json.parseTSConfig(source, &log, allocator, true) catch {
const parse_result = json.parseTSConfig(source, &log, allocator, true) catch |err| {
if (err == error.StackOverflow) {
return globalThis.throwStackOverflow();
}
return globalThis.throwValue(try log.toJS(globalThis, default_allocator, "Failed to parse JSONC"));
};

View File

@@ -36,7 +36,10 @@ pub fn parse(
var input_slice = try arguments[0].toSlice(globalThis, bun.default_allocator);
defer input_slice.deinit();
const source = &logger.Source.initPathString("input.toml", input_slice.slice());
const parse_result = TOML.parse(source, &log, allocator, false) catch {
const parse_result = TOML.parse(source, &log, allocator, false) catch |err| {
if (err == error.StackOverflow) {
return globalThis.throwStackOverflow();
}
return globalThis.throwValue(try log.toJS(globalThis, default_allocator, "Failed to parse toml"));
};

View File

@@ -103,6 +103,7 @@ fn JSONLikeParser_(
log: *logger.Log,
allocator: std.mem.Allocator,
list_allocator: std.mem.Allocator,
stack_check: bun.StackCheck,
pub fn init(allocator: std.mem.Allocator, source_: *const logger.Source, log: *logger.Log) !Parser {
return initWithListAllocator(allocator, allocator, source_, log);
@@ -117,6 +118,7 @@ fn JSONLikeParser_(
.allocator = allocator,
.log = log,
.list_allocator = list_allocator,
.stack_check = bun.StackCheck.init(),
};
}
@@ -127,6 +129,10 @@ fn JSONLikeParser_(
const Parser = @This();
pub fn parseExpr(p: *Parser, comptime maybe_auto_quote: bool, comptime force_utf8: bool) anyerror!Expr {
if (!p.stack_check.isSafeToRecurse()) {
try bun.throwStackOverflow();
}
const loc = p.lexer.loc();
switch (p.lexer.token) {
@@ -318,6 +324,7 @@ pub const PackageJSONVersionChecker = struct {
log: *logger.Log,
allocator: std.mem.Allocator,
depth: usize = 0,
stack_check: bun.StackCheck,
found_version_buf: [1024]u8 = undefined,
found_name_buf: [1024]u8 = undefined,
@@ -343,12 +350,17 @@ pub const PackageJSONVersionChecker = struct {
.allocator = allocator,
.log = log,
.source = source,
.stack_check = bun.StackCheck.init(),
};
}
const Parser = @This();
pub fn parseExpr(p: *Parser) anyerror!Expr {
if (!p.stack_check.isSafeToRecurse()) {
try bun.throwStackOverflow();
}
const loc = p.lexer.loc();
if (p.has_found_name and p.has_found_version) return newExpr(E.Missing{}, loc);

View File

@@ -32,12 +32,14 @@ pub const TOML = struct {
lexer: Lexer,
log: *logger.Log,
allocator: std.mem.Allocator,
stack_check: bun.StackCheck,
pub fn init(allocator: std.mem.Allocator, source_: logger.Source, log: *logger.Log, redact_logs: bool) !TOML {
return TOML{
.lexer = try Lexer.init(log, source_, allocator, redact_logs),
.allocator = allocator,
.log = log,
.stack_check = bun.StackCheck.init(),
};
}
@@ -230,6 +232,10 @@ pub const TOML = struct {
}
pub fn parseValue(p: *TOML) anyerror!Expr {
if (!p.stack_check.isSafeToRecurse()) {
try bun.throwStackOverflow();
}
const loc = p.lexer.loc();
p.lexer.allow_double_bracket = true;

View File

@@ -123,3 +123,15 @@ test("Bun.JSONC.parse handles empty array", () => {
const result = Bun.JSONC.parse("[]");
expect(result).toEqual([]);
});
// Regression test: a pathologically deep array literal must surface a
// RangeError (stack-overflow guard) rather than segfaulting the process.
test("Bun.JSONC.parse throws on deeply nested arrays instead of crashing", () => {
  const nesting = 25_000;
  // Build "[[[...]]]" — `nesting` opens followed by `nesting` closes.
  const payload = "[".repeat(nesting) + "]".repeat(nesting);
  expect(() => Bun.JSONC.parse(payload)).toThrow(RangeError);
});
// Regression test: deeply nested object wrappers ({"a":{"a":...1...}}) must
// raise RangeError via the parser's stack check instead of crashing.
test("Bun.JSONC.parse throws on deeply nested objects instead of crashing", () => {
  const nesting = 25_000;
  // `nesting` levels of {"a": ... } wrapping a single numeric leaf.
  const payload = '{"a":'.repeat(nesting) + "1" + "}".repeat(nesting);
  expect(() => Bun.JSONC.parse(payload)).toThrow(RangeError);
});

View File

@@ -103,3 +103,9 @@ q1 = 1
});
expect(parsed.items).toEqual([{ q1: 1 }]);
});
// Regression test: deep inline-table nesting in TOML must hit the parser's
// stack-overflow guard and throw RangeError rather than segfault.
it("Bun.TOML.parse throws on deeply nested inline tables instead of crashing", () => {
  const nesting = 25_000;
  // a = { b = { b = ... 1 ... } } with `nesting` levels of inline tables.
  const payload = "a = " + "{ b = ".repeat(nesting) + "1" + " }".repeat(nesting);
  expect(() => Bun.TOML.parse(payload)).toThrow(RangeError);
});