Compare commits

..

1 Commit

Author SHA1 Message Date
Claude Bot
c7273a1893 fix(ffi): CString.byteLength and CString.byteOffset are undefined when not explicitly passed
When constructing a CString with only a pointer argument, byteOffset and
byteLength were left as undefined because they were only set when
explicitly passed. Now byteOffset defaults to 0 and byteLength is
computed from the string's UTF-8 byte length when not provided.

Closes #22920

Co-Authored-By: Claude <noreply@anthropic.com>
2026-02-20 04:40:50 +00:00
4 changed files with 73 additions and 210 deletions

View File

@@ -2254,11 +2254,6 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
defer if (content_type_needs_free) content_type.deinit(this.allocator);
var has_content_disposition = false;
var has_content_range = false;
// Save user-provided Content-Length before writeHeaders strips it.
// This is needed for ReadableStream bodies where the user knows the
// total size upfront (e.g. proxy responses). For known-size bodies
// (blob/string), tryEnd() will set Content-Length from the actual size.
var user_content_length: ?usize = null;
if (response.swapInitHeaders()) |headers_| {
defer headers_.deref();
has_content_disposition = headers_.fastHas(.ContentDisposition);
@@ -2268,14 +2263,6 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
status = 206;
}
if (this.blob.isDetached()) {
if (headers_.fastGet(.ContentLength)) |cl| {
const cl_str = cl.toSlice(this.allocator);
defer cl_str.deinit();
user_content_length = std.fmt.parseInt(usize, cl_str.slice(), 10) catch null;
}
}
this.doWriteStatus(status);
this.doWriteHeaders(headers_);
} else if (needs_content_range) {
@@ -2319,12 +2306,6 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
if (this.flags.needs_content_length) {
resp.writeHeaderInt("content-length", size);
this.flags.needs_content_length = false;
} else if (user_content_length) |cl| {
// For ReadableStream bodies where the user explicitly set
// Content-Length, write it and tell uWS to use content-length
// framing instead of chunked transfer encoding.
resp.writeHeaderInt("content-length", cl);
resp.markWroteContentLengthHeader();
}
if (needs_content_range and !has_content_range) {

View File

@@ -117,19 +117,20 @@ class JSCallback {
class CString extends String {
constructor(ptr, byteOffset?, byteLength?) {
super(
ptr
? typeof byteLength === "number" && Number.isSafeInteger(byteLength)
? BunCString(ptr, byteOffset || 0, byteLength)
: BunCString(ptr, byteOffset || 0)
: "",
);
const str = ptr
? typeof byteLength === "number" && Number.isSafeInteger(byteLength)
? BunCString(ptr, byteOffset || 0, byteLength)
: BunCString(ptr, byteOffset || 0)
: "";
super(str);
this.ptr = typeof ptr === "number" ? ptr : 0;
if (typeof byteOffset !== "undefined") {
this.byteOffset = byteOffset;
}
if (typeof byteLength !== "undefined") {
this.byteOffset = typeof byteOffset === "number" ? byteOffset : 0;
if (typeof byteLength === "number") {
this.byteLength = byteLength;
} else if (this.ptr) {
this.byteLength = Buffer.byteLength(str, "utf8");
} else {
this.byteLength = 0;
}
}

View File

@@ -1,180 +0,0 @@
import { expect, test } from "bun:test";
// Helper: fetch a raw HTTP response (header block + body) over a plain TCP socket,
// so we can inspect the exact framing headers the server emitted.
async function getRawResponse(port: number, path: string = "/"): Promise<{ headers: string; body: Buffer }> {
  return new Promise((resolve, reject) => {
    const received: Buffer[] = [];
    // Fail the promise if the server never closes the connection.
    const timer = setTimeout(() => reject(new Error("Timeout")), 5000);
    Bun.connect({
      hostname: "localhost",
      port,
      socket: {
        open(socket) {
          socket.write(`GET ${path} HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n`);
        },
        data(_socket, data) {
          received.push(Buffer.from(data));
        },
        close() {
          clearTimeout(timer);
          const payload = Buffer.concat(received);
          // Split the response at the blank line separating headers from body.
          const split = payload.indexOf("\r\n\r\n");
          if (split < 0) {
            reject(new Error("No header terminator found"));
            return;
          }
          resolve({
            headers: payload.subarray(0, split).toString("utf8"),
            body: payload.subarray(split + 4),
          });
        },
        error(_socket, err) {
          clearTimeout(timer);
          reject(err);
        },
      },
    });
  });
}
// Extract a single header value (case-insensitive) from a raw HTTP header block.
// Returns the trimmed value, or null when the header is absent.
function getHeader(rawHeaders: string, name: string): string | null {
  // Escape regex metacharacters so the header name is always matched literally
  // (previously e.g. "." in a name would match any character).
  const escaped = name.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const regex = new RegExp(`^${escaped}:\\s*(.+)$`, "mi");
  const match = rawHeaders.match(regex);
  return match ? match[1].trim() : null;
}
test("large streaming ReadableStream preserves user-set Content-Length", async () => {
  // A 1 MiB body is large enough that it cannot be eagerly buffered into a blob.
  const pieceSize = 1024;
  const pieceCount = 1024;
  const expectedBytes = pieceSize * pieceCount;
  const piece = new Uint8Array(pieceSize).fill(65); // 'A'
  using server = Bun.serve({
    port: 0,
    async fetch() {
      let pending = pieceCount;
      const stream = new ReadableStream({
        pull(controller) {
          if (pending > 0) {
            pending--;
            controller.enqueue(piece);
          } else {
            controller.close();
          }
        },
      });
      return new Response(stream, {
        headers: {
          "Content-Length": String(expectedBytes),
          "Content-Type": "application/octet-stream",
        },
      });
    },
  });
  const { headers, body } = await getRawResponse(server.port);
  expect(getHeader(headers, "content-length")).toBe(String(expectedBytes));
  expect(getHeader(headers, "transfer-encoding")).toBeNull();
  expect(body.length).toBe(expectedBytes);
});
test("large streaming ReadableStream without Content-Length uses chunked encoding", async () => {
  const pieceSize = 1024;
  const pieceCount = 1024;
  const piece = new Uint8Array(pieceSize).fill(65);
  using server = Bun.serve({
    port: 0,
    async fetch() {
      let pending = pieceCount;
      const stream = new ReadableStream({
        pull(controller) {
          if (pending > 0) {
            pending--;
            controller.enqueue(piece);
          } else {
            controller.close();
          }
        },
      });
      return new Response(stream, {
        headers: {
          "Content-Type": "application/octet-stream",
        },
      });
    },
  });
  const { headers } = await getRawResponse(server.port);
  // With no explicit Content-Length the server must fall back to chunked framing.
  expect(getHeader(headers, "transfer-encoding")).toBe("chunked");
  expect(getHeader(headers, "content-length")).toBeNull();
});
test("async ReadableStream with delay preserves user-set Content-Length", async () => {
  const text = "Hello, World!";
  const encoded = new TextEncoder().encode(text);
  using server = Bun.serve({
    port: 0,
    async fetch() {
      const stream = new ReadableStream({
        async pull(controller) {
          // Sleep so the stream cannot be eagerly drained before headers go out.
          await Bun.sleep(10);
          controller.enqueue(encoded);
          controller.close();
        },
      });
      return new Response(stream, {
        headers: {
          "Content-Length": String(encoded.length),
          "Content-Type": "text/plain",
        },
      });
    },
  });
  const { headers, body: respBody } = await getRawResponse(server.port);
  expect(getHeader(headers, "content-length")).toBe(String(encoded.length));
  expect(getHeader(headers, "transfer-encoding")).toBeNull();
  expect(respBody.toString()).toBe(text);
});
test("multi-chunk async ReadableStream preserves user-set Content-Length", async () => {
  const parts = ["Hello, ", "World", "!"];
  const totalSize = parts.reduce((sum, p) => sum + new TextEncoder().encode(p).length, 0);
  using server = Bun.serve({
    port: 0,
    async fetch() {
      let cursor = 0;
      const stream = new ReadableStream({
        async pull(controller) {
          if (cursor === parts.length) {
            controller.close();
            return;
          }
          await Bun.sleep(5);
          controller.enqueue(new TextEncoder().encode(parts[cursor]));
          cursor += 1;
        },
      });
      return new Response(stream, {
        headers: {
          "Content-Length": String(totalSize),
        },
      });
    },
  });
  const { headers, body } = await getRawResponse(server.port);
  expect(getHeader(headers, "content-length")).toBe(String(totalSize));
  expect(getHeader(headers, "transfer-encoding")).toBeNull();
  expect(body.toString()).toBe("Hello, World!");
});

View File

@@ -0,0 +1,61 @@
import { CString, ptr } from "bun:ffi";
import { expect, test } from "bun:test";
test("CString byteLength and byteOffset are defined when constructed with only a pointer", () => {
const buffer = Buffer.from("Hello world!\0");
const bufferPtr = ptr(buffer);
const cString = new CString(bufferPtr);
expect(cString.byteOffset).toBe(0);
expect(cString.byteLength).toBe(12);
expect(cString.toString()).toBe("Hello world!");
});
test("CString byteOffset defaults to 0 when only ptr and byteLength are provided", () => {
const buffer = Buffer.from("Hello world!\0");
const bufferPtr = ptr(buffer);
const cString = new CString(bufferPtr, 0, 12);
expect(cString.byteOffset).toBe(0);
expect(cString.byteLength).toBe(12);
expect(cString.toString()).toBe("Hello world!");
});
test("CString with byteOffset", () => {
const buffer = Buffer.from("Hello world!\0");
const bufferPtr = ptr(buffer);
const cString = new CString(bufferPtr, 6);
expect(cString.byteOffset).toBe(6);
expect(cString.byteLength).toBe(6);
expect(cString.toString()).toBe("world!");
});
test("CString with byteOffset and byteLength", () => {
const buffer = Buffer.from("Hello world!\0");
const bufferPtr = ptr(buffer);
const cString = new CString(bufferPtr, 6, 5);
expect(cString.byteOffset).toBe(6);
expect(cString.byteLength).toBe(5);
expect(cString.toString()).toBe("world");
});
test("CString with null pointer has byteLength 0 and byteOffset 0", () => {
const cString = new CString(0);
expect(cString.byteOffset).toBe(0);
expect(cString.byteLength).toBe(0);
expect(cString.toString()).toBe("");
});
test("CString byteLength is correct for multi-byte UTF-8 strings", () => {
// "café" in UTF-8 is 5 bytes (c=1, a=1, f=1, é=2)
const buffer = Buffer.from("café\0");
const bufferPtr = ptr(buffer);
const cString = new CString(bufferPtr);
expect(cString.byteOffset).toBe(0);
expect(cString.byteLength).toBe(5);
expect(cString.toString()).toBe("café");
});