Mirror of https://github.com/oven-sh/bun, synced 2026-02-15 21:32:05 +00:00
Introduce stdin support to Bun.spawn
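A minimal sketch of what this commit enables, distilled from the "basic ReadableStream as stdin" test below (the cat command and the Response-based readout come straight from the test code; treat this as an illustration of the feature, not canonical documentation):

    import { spawn } from "bun";

    // Any web ReadableStream can now be passed directly as a child process's stdin.
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue("hello from stream");
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["cat"],    // cat echoes stdin back to stdout
      stdin: stream,   // the capability introduced by this commit
      stdout: "pipe",
    });

    console.log(await new Response(proc.stdout).text()); // "hello from stream"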
test/js/bun/spawn/spawn-stdin-readable-stream-edge-cases.test.ts — 448 lines added (normal file)
@@ -0,0 +1,448 @@
import { spawn } from "bun";
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe } from "harness";

describe("spawn stdin ReadableStream edge cases", () => {
  test("ReadableStream with exception in start", async () => {
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue("before exception\n");
        throw new Error("Start error");
      },
    });

    // The stream should still work with the data before the exception
    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text).toBe("before exception\n");
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with exception in pull", async () => {
    let pullCount = 0;
    const stream = new ReadableStream({
      pull(controller) {
        pullCount++;
        if (pullCount === 1) {
          controller.enqueue("chunk 1\n");
        } else if (pullCount === 2) {
          controller.enqueue("chunk 2\n");
          throw new Error("Pull error");
        }
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    // Should receive data before the exception
    expect(text).toContain("chunk 1\n");
    expect(text).toContain("chunk 2\n");
  });

  test("ReadableStream writing after process closed", async () => {
    let writeAttempts = 0;
    let errorOccurred = false;

    const stream = new ReadableStream({
      async pull(controller) {
        writeAttempts++;
        if (writeAttempts <= 10) {
          await Bun.sleep(100);
          try {
            controller.enqueue(`attempt ${writeAttempts}\n`);
          } catch (e) {
            errorOccurred = true;
            throw e;
          }
        } else {
          controller.close();
        }
      },
    });

    // Use a command that exits quickly
    const proc = spawn({
      cmd: ["sh", "-c", "head -n 1"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    await proc.exited;

    // Give time for more pull attempts
    await Bun.sleep(500);

    // The stream should have attempted multiple writes but only the first succeeded
    expect(writeAttempts).toBeGreaterThanOrEqual(1);
    expect(text).toBe("attempt 1\n");
  });

  test("ReadableStream with mixed types", async () => {
    const stream = new ReadableStream({
      start(controller) {
        // String
        controller.enqueue("text ");
        // Uint8Array
        controller.enqueue(new TextEncoder().encode("binary "));
        // ArrayBuffer
        const buffer = new ArrayBuffer(5);
        const view = new Uint8Array(buffer);
        view.set([100, 97, 116, 97, 32]); // "data "
        controller.enqueue(buffer);
        // Another string
        controller.enqueue("end");
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text).toBe("text binary data end");
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with process consuming data slowly", async () => {
    const chunks: string[] = [];
    for (let i = 0; i < 10; i++) {
      chunks.push(`chunk ${i}\n`);
    }

    let currentChunk = 0;
    const stream = new ReadableStream({
      pull(controller) {
        if (currentChunk < chunks.length) {
          controller.enqueue(chunks[currentChunk]);
          currentChunk++;
        } else {
          controller.close();
        }
      },
    });

    // Use a script that reads slowly
    const proc = spawn({
      cmd: [
        bunExe(),
        "-e",
        `
        const readline = require('readline');
        const rl = readline.createInterface({
          input: process.stdin,
          output: process.stdout,
          terminal: false
        });

        rl.on('line', async (line) => {
          await Bun.sleep(10);
          console.log(line);
        });
        `,
      ],
      stdin: stream,
      stdout: "pipe",
      env: bunEnv,
    });

    const text = await new Response(proc.stdout).text();
    const lines = text.trim().split("\n");
    expect(lines.length).toBe(10);
    for (let i = 0; i < 10; i++) {
      expect(lines[i]).toBe(`chunk ${i}`);
    }
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with cancel callback verification", async () => {
    let cancelReason: any = null;
    let cancelCalled = false;

    const stream = new ReadableStream({
      start(controller) {
        // Start sending data
        let count = 0;
        const interval = setInterval(() => {
          count++;
          try {
            controller.enqueue(`data ${count}\n`);
          } catch (e) {
            clearInterval(interval);
          }
        }, 50);

        // Store interval for cleanup
        (controller as any).interval = interval;
      },
      cancel(reason) {
        cancelCalled = true;
        cancelReason = reason;
        // Clean up interval if exists
        if ((this as any).interval) {
          clearInterval((this as any).interval);
        }
      },
    });

    // Kill the process after some data
    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    // Wait a bit then kill
    await Bun.sleep(150);
    proc.kill();

    try {
      await proc.exited;
    } catch (e) {
      // Expected - process was killed
    }

    // Give time for cancel to be called
    await Bun.sleep(50);

    expect(cancelCalled).toBe(true);
  });

  test("ReadableStream with high frequency small chunks", async () => {
    const totalChunks = 1000;
    let sentChunks = 0;

    const stream = new ReadableStream({
      pull(controller) {
        // Send multiple small chunks per pull
        for (let i = 0; i < 10 && sentChunks < totalChunks; i++) {
          controller.enqueue(`${sentChunks}\n`);
          sentChunks++;
        }

        if (sentChunks >= totalChunks) {
          controller.close();
        }
      },
    });

    const proc = spawn({
      cmd: ["wc", "-l"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(parseInt(text.trim())).toBe(totalChunks);
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with queuing strategy", async () => {
    let pullCount = 0;

    const stream = new ReadableStream(
      {
        pull(controller) {
          pullCount++;
          if (pullCount <= 5) {
            // Enqueue data larger than high water mark
            controller.enqueue("x".repeat(1024));
          } else {
            controller.close();
          }
        },
      },
      {
        // Small high water mark to test backpressure
        highWaterMark: 1024,
      },
    );

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text).toBe("x".repeat(1024 * 5));
    expect(await proc.exited).toBe(0);

    // Should have been pulled exactly as needed
    expect(pullCount).toBe(5);
  });

  test("ReadableStream reuse prevention", async () => {
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue("test data");
        controller.close();
      },
    });

    // First use
    const proc1 = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text1 = await new Response(proc1.stdout).text();
    expect(text1).toBe("test data");
    expect(await proc1.exited).toBe(0);

    // Second use should fail
    expect(() => {
      spawn({
        cmd: ["cat"],
        stdin: stream,
      });
    }).toThrow();
  });

  test("ReadableStream with byte stream", async () => {
    const data = new Uint8Array(256);
    for (let i = 0; i < 256; i++) {
      data[i] = i;
    }

    const stream = new ReadableStream({
      type: "bytes",
      start(controller) {
        // Enqueue as byte chunks
        controller.enqueue(data.slice(0, 128));
        controller.enqueue(data.slice(128, 256));
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const buffer = await new Response(proc.stdout).arrayBuffer();
    const result = new Uint8Array(buffer);
    expect(result).toEqual(data);
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with stdin and other pipes", async () => {
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue("stdin data");
        controller.close();
      },
    });

    // Create a script that also writes to stdout and stderr
    const script = `
      process.stdin.on('data', (data) => {
        process.stdout.write('stdout: ' + data);
        process.stderr.write('stderr: ' + data);
      });
    `;

    const proc = spawn({
      cmd: [bunExe(), "-e", script],
      stdin: stream,
      stdout: "pipe",
      stderr: "pipe",
      env: bunEnv,
    });

    const [stdout, stderr] = await Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]);

    expect(stdout).toBe("stdout: stdin data");
    expect(stderr).toBe("stderr: stdin data");
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with very long single chunk", async () => {
    // Create a chunk larger than typical pipe buffer (64KB on most systems)
    const size = 256 * 1024; // 256KB
    const chunk = "a".repeat(size);

    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue(chunk);
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["wc", "-c"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(parseInt(text.trim())).toBe(size);
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with alternating data types", async () => {
    const stream = new ReadableStream({
      start(controller) {
        // Alternate between strings and Uint8Arrays
        controller.enqueue("string1 ");
        controller.enqueue(new TextEncoder().encode("binary1 "));
        controller.enqueue("string2 ");
        controller.enqueue(new TextEncoder().encode("binary2"));
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text).toBe("string1 binary1 string2 binary2");
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with spawn options variations", async () => {
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue("test input");
        controller.close();
      },
    });

    // Test with different spawn configurations
    const configs = [
      { stdout: "pipe", stderr: "ignore" },
      { stdout: "pipe", stderr: "pipe" },
      { stdout: "pipe", stderr: "inherit" },
    ];

    for (const config of configs) {
      const proc = spawn({
        cmd: ["cat"],
        stdin: stream,
        ...config,
      });

      const stdout = await new Response(proc.stdout).text();
      expect(stdout).toBe("test input");
      expect(await proc.exited).toBe(0);
    }
  });
});
@@ -0,0 +1,153 @@
import { spawn } from "bun";
import { describe, expect, test } from "bun:test";

describe("spawn stdin ReadableStream integration", () => {
  test("example from documentation", async () => {
    const stream = new ReadableStream({
      async pull(controller) {
        await Bun.sleep(1);
        controller.enqueue("some data from a stream");
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
    });

    const text = await new Response(proc.stdout).text();
    console.log(text); // "some data from a stream"
    expect(text).toBe("some data from a stream");
  });

  test("piping HTTP response to process", async () => {
    // Simulate an HTTP response stream
    const responseStream = new ReadableStream({
      async pull(controller) {
        await Bun.sleep(1);
        controller.enqueue("Line 1\n");
        controller.enqueue("Line 2\n");
        controller.enqueue("Line 3\n");
        controller.close();
      },
    });

    // Count lines using wc -l
    const proc = spawn({
      cmd: ["wc", "-l"],
      stdin: responseStream,
      stdout: "pipe",
    });

    const output = await new Response(proc.stdout).text();
    expect(parseInt(output.trim())).toBe(3);
  });

  test("transforming data before passing to process", async () => {
    // Original data stream
    const dataStream = new ReadableStream({
      async pull(controller) {
        await Bun.sleep(1);
        controller.enqueue("hello world");
        controller.enqueue("\n");
        controller.enqueue("foo bar");
        controller.close();
      },
    });

    // Transform to uppercase
    const upperCaseTransform = new TransformStream({
      transform(chunk, controller) {
        controller.enqueue(chunk.toUpperCase());
      },
    });

    // Pipe through transform then to process
    const transformedStream = dataStream.pipeThrough(upperCaseTransform);

    const proc = spawn({
      cmd: ["cat"],
      stdin: transformedStream,
      stdout: "pipe",
    });

    const result = await new Response(proc.stdout).text();
    expect(result).toBe("HELLO WORLD\nFOO BAR");
  });

  test("streaming large file through process", async () => {
    // Simulate streaming a large file in chunks
    const chunkSize = 1024;
    const numChunks = 100;
    let currentChunk = 0;

    const fileStream = new ReadableStream({
      pull(controller) {
        if (currentChunk < numChunks) {
          // Simulate file chunk
          controller.enqueue(`Chunk ${currentChunk}: ${"x".repeat(chunkSize - 20)}\n`);
          currentChunk++;
        } else {
          controller.close();
        }
      },
    });

    // Process the stream (e.g., compress it)
    const proc = spawn({
      cmd: ["gzip"],
      stdin: fileStream,
      stdout: "pipe",
    });

    // Decompress to verify
    const decompress = spawn({
      cmd: ["gunzip"],
      stdin: proc.stdout,
      stdout: "pipe",
    });

    const result = await new Response(decompress.stdout).text();
    const lines = result.trim().split("\n");
    expect(lines.length).toBe(numChunks);
    expect(lines[0]).toStartWith("Chunk 0:");
    expect(lines[99]).toStartWith("Chunk 99:");
  });

  test("real-time data processing", async () => {
    let dataPoints = 0;
    const maxDataPoints = 5;

    // Simulate real-time data stream
    const dataStream = new ReadableStream({
      async pull(controller) {
        if (dataPoints < maxDataPoints) {
          const timestamp = Date.now();
          const value = Math.random() * 100;
          controller.enqueue(`${timestamp},${value.toFixed(2)}\n`);
          dataPoints++;

          // Simulate real-time delay
          await Bun.sleep(10);
        } else {
          controller.close();
        }
      },
    });

    // Process the CSV data
    const proc = spawn({
      cmd: ["awk", "-F,", "{ sum += $2; count++ } END { print sum/count }"],
      stdin: dataStream,
      stdout: "pipe",
    });

    const avgStr = await new Response(proc.stdout).text();
    const avg = parseFloat(avgStr.trim());

    // Average should be between 0 and 100
    expect(avg).toBeGreaterThanOrEqual(0);
    expect(avg).toBeLessThanOrEqual(100);
  });
});
test/js/bun/spawn/spawn-stdin-readable-stream-sync.test.ts — 33 lines added (normal file)
@@ -0,0 +1,33 @@
import { spawnSync } from "bun";
import { describe, expect, test } from "bun:test";

describe("spawnSync with ReadableStream stdin", () => {
  test("spawnSync should throw or handle ReadableStream appropriately", () => {
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue("test data");
        controller.close();
      },
    });

    // spawnSync with ReadableStream should either:
    // 1. Throw an error because async streams can't be used synchronously
    // 2. Handle it in some special way

    try {
      const result = spawnSync({
        cmd: ["cat"],
        stdin: stream as any, // Type assertion because it may not be in the types yet
        stdout: "pipe",
      });

      // If it doesn't throw, check what happens
      if (result.stdout) {
        console.log("spawnSync accepted ReadableStream, output:", result.stdout.toString());
      }
    } catch (error: any) {
      // This is expected - spawnSync shouldn't support async ReadableStream
      expect(error.message).toContain("ReadableStream");
    }
  });
});
test/js/bun/spawn/spawn-stdin-readable-stream.test.ts — 586 lines added (normal file)
@@ -0,0 +1,586 @@
import { spawn } from "bun";
import { describe, expect, test } from "bun:test";
import { expectMaxObjectTypeCount, getMaxFD } from "harness";

describe("spawn stdin ReadableStream", () => {
  test("basic ReadableStream as stdin", async () => {
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue("hello from stream");
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text).toBe("hello from stream");
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with multiple chunks", async () => {
    const chunks = ["chunk1\n", "chunk2\n", "chunk3\n"];
    const stream = new ReadableStream({
      start(controller) {
        for (const chunk of chunks) {
          controller.enqueue(chunk);
        }
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text).toBe(chunks.join(""));
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with Uint8Array chunks", async () => {
    const encoder = new TextEncoder();
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue(encoder.encode("binary "));
        controller.enqueue(encoder.encode("data "));
        controller.enqueue(encoder.encode("stream"));
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text).toBe("binary data stream");
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with delays between chunks", async () => {
    const stream = new ReadableStream({
      async start(controller) {
        controller.enqueue("first\n");
        await Bun.sleep(50);
        controller.enqueue("second\n");
        await Bun.sleep(50);
        controller.enqueue("third\n");
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text).toBe("first\nsecond\nthird\n");
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with pull method", async () => {
    let pullCount = 0;
    const stream = new ReadableStream({
      pull(controller) {
        pullCount++;
        if (pullCount <= 3) {
          controller.enqueue(`pull ${pullCount}\n`);
        } else {
          controller.close();
        }
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text).toBe("pull 1\npull 2\npull 3\n");
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with async pull and delays", async () => {
    let pullCount = 0;
    const stream = new ReadableStream({
      async pull(controller) {
        pullCount++;
        if (pullCount <= 3) {
          await Bun.sleep(30);
          controller.enqueue(`async pull ${pullCount}\n`);
        } else {
          controller.close();
        }
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text).toBe("async pull 1\nasync pull 2\nasync pull 3\n");
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with large data", async () => {
    const largeData = "x".repeat(1024 * 1024); // 1MB
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue(largeData);
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text).toBe(largeData);
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with very large chunked data", async () => {
    const chunkSize = 64 * 1024; // 64KB chunks
    const numChunks = 16; // 1MB total
    let pushedChunks = 0;

    const stream = new ReadableStream({
      pull(controller) {
        if (pushedChunks < numChunks) {
          controller.enqueue("x".repeat(chunkSize));
          pushedChunks++;
        } else {
          controller.close();
        }
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text.length).toBe(chunkSize * numChunks);
    expect(text).toBe("x".repeat(chunkSize * numChunks));
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream cancellation when process exits early", async () => {
    let cancelled = false;
    let chunksEnqueued = 0;

    const stream = new ReadableStream({
      async pull(controller) {
        // Keep enqueueing data slowly
        await Bun.sleep(50);
        chunksEnqueued++;
        controller.enqueue(`chunk ${chunksEnqueued}\n`);
      },
      cancel(reason) {
        cancelled = true;
      },
    });

    const proc = spawn({
      cmd: ["head", "-n", "2"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    await proc.exited;

    // Give some time for cancellation to happen
    await Bun.sleep(100);

    expect(cancelled).toBe(true);
    expect(chunksEnqueued).toBeGreaterThanOrEqual(2);
    // head -n 2 should only output 2 lines
    expect(text.trim().split("\n").length).toBe(2);
  });

  test("ReadableStream error handling", async () => {
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue("before error\n");
        controller.error(new Error("Stream error"));
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    // Process should receive data before the error
    expect(text).toBe("before error\n");

    // Process should exit normally (the stream error happens after data is sent)
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with process that exits immediately", async () => {
    let cancelled = false;
    const stream = new ReadableStream({
      start(controller) {
        // Enqueue a lot of data
        for (let i = 0; i < 1000; i++) {
          controller.enqueue(`line ${i}\n`);
        }
        controller.close();
      },
      cancel() {
        cancelled = true;
      },
    });

    const proc = spawn({
      cmd: ["true"], // exits immediately
      stdin: stream,
    });

    expect(await proc.exited).toBe(0);

    // Give time for any pending operations
    await Bun.sleep(50);

    // The stream might be cancelled since the process exits before reading
    // This is implementation-dependent behavior
  });

  test("ReadableStream with process that fails", async () => {
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue("data for failing process\n");
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["sh", "-c", "exit 1"],
      stdin: stream,
    });

    expect(await proc.exited).toBe(1);
  });

  test("already disturbed ReadableStream throws error", async () => {
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue("data");
        controller.close();
      },
    });

    // Disturb the stream by getting a reader
    const reader = stream.getReader();
    reader.releaseLock();

    expect(() => {
      spawn({
        cmd: ["cat"],
        stdin: stream,
      });
    }).toThrow("stdin ReadableStream is already disturbed");
  });

  test("ReadableStream with abort signal", async () => {
    const controller = new AbortController();
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue("data before abort\n");
      },
      pull(controller) {
        // Keep the stream open
        controller.enqueue("more data\n");
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
      signal: controller.signal,
    });

    // Give it some time to start
    await Bun.sleep(50);

    // Abort the process
    controller.abort();

    try {
      await proc.exited;
    } catch (e) {
      // Process was aborted
    }

    // The process should have been killed
    expect(proc.killed).toBe(true);
  });

  test("ReadableStream with backpressure", async () => {
    let pullCalls = 0;
    let totalBytesEnqueued = 0;
    const chunkSize = 64 * 1024; // 64KB chunks

    const stream = new ReadableStream({
      pull(controller) {
        pullCalls++;
        if (totalBytesEnqueued < 1024 * 1024 * 2) {
          // 2MB total
          const chunk = "x".repeat(chunkSize);
          controller.enqueue(chunk);
          totalBytesEnqueued += chunk.length;
        } else {
          controller.close();
        }
      },
    });

    // Use a slow reader to create backpressure
    const proc = spawn({
      cmd: ["sh", "-c", 'while IFS= read -r line; do echo "$line"; sleep 0.01; done'],
      stdin: stream,
      stdout: "pipe",
    });

    const startTime = Date.now();
    let outputLength = 0;

    const reader = proc.stdout.getReader();
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        if (value) {
          outputLength += value.length;
          // Break after some data to not wait forever
          if (outputLength > chunkSize * 2) break;
        }
      }
    } finally {
      reader.releaseLock();
    }

    proc.kill();
    await proc.exited;

    // The pull method should have been called multiple times due to backpressure
    expect(pullCalls).toBeGreaterThan(1);
  });

  test("ReadableStream with multiple processes", async () => {
    const stream1 = new ReadableStream({
      start(controller) {
        controller.enqueue("stream1 data");
        controller.close();
      },
    });

    const stream2 = new ReadableStream({
      start(controller) {
        controller.enqueue("stream2 data");
        controller.close();
      },
    });

    const proc1 = spawn({
      cmd: ["cat"],
      stdin: stream1,
      stdout: "pipe",
    });

    const proc2 = spawn({
      cmd: ["cat"],
      stdin: stream2,
      stdout: "pipe",
    });

    const [text1, text2] = await Promise.all([new Response(proc1.stdout).text(), new Response(proc2.stdout).text()]);

    expect(text1).toBe("stream1 data");
    expect(text2).toBe("stream2 data");
    expect(await proc1.exited).toBe(0);
    expect(await proc2.exited).toBe(0);
  });

  test("ReadableStream file descriptor cleanup", async () => {
    const maxFD = getMaxFD();
    const iterations = 10;

    for (let i = 0; i < iterations; i++) {
      const stream = new ReadableStream({
        start(controller) {
          controller.enqueue(`iteration ${i}\n`);
          controller.close();
        },
      });

      const proc = spawn({
        cmd: ["cat"],
        stdin: stream,
        stdout: "pipe",
      });

      const text = await new Response(proc.stdout).text();
      expect(text).toBe(`iteration ${i}\n`);
      expect(await proc.exited).toBe(0);
    }

    // Force garbage collection
    Bun.gc(true);
    await Bun.sleep(50);

    // Check that we didn't leak file descriptors
    const newMaxFD = getMaxFD();
    expect(newMaxFD).toBeLessThanOrEqual(maxFD + 10); // Allow some variance
  });

  test("ReadableStream with empty stream", async () => {
    const stream = new ReadableStream({
      start(controller) {
        // Close immediately without enqueueing anything
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text).toBe("");
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with null bytes", async () => {
    const stream = new ReadableStream({
      start(controller) {
        controller.enqueue(new Uint8Array([72, 101, 108, 108, 111, 0, 87, 111, 114, 108, 100])); // "Hello\0World"
        controller.close();
      },
    });

    const proc = spawn({
      cmd: ["cat"],
      stdin: stream,
      stdout: "pipe",
    });

    const buffer = await new Response(proc.stdout).arrayBuffer();
    const bytes = new Uint8Array(buffer);
    expect(bytes).toEqual(new Uint8Array([72, 101, 108, 108, 111, 0, 87, 111, 114, 108, 100]));
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with transform stream", async () => {
    // Create a transform stream that uppercases text
    const upperCaseTransform = new TransformStream({
      transform(chunk, controller) {
        controller.enqueue(chunk.toUpperCase());
      },
    });

    const originalStream = new ReadableStream({
      start(controller) {
        controller.enqueue("hello ");
        controller.enqueue("world");
        controller.close();
      },
    });

    const transformedStream = originalStream.pipeThrough(upperCaseTransform);

    const proc = spawn({
      cmd: ["cat"],
      stdin: transformedStream,
      stdout: "pipe",
    });

    const text = await new Response(proc.stdout).text();
    expect(text).toBe("HELLO WORLD");
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream with tee", async () => {
    const originalStream = new ReadableStream({
      start(controller) {
        controller.enqueue("shared data");
        controller.close();
      },
    });

    const [stream1, stream2] = originalStream.tee();

    // Use the first branch for the process
    const proc = spawn({
      cmd: ["cat"],
      stdin: stream1,
      stdout: "pipe",
    });

    // Read from the second branch independently
    const text2 = await new Response(stream2).text();

    const text1 = await new Response(proc.stdout).text();
    expect(text1).toBe("shared data");
    expect(text2).toBe("shared data");
    expect(await proc.exited).toBe(0);
  });

  test("ReadableStream object type count", async () => {
    const iterations = 5;

    for (let i = 0; i < iterations; i++) {
      const stream = new ReadableStream({
        start(controller) {
          controller.enqueue(`iteration ${i}`);
          controller.close();
        },
      });

      const proc = spawn({
        cmd: ["cat"],
        stdin: stream,
        stdout: "pipe",
      });

      await new Response(proc.stdout).text();
      await proc.exited;
    }

    // Force cleanup
    Bun.gc(true);
    await Bun.sleep(100);

    // Check that we're not leaking objects
    await expectMaxObjectTypeCount(expect, "ReadableStream", 10);
    await expectMaxObjectTypeCount(expect, "Subprocess", 5);
  });
});