Compare commits

...

4 Commits

Author SHA1 Message Date
Sosuke Suzuki
417e546b1f fix: use default export for end-of-stream module
end-of-stream exports the eos function as its default export, not as
a named 'finished' export. Using { finished } destructuring caused
ERR_INVALID_ARG_TYPE because the callback was passed as the options argument.
2026-02-25 14:20:20 +09:00
SUZUKI Sosuke
278531c321 Merge branch 'main' into claude/stream-lazy-init 2026-02-24 16:41:44 +09:00
Sosuke Suzuki
8c0a5be472 perf: avoid loading full node:stream in more modules
Apply the same optimization to:
- node/zlib.ts: Transform, finished
- node/_http_server.ts: Duplex, Stream
- node/_http2_upgrade.ts: Duplex
- node/http2.ts: Readable, Duplex, Stream
- thirdparty/undici.js: Readable
- builtins/CompressionStream.ts: Readable, Writable
- builtins/DecompressionStream.ts: Readable, Writable

Each file now requires only the specific internal/streams/*
submodules it needs instead of the full node:stream module.
2026-02-24 15:17:24 +09:00
Sosuke Suzuki
e67b06a34a perf: avoid loading full node:stream in internal/fs/streams.ts
Replace require('node:stream') with direct submodule requires:
- require('internal/streams/readable')
- require('internal/streams/writable')
- require('internal/streams/end-of-stream')

This avoids eagerly loading unused submodules (Transform, Duplex,
PassThrough, pipeline, operators, etc.) when accessing process.stdin,
fs.createReadStream, or fs.createWriteStream.

Benchmark shows ~0.5ms improvement in process.stdin startup cost.
2026-02-24 15:01:38 +09:00
9 changed files with 69 additions and 15 deletions

View File

@@ -0,0 +1,49 @@
// Measures the startup cost of accessing process.stdin
// Each iteration spawns a fresh subprocess to avoid module cache effects.
import { spawnSync } from "bun";
const BUN = process.execPath;
const N = 50;
// Runs `code` in N fresh subprocesses and prints timing statistics for it.
// A fresh process per iteration avoids module-cache effects skewing results.
// Each subprocess is expected to print a single float (milliseconds) on stdout;
// runs that print anything else are dropped from the sample set.
function measure(label, code) {
  const times = [];
  for (let i = 0; i < N; i++) {
    const result = spawnSync({
      cmd: [BUN, "-e", code],
      stdout: "pipe",
      stderr: "pipe",
    });
    const t = parseFloat(new TextDecoder().decode(result.stdout));
    // Number.isNaN (not the coercing global) — parseFloat already yields a number.
    if (!Number.isNaN(t)) times.push(t);
  }
  // Guard: if every run failed to produce a number, report it instead of
  // crashing with a TypeError on `undefined.toFixed` below.
  if (times.length === 0) {
    console.log(`${label.padEnd(30)} no valid samples (n=0)`);
    return;
  }
  times.sort((a, b) => a - b);
  const median = times[Math.floor(times.length / 2)];
  const mean = times.reduce((a, b) => a + b, 0) / times.length;
  const min = times[0];
  const max = times[times.length - 1];
  // floor(n * 0.95) is always < n for n >= 1, so this index is in bounds.
  const p95 = times[Math.floor(times.length * 0.95)];
  console.log(
    `${label.padEnd(30)} median=${median.toFixed(3)}ms mean=${mean.toFixed(3)}ms min=${min.toFixed(3)}ms max=${max.toFixed(3)}ms p95=${p95.toFixed(3)}ms (n=${times.length})`,
  );
}
// Print environment info so runs can be compared across Bun builds.
console.log(`Bun: ${BUN}`);
console.log(`Iterations: ${N}\n`);
// Baseline: empty script (process startup overhead only)
measure("baseline (empty)", `process.stdout.write(String(0))`);
// Access process.stdin which triggers internal/fs/streams + node:stream loading
measure("process.stdin", `const t=performance.now();process.stdin;process.stdout.write(String(performance.now()-t))`);
// Also measure require("node:stream") directly
measure(
"require('node:stream')",
`const t=performance.now();require('node:stream');process.stdout.write(String(performance.now()-t))`,
);
// Measure just require("node:fs").createReadStream (also loads internal/fs/streams)
measure(
"fs.createReadStream",
`const t=performance.now();require('node:fs').createReadStream;process.stdout.write(String(performance.now()-t))`,
);

View File

@@ -1,6 +1,7 @@
export function initializeCompressionStream(this, format) {
const zlib = require("node:zlib");
const stream = require("node:stream");
const Readable = require("internal/streams/readable");
const Writable = require("internal/streams/writable");
const builders = {
"deflate": zlib.createDeflate,
@@ -14,8 +15,8 @@ export function initializeCompressionStream(this, format) {
throw $ERR_INVALID_ARG_VALUE("format", format, "must be one of: " + Object.keys(builders).join(", "));
const handle = builders[format]();
$putByIdDirectPrivate(this, "readable", stream.Readable.toWeb(handle));
$putByIdDirectPrivate(this, "writable", stream.Writable.toWeb(handle));
$putByIdDirectPrivate(this, "readable", Readable.toWeb(handle));
$putByIdDirectPrivate(this, "writable", Writable.toWeb(handle));
return this;
}

View File

@@ -1,6 +1,7 @@
export function initializeDecompressionStream(this, format) {
const zlib = require("node:zlib");
const stream = require("node:stream");
const Readable = require("internal/streams/readable");
const Writable = require("internal/streams/writable");
const builders = {
"deflate": zlib.createInflate,
@@ -14,8 +15,8 @@ export function initializeDecompressionStream(this, format) {
throw $ERR_INVALID_ARG_VALUE("format", format, "must be one of: " + Object.keys(builders).join(", "));
const handle = builders[format]();
$putByIdDirectPrivate(this, "readable", stream.Readable.toWeb(handle));
$putByIdDirectPrivate(this, "writable", stream.Writable.toWeb(handle));
$putByIdDirectPrivate(this, "readable", Readable.toWeb(handle));
$putByIdDirectPrivate(this, "writable", Writable.toWeb(handle));
return this;
}

View File

@@ -1,6 +1,8 @@
// fs.ReadStream and fs.WriteStream are lazily loaded to avoid importing 'node:stream' until required
import type { FileSink } from "bun";
const { Readable, Writable, finished } = require("node:stream");
const Readable = require("internal/streams/readable");
const Writable = require("internal/streams/writable");
const finished = require("internal/streams/end-of-stream");
const fs: typeof import("node:fs") = require("node:fs");
const { read, write, fsync, writev } = fs;
const { FileHandle, kRef, kUnref, kFd } = (fs.promises as any).$data as {

View File

@@ -1,4 +1,4 @@
const { Duplex } = require("node:stream");
const Duplex = require("internal/streams/duplex");
const upgradeDuplexToTLS = $newZigFunction("socket.zig", "jsUpgradeDuplexToTLS", 2);
interface NativeHandle {

View File

@@ -1,6 +1,7 @@
// Hardcoded module "node:_http_server"
const EventEmitter: typeof import("node:events").EventEmitter = require("node:events");
const { Duplex, Stream } = require("node:stream");
const Duplex = require("internal/streams/duplex");
const { Stream } = require("internal/streams/legacy");
const { _checkInvalidHeaderChar: checkInvalidHeaderChar } = require("node:_http_common");
const { validateObject, validateLinkHeaderValue, validateBoolean, validateInteger } = require("internal/validators");
const { ConnResetException } = require("internal/shared");

View File

@@ -41,8 +41,8 @@ const kProxySocket = Symbol("proxySocket");
const kSessions = Symbol("sessions");
const kQuotedString = /^[\x09\x20-\x5b\x5d-\x7e\x80-\xff]*$/;
const MAX_ADDITIONAL_SETTINGS = 10;
const Stream = require("node:stream");
const { Readable } = Stream;
const { Stream } = require("internal/streams/legacy");
const Readable = require("internal/streams/readable");
type Http2ConnectOptions = {
settings?: Settings;
protocol?: "https:" | "http:";
@@ -51,7 +51,7 @@ type Http2ConnectOptions = {
const TLSSocket = tls.TLSSocket;
const Socket = net.Socket;
const EventEmitter = require("node:events");
const { Duplex } = Stream;
const Duplex = require("internal/streams/duplex");
const { SafeArrayIterator, SafeSet } = require("internal/primordials");
const { promisify } = require("internal/promisify");

View File

@@ -22,7 +22,8 @@ const isArrayBufferView = ArrayBufferIsView;
const isAnyArrayBuffer = b => b instanceof ArrayBuffer || b instanceof SharedArrayBuffer;
const kMaxLength = $requireMap.$get("buffer")?.exports.kMaxLength ?? BufferModule.kMaxLength;
const { Transform, finished } = require("node:stream");
const Transform = require("internal/streams/transform");
const finished = require("internal/streams/end-of-stream");
const owner_symbol = Symbol("owner_symbol");
const { checkRangesOrGetDefault, validateFunction, validateFiniteNumber } = require("internal/validators");

View File

@@ -1,6 +1,5 @@
const EventEmitter = require("node:events");
const StreamModule = require("node:stream");
const { Readable } = StreamModule;
const Readable = require("internal/streams/readable");
const { _ReadableFromWeb: ReadableFromWeb } = require("internal/webstreams_adapters");
const ObjectCreate = Object.create;